1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
4 2012 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide and size_binop.
32 fold takes a tree as argument and returns a simplified tree.
34 size_binop takes a tree code for an arithmetic operation
35 and two operands that are trees, and produces a tree for the
36 result, assuming the type comes from `sizetype'.
38 size_int takes an integer value, and creates a tree constant
39 with type from `sizetype'.
41 Note: Since the folders get called on non-gimple code as well as
42 gimple code, we need to handle GIMPLE tuples as well as their
43 corresponding tree equivalents. */
47 #include "coretypes.h"
56 #include "diagnostic-core.h"
60 #include "langhooks.h"
63 #include "tree-flow.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  Initializer folding is allowed to be more aggressive
   (e.g. do_mpc_arg2 may fold non-finite results there).  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.
   Bit 0 = "less than", bit 1 = "equal", bit 2 = "greater than",
   bit 3 = "unordered"; ORing codes combines predicates.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
91 static bool negate_mathfn_p (enum built_in_function
);
92 static bool negate_expr_p (tree
);
93 static tree
negate_expr (tree
);
94 static tree
split_tree (tree
, enum tree_code
, tree
*, tree
*, tree
*, int);
95 static tree
associate_trees (location_t
, tree
, tree
, enum tree_code
, tree
);
96 static tree
const_binop (enum tree_code
, tree
, tree
);
97 static enum comparison_code
comparison_to_compcode (enum tree_code
);
98 static enum tree_code
compcode_to_comparison (enum comparison_code
);
99 static int operand_equal_for_comparison_p (tree
, tree
, tree
);
100 static int twoval_comparison_p (tree
, tree
*, tree
*, int *);
101 static tree
eval_subst (location_t
, tree
, tree
, tree
, tree
, tree
);
102 static tree
pedantic_omit_one_operand_loc (location_t
, tree
, tree
, tree
);
103 static tree
distribute_bit_expr (location_t
, enum tree_code
, tree
, tree
, tree
);
104 static tree
make_bit_field_ref (location_t
, tree
, tree
,
105 HOST_WIDE_INT
, HOST_WIDE_INT
, int);
106 static tree
optimize_bit_field_compare (location_t
, enum tree_code
,
108 static tree
decode_field_reference (location_t
, tree
, HOST_WIDE_INT
*,
110 enum machine_mode
*, int *, int *,
112 static int all_ones_mask_p (const_tree
, int);
113 static tree
sign_bit_p (tree
, const_tree
);
114 static int simple_operand_p (const_tree
);
115 static bool simple_operand_p_2 (tree
);
116 static tree
range_binop (enum tree_code
, tree
, tree
, int, tree
, int);
117 static tree
range_predecessor (tree
);
118 static tree
range_successor (tree
);
119 static tree
fold_range_test (location_t
, enum tree_code
, tree
, tree
, tree
);
120 static tree
fold_cond_expr_with_comparison (location_t
, tree
, tree
, tree
, tree
);
121 static tree
unextend (tree
, int, int, tree
);
122 static tree
optimize_minmax_comparison (location_t
, enum tree_code
,
124 static tree
extract_muldiv (tree
, tree
, enum tree_code
, tree
, bool *);
125 static tree
extract_muldiv_1 (tree
, tree
, enum tree_code
, tree
, bool *);
126 static tree
fold_binary_op_with_conditional_arg (location_t
,
127 enum tree_code
, tree
,
130 static tree
fold_mathfn_compare (location_t
,
131 enum built_in_function
, enum tree_code
,
133 static tree
fold_inf_compare (location_t
, enum tree_code
, tree
, tree
, tree
);
134 static tree
fold_div_compare (location_t
, enum tree_code
, tree
, tree
, tree
);
135 static bool reorder_operands_p (const_tree
, const_tree
);
136 static tree
fold_negate_const (tree
, tree
);
137 static tree
fold_not_const (const_tree
, tree
);
138 static tree
fold_relational_const (enum tree_code
, tree
, tree
, tree
);
139 static tree
fold_convert_const (enum tree_code
, tree
, tree
);
141 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
142 Otherwise, return LOC. */
145 expr_location_or (tree t
, location_t loc
)
147 location_t tloc
= EXPR_LOCATION (t
);
148 return tloc
!= UNKNOWN_LOCATION
? tloc
: loc
;
151 /* Similar to protected_set_expr_location, but never modify x in place,
152 if location can and needs to be set, unshare it. */
155 protected_set_expr_location_unshare (tree x
, location_t loc
)
157 if (CAN_HAVE_LOCATION_P (x
)
158 && EXPR_LOCATION (x
) != loc
159 && !(TREE_CODE (x
) == SAVE_EXPR
160 || TREE_CODE (x
) == TARGET_EXPR
161 || TREE_CODE (x
) == BIND_EXPR
))
164 SET_EXPR_LOCATION (x
, loc
);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
180 /* If ARG2 divides ARG1 with zero remainder, carries out the division
181 of type CODE and returns the quotient.
182 Otherwise returns NULL_TREE. */
185 div_if_zero_remainder (enum tree_code code
, const_tree arg1
, const_tree arg2
)
190 /* The sign of the division is according to operand two, that
191 does the correct thing for POINTER_PLUS_EXPR where we want
192 a signed division. */
193 uns
= TYPE_UNSIGNED (TREE_TYPE (arg2
));
195 quo
= double_int_divmod (tree_to_double_int (arg1
),
196 tree_to_double_int (arg2
),
199 if (double_int_zero_p (rem
))
200 return build_int_cst_wide (TREE_TYPE (arg1
), quo
.low
, quo
.high
);
205 /* This is nonzero if we should defer warnings about undefined
206 overflow. This facility exists because these warnings are a
207 special case. The code to estimate loop iterations does not want
208 to issue any warnings, since it works with expressions which do not
209 occur in user code. Various bits of cleanup code call fold(), but
210 only use the result if it has certain characteristics (e.g., is a
211 constant); that code only wants to issue a warning if the result is
214 static int fold_deferring_overflow_warnings
;
216 /* If a warning about undefined overflow is deferred, this is the
217 warning. Note that this may cause us to turn two warnings into
218 one, but that is fine since it is sufficient to only give one
219 warning per expression. */
221 static const char* fold_deferred_overflow_warning
;
223 /* If a warning about undefined overflow is deferred, this is the
224 level at which the warning should be emitted. */
226 static enum warn_strict_overflow_code fold_deferred_overflow_code
;
228 /* Start deferring overflow warnings. We could use a stack here to
229 permit nested calls, but at present it is not necessary. */
232 fold_defer_overflow_warnings (void)
234 ++fold_deferring_overflow_warnings
;
237 /* Stop deferring overflow warnings. If there is a pending warning,
238 and ISSUE is true, then issue the warning if appropriate. STMT is
239 the statement with which the warning should be associated (used for
240 location information); STMT may be NULL. CODE is the level of the
241 warning--a warn_strict_overflow_code value. This function will use
242 the smaller of CODE and the deferred code when deciding whether to
243 issue the warning. CODE may be zero to mean to always use the
247 fold_undefer_overflow_warnings (bool issue
, const_gimple stmt
, int code
)
252 gcc_assert (fold_deferring_overflow_warnings
> 0);
253 --fold_deferring_overflow_warnings
;
254 if (fold_deferring_overflow_warnings
> 0)
256 if (fold_deferred_overflow_warning
!= NULL
258 && code
< (int) fold_deferred_overflow_code
)
259 fold_deferred_overflow_code
= (enum warn_strict_overflow_code
) code
;
263 warnmsg
= fold_deferred_overflow_warning
;
264 fold_deferred_overflow_warning
= NULL
;
266 if (!issue
|| warnmsg
== NULL
)
269 if (gimple_no_warning_p (stmt
))
272 /* Use the smallest code level when deciding to issue the
274 if (code
== 0 || code
> (int) fold_deferred_overflow_code
)
275 code
= fold_deferred_overflow_code
;
277 if (!issue_strict_overflow_warning (code
))
281 locus
= input_location
;
283 locus
= gimple_location (stmt
);
284 warning_at (locus
, OPT_Wstrict_overflow
, "%s", warnmsg
);
287 /* Stop deferring overflow warnings, ignoring any deferred
291 fold_undefer_and_ignore_overflow_warnings (void)
293 fold_undefer_overflow_warnings (false, NULL
, 0);
296 /* Whether we are deferring overflow warnings. */
299 fold_deferring_overflow_warnings_p (void)
301 return fold_deferring_overflow_warnings
> 0;
304 /* This is called when we fold something based on the fact that signed
305 overflow is undefined. */
308 fold_overflow_warning (const char* gmsgid
, enum warn_strict_overflow_code wc
)
310 if (fold_deferring_overflow_warnings
> 0)
312 if (fold_deferred_overflow_warning
== NULL
313 || wc
< fold_deferred_overflow_code
)
315 fold_deferred_overflow_warning
= gmsgid
;
316 fold_deferred_overflow_code
= wc
;
319 else if (issue_strict_overflow_warning (wc
))
320 warning (OPT_Wstrict_overflow
, gmsgid
);
323 /* Return true if the built-in mathematical function specified by CODE
324 is odd, i.e. -f(x) == f(-x). */
327 negate_mathfn_p (enum built_in_function code
)
331 CASE_FLT_FN (BUILT_IN_ASIN
):
332 CASE_FLT_FN (BUILT_IN_ASINH
):
333 CASE_FLT_FN (BUILT_IN_ATAN
):
334 CASE_FLT_FN (BUILT_IN_ATANH
):
335 CASE_FLT_FN (BUILT_IN_CASIN
):
336 CASE_FLT_FN (BUILT_IN_CASINH
):
337 CASE_FLT_FN (BUILT_IN_CATAN
):
338 CASE_FLT_FN (BUILT_IN_CATANH
):
339 CASE_FLT_FN (BUILT_IN_CBRT
):
340 CASE_FLT_FN (BUILT_IN_CPROJ
):
341 CASE_FLT_FN (BUILT_IN_CSIN
):
342 CASE_FLT_FN (BUILT_IN_CSINH
):
343 CASE_FLT_FN (BUILT_IN_CTAN
):
344 CASE_FLT_FN (BUILT_IN_CTANH
):
345 CASE_FLT_FN (BUILT_IN_ERF
):
346 CASE_FLT_FN (BUILT_IN_LLROUND
):
347 CASE_FLT_FN (BUILT_IN_LROUND
):
348 CASE_FLT_FN (BUILT_IN_ROUND
):
349 CASE_FLT_FN (BUILT_IN_SIN
):
350 CASE_FLT_FN (BUILT_IN_SINH
):
351 CASE_FLT_FN (BUILT_IN_TAN
):
352 CASE_FLT_FN (BUILT_IN_TANH
):
353 CASE_FLT_FN (BUILT_IN_TRUNC
):
356 CASE_FLT_FN (BUILT_IN_LLRINT
):
357 CASE_FLT_FN (BUILT_IN_LRINT
):
358 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
359 CASE_FLT_FN (BUILT_IN_RINT
):
360 return !flag_rounding_math
;
368 /* Check whether we may negate an integer constant T without causing
372 may_negate_without_overflow_p (const_tree t
)
374 unsigned HOST_WIDE_INT val
;
378 gcc_assert (TREE_CODE (t
) == INTEGER_CST
);
380 type
= TREE_TYPE (t
);
381 if (TYPE_UNSIGNED (type
))
384 prec
= TYPE_PRECISION (type
);
385 if (prec
> HOST_BITS_PER_WIDE_INT
)
387 if (TREE_INT_CST_LOW (t
) != 0)
389 prec
-= HOST_BITS_PER_WIDE_INT
;
390 val
= TREE_INT_CST_HIGH (t
);
393 val
= TREE_INT_CST_LOW (t
);
394 if (prec
< HOST_BITS_PER_WIDE_INT
)
395 val
&= ((unsigned HOST_WIDE_INT
) 1 << prec
) - 1;
396 return val
!= ((unsigned HOST_WIDE_INT
) 1 << (prec
- 1));
399 /* Determine whether an expression T can be cheaply negated using
400 the function negate_expr without introducing undefined overflow. */
403 negate_expr_p (tree t
)
410 type
= TREE_TYPE (t
);
413 switch (TREE_CODE (t
))
416 if (TYPE_OVERFLOW_WRAPS (type
))
419 /* Check that -CST will not overflow type. */
420 return may_negate_without_overflow_p (t
);
422 return (INTEGRAL_TYPE_P (type
)
423 && TYPE_OVERFLOW_WRAPS (type
));
430 /* We want to canonicalize to positive real constants. Pretend
431 that only negative ones can be easily negated. */
432 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t
));
435 return negate_expr_p (TREE_REALPART (t
))
436 && negate_expr_p (TREE_IMAGPART (t
));
439 return negate_expr_p (TREE_OPERAND (t
, 0))
440 && negate_expr_p (TREE_OPERAND (t
, 1));
443 return negate_expr_p (TREE_OPERAND (t
, 0));
446 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
))
447 || HONOR_SIGNED_ZEROS (TYPE_MODE (type
)))
449 /* -(A + B) -> (-B) - A. */
450 if (negate_expr_p (TREE_OPERAND (t
, 1))
451 && reorder_operands_p (TREE_OPERAND (t
, 0),
452 TREE_OPERAND (t
, 1)))
454 /* -(A + B) -> (-A) - B. */
455 return negate_expr_p (TREE_OPERAND (t
, 0));
458 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
459 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
))
460 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type
))
461 && reorder_operands_p (TREE_OPERAND (t
, 0),
462 TREE_OPERAND (t
, 1));
465 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
471 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t
))))
472 return negate_expr_p (TREE_OPERAND (t
, 1))
473 || negate_expr_p (TREE_OPERAND (t
, 0));
481 /* In general we can't negate A / B, because if A is INT_MIN and
482 B is 1, we may turn this into INT_MIN / -1 which is undefined
483 and actually traps on some architectures. But if overflow is
484 undefined, we can negate, because - (INT_MIN / 1) is an
486 if (INTEGRAL_TYPE_P (TREE_TYPE (t
))
487 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t
)))
489 return negate_expr_p (TREE_OPERAND (t
, 1))
490 || negate_expr_p (TREE_OPERAND (t
, 0));
493 /* Negate -((double)float) as (double)(-float). */
494 if (TREE_CODE (type
) == REAL_TYPE
)
496 tree tem
= strip_float_extensions (t
);
498 return negate_expr_p (tem
);
503 /* Negate -f(x) as f(-x). */
504 if (negate_mathfn_p (builtin_mathfn_code (t
)))
505 return negate_expr_p (CALL_EXPR_ARG (t
, 0));
509 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
510 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
512 tree op1
= TREE_OPERAND (t
, 1);
513 if (TREE_INT_CST_HIGH (op1
) == 0
514 && (unsigned HOST_WIDE_INT
) (TYPE_PRECISION (type
) - 1)
515 == TREE_INT_CST_LOW (op1
))
526 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
527 simplification is possible.
528 If negate_expr_p would return true for T, NULL_TREE will never be
532 fold_negate_expr (location_t loc
, tree t
)
534 tree type
= TREE_TYPE (t
);
537 switch (TREE_CODE (t
))
539 /* Convert - (~A) to A + 1. */
541 if (INTEGRAL_TYPE_P (type
))
542 return fold_build2_loc (loc
, PLUS_EXPR
, type
, TREE_OPERAND (t
, 0),
543 build_int_cst (type
, 1));
547 tem
= fold_negate_const (t
, type
);
548 if (TREE_OVERFLOW (tem
) == TREE_OVERFLOW (t
)
549 || !TYPE_OVERFLOW_TRAPS (type
))
554 tem
= fold_negate_const (t
, type
);
555 /* Two's complement FP formats, such as c4x, may overflow. */
556 if (!TREE_OVERFLOW (tem
) || !flag_trapping_math
)
561 tem
= fold_negate_const (t
, type
);
566 tree rpart
= negate_expr (TREE_REALPART (t
));
567 tree ipart
= negate_expr (TREE_IMAGPART (t
));
569 if ((TREE_CODE (rpart
) == REAL_CST
570 && TREE_CODE (ipart
) == REAL_CST
)
571 || (TREE_CODE (rpart
) == INTEGER_CST
572 && TREE_CODE (ipart
) == INTEGER_CST
))
573 return build_complex (type
, rpart
, ipart
);
578 if (negate_expr_p (t
))
579 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
580 fold_negate_expr (loc
, TREE_OPERAND (t
, 0)),
581 fold_negate_expr (loc
, TREE_OPERAND (t
, 1)));
585 if (negate_expr_p (t
))
586 return fold_build1_loc (loc
, CONJ_EXPR
, type
,
587 fold_negate_expr (loc
, TREE_OPERAND (t
, 0)));
591 return TREE_OPERAND (t
, 0);
594 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
))
595 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type
)))
597 /* -(A + B) -> (-B) - A. */
598 if (negate_expr_p (TREE_OPERAND (t
, 1))
599 && reorder_operands_p (TREE_OPERAND (t
, 0),
600 TREE_OPERAND (t
, 1)))
602 tem
= negate_expr (TREE_OPERAND (t
, 1));
603 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
604 tem
, TREE_OPERAND (t
, 0));
607 /* -(A + B) -> (-A) - B. */
608 if (negate_expr_p (TREE_OPERAND (t
, 0)))
610 tem
= negate_expr (TREE_OPERAND (t
, 0));
611 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
612 tem
, TREE_OPERAND (t
, 1));
618 /* - (A - B) -> B - A */
619 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
))
620 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type
))
621 && reorder_operands_p (TREE_OPERAND (t
, 0), TREE_OPERAND (t
, 1)))
622 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
623 TREE_OPERAND (t
, 1), TREE_OPERAND (t
, 0));
627 if (TYPE_UNSIGNED (type
))
633 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
)))
635 tem
= TREE_OPERAND (t
, 1);
636 if (negate_expr_p (tem
))
637 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
638 TREE_OPERAND (t
, 0), negate_expr (tem
));
639 tem
= TREE_OPERAND (t
, 0);
640 if (negate_expr_p (tem
))
641 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
642 negate_expr (tem
), TREE_OPERAND (t
, 1));
651 /* In general we can't negate A / B, because if A is INT_MIN and
652 B is 1, we may turn this into INT_MIN / -1 which is undefined
653 and actually traps on some architectures. But if overflow is
654 undefined, we can negate, because - (INT_MIN / 1) is an
656 if (!INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
658 const char * const warnmsg
= G_("assuming signed overflow does not "
659 "occur when negating a division");
660 tem
= TREE_OPERAND (t
, 1);
661 if (negate_expr_p (tem
))
663 if (INTEGRAL_TYPE_P (type
)
664 && (TREE_CODE (tem
) != INTEGER_CST
665 || integer_onep (tem
)))
666 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MISC
);
667 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
668 TREE_OPERAND (t
, 0), negate_expr (tem
));
670 tem
= TREE_OPERAND (t
, 0);
671 if (negate_expr_p (tem
))
673 if (INTEGRAL_TYPE_P (type
)
674 && (TREE_CODE (tem
) != INTEGER_CST
675 || tree_int_cst_equal (tem
, TYPE_MIN_VALUE (type
))))
676 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MISC
);
677 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
678 negate_expr (tem
), TREE_OPERAND (t
, 1));
684 /* Convert -((double)float) into (double)(-float). */
685 if (TREE_CODE (type
) == REAL_TYPE
)
687 tem
= strip_float_extensions (t
);
688 if (tem
!= t
&& negate_expr_p (tem
))
689 return fold_convert_loc (loc
, type
, negate_expr (tem
));
694 /* Negate -f(x) as f(-x). */
695 if (negate_mathfn_p (builtin_mathfn_code (t
))
696 && negate_expr_p (CALL_EXPR_ARG (t
, 0)))
700 fndecl
= get_callee_fndecl (t
);
701 arg
= negate_expr (CALL_EXPR_ARG (t
, 0));
702 return build_call_expr_loc (loc
, fndecl
, 1, arg
);
707 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
708 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
710 tree op1
= TREE_OPERAND (t
, 1);
711 if (TREE_INT_CST_HIGH (op1
) == 0
712 && (unsigned HOST_WIDE_INT
) (TYPE_PRECISION (type
) - 1)
713 == TREE_INT_CST_LOW (op1
))
715 tree ntype
= TYPE_UNSIGNED (type
)
716 ? signed_type_for (type
)
717 : unsigned_type_for (type
);
718 tree temp
= fold_convert_loc (loc
, ntype
, TREE_OPERAND (t
, 0));
719 temp
= fold_build2_loc (loc
, RSHIFT_EXPR
, ntype
, temp
, op1
);
720 return fold_convert_loc (loc
, type
, temp
);
732 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
733 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
745 loc
= EXPR_LOCATION (t
);
746 type
= TREE_TYPE (t
);
749 tem
= fold_negate_expr (loc
, t
);
751 tem
= build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (t
), t
);
752 return fold_convert_loc (loc
, type
, tem
);
755 /* Split a tree IN into a constant, literal and variable parts that could be
756 combined with CODE to make IN. "constant" means an expression with
757 TREE_CONSTANT but that isn't an actual constant. CODE must be a
758 commutative arithmetic operation. Store the constant part into *CONP,
759 the literal in *LITP and return the variable part. If a part isn't
760 present, set it to null. If the tree does not decompose in this way,
761 return the entire tree as the variable part and the other parts as null.
763 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
764 case, we negate an operand that was subtracted. Except if it is a
765 literal for which we use *MINUS_LITP instead.
767 If NEGATE_P is true, we are negating all of IN, again except a literal
768 for which we use *MINUS_LITP instead.
770 If IN is itself a literal or constant, return it as appropriate.
772 Note that we do not guarantee that any of the three values will be the
773 same type as IN, but they will have the same signedness and mode. */
776 split_tree (tree in
, enum tree_code code
, tree
*conp
, tree
*litp
,
777 tree
*minus_litp
, int negate_p
)
785 /* Strip any conversions that don't change the machine mode or signedness. */
786 STRIP_SIGN_NOPS (in
);
788 if (TREE_CODE (in
) == INTEGER_CST
|| TREE_CODE (in
) == REAL_CST
789 || TREE_CODE (in
) == FIXED_CST
)
791 else if (TREE_CODE (in
) == code
792 || ((! FLOAT_TYPE_P (TREE_TYPE (in
)) || flag_associative_math
)
793 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in
))
794 /* We can associate addition and subtraction together (even
795 though the C standard doesn't say so) for integers because
796 the value is not affected. For reals, the value might be
797 affected, so we can't. */
798 && ((code
== PLUS_EXPR
&& TREE_CODE (in
) == MINUS_EXPR
)
799 || (code
== MINUS_EXPR
&& TREE_CODE (in
) == PLUS_EXPR
))))
801 tree op0
= TREE_OPERAND (in
, 0);
802 tree op1
= TREE_OPERAND (in
, 1);
803 int neg1_p
= TREE_CODE (in
) == MINUS_EXPR
;
804 int neg_litp_p
= 0, neg_conp_p
= 0, neg_var_p
= 0;
806 /* First see if either of the operands is a literal, then a constant. */
807 if (TREE_CODE (op0
) == INTEGER_CST
|| TREE_CODE (op0
) == REAL_CST
808 || TREE_CODE (op0
) == FIXED_CST
)
809 *litp
= op0
, op0
= 0;
810 else if (TREE_CODE (op1
) == INTEGER_CST
|| TREE_CODE (op1
) == REAL_CST
811 || TREE_CODE (op1
) == FIXED_CST
)
812 *litp
= op1
, neg_litp_p
= neg1_p
, op1
= 0;
814 if (op0
!= 0 && TREE_CONSTANT (op0
))
815 *conp
= op0
, op0
= 0;
816 else if (op1
!= 0 && TREE_CONSTANT (op1
))
817 *conp
= op1
, neg_conp_p
= neg1_p
, op1
= 0;
819 /* If we haven't dealt with either operand, this is not a case we can
820 decompose. Otherwise, VAR is either of the ones remaining, if any. */
821 if (op0
!= 0 && op1
!= 0)
826 var
= op1
, neg_var_p
= neg1_p
;
828 /* Now do any needed negations. */
830 *minus_litp
= *litp
, *litp
= 0;
832 *conp
= negate_expr (*conp
);
834 var
= negate_expr (var
);
836 else if (TREE_CONSTANT (in
))
844 *minus_litp
= *litp
, *litp
= 0;
845 else if (*minus_litp
)
846 *litp
= *minus_litp
, *minus_litp
= 0;
847 *conp
= negate_expr (*conp
);
848 var
= negate_expr (var
);
854 /* Re-associate trees split by the above function. T1 and T2 are
855 either expressions to associate or null. Return the new
856 expression, if any. LOC is the location of the new expression. If
857 we build an operation, do it in TYPE and with CODE. */
860 associate_trees (location_t loc
, tree t1
, tree t2
, enum tree_code code
, tree type
)
867 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
868 try to fold this since we will have infinite recursion. But do
869 deal with any NEGATE_EXPRs. */
870 if (TREE_CODE (t1
) == code
|| TREE_CODE (t2
) == code
871 || TREE_CODE (t1
) == MINUS_EXPR
|| TREE_CODE (t2
) == MINUS_EXPR
)
873 if (code
== PLUS_EXPR
)
875 if (TREE_CODE (t1
) == NEGATE_EXPR
)
876 return build2_loc (loc
, MINUS_EXPR
, type
,
877 fold_convert_loc (loc
, type
, t2
),
878 fold_convert_loc (loc
, type
,
879 TREE_OPERAND (t1
, 0)));
880 else if (TREE_CODE (t2
) == NEGATE_EXPR
)
881 return build2_loc (loc
, MINUS_EXPR
, type
,
882 fold_convert_loc (loc
, type
, t1
),
883 fold_convert_loc (loc
, type
,
884 TREE_OPERAND (t2
, 0)));
885 else if (integer_zerop (t2
))
886 return fold_convert_loc (loc
, type
, t1
);
888 else if (code
== MINUS_EXPR
)
890 if (integer_zerop (t2
))
891 return fold_convert_loc (loc
, type
, t1
);
894 return build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, t1
),
895 fold_convert_loc (loc
, type
, t2
));
898 return fold_build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, t1
),
899 fold_convert_loc (loc
, type
, t2
));
902 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
903 for use in int_const_binop, size_binop and size_diffop. */
906 int_binop_types_match_p (enum tree_code code
, const_tree type1
, const_tree type2
)
908 if (TREE_CODE (type1
) != INTEGER_TYPE
&& !POINTER_TYPE_P (type1
))
910 if (TREE_CODE (type2
) != INTEGER_TYPE
&& !POINTER_TYPE_P (type2
))
925 return TYPE_UNSIGNED (type1
) == TYPE_UNSIGNED (type2
)
926 && TYPE_PRECISION (type1
) == TYPE_PRECISION (type2
)
927 && TYPE_MODE (type1
) == TYPE_MODE (type2
);
931 /* Combine two integer constants ARG1 and ARG2 under operation CODE
932 to produce a new constant. Return NULL_TREE if we don't know how
933 to evaluate CODE at compile-time. */
936 int_const_binop_1 (enum tree_code code
, const_tree arg1
, const_tree arg2
,
939 double_int op1
, op2
, res
, tmp
;
941 tree type
= TREE_TYPE (arg1
);
942 bool uns
= TYPE_UNSIGNED (type
);
943 bool overflow
= false;
945 op1
= tree_to_double_int (arg1
);
946 op2
= tree_to_double_int (arg2
);
951 res
= double_int_ior (op1
, op2
);
955 res
= double_int_xor (op1
, op2
);
959 res
= double_int_and (op1
, op2
);
963 res
= double_int_rshift (op1
, double_int_to_shwi (op2
),
964 TYPE_PRECISION (type
), !uns
);
968 /* It's unclear from the C standard whether shifts can overflow.
969 The following code ignores overflow; perhaps a C standard
970 interpretation ruling is needed. */
971 res
= double_int_lshift (op1
, double_int_to_shwi (op2
),
972 TYPE_PRECISION (type
), !uns
);
976 res
= double_int_rrotate (op1
, double_int_to_shwi (op2
),
977 TYPE_PRECISION (type
));
981 res
= double_int_lrotate (op1
, double_int_to_shwi (op2
),
982 TYPE_PRECISION (type
));
986 overflow
= add_double (op1
.low
, op1
.high
, op2
.low
, op2
.high
,
987 &res
.low
, &res
.high
);
991 neg_double (op2
.low
, op2
.high
, &res
.low
, &res
.high
);
992 add_double (op1
.low
, op1
.high
, res
.low
, res
.high
,
993 &res
.low
, &res
.high
);
994 overflow
= OVERFLOW_SUM_SIGN (res
.high
, op2
.high
, op1
.high
);
998 overflow
= mul_double (op1
.low
, op1
.high
, op2
.low
, op2
.high
,
999 &res
.low
, &res
.high
);
1002 case MULT_HIGHPART_EXPR
:
1003 /* ??? Need quad precision, or an additional shift operand
1004 to the multiply primitive, to handle very large highparts. */
1005 if (TYPE_PRECISION (type
) > HOST_BITS_PER_WIDE_INT
)
1007 tmp
= double_int_mul (op1
, op2
);
1008 res
= double_int_rshift (tmp
, TYPE_PRECISION (type
),
1009 TYPE_PRECISION (type
), !uns
);
1012 case TRUNC_DIV_EXPR
:
1013 case FLOOR_DIV_EXPR
: case CEIL_DIV_EXPR
:
1014 case EXACT_DIV_EXPR
:
1015 /* This is a shortcut for a common special case. */
1016 if (op2
.high
== 0 && (HOST_WIDE_INT
) op2
.low
> 0
1017 && !TREE_OVERFLOW (arg1
)
1018 && !TREE_OVERFLOW (arg2
)
1019 && op1
.high
== 0 && (HOST_WIDE_INT
) op1
.low
>= 0)
1021 if (code
== CEIL_DIV_EXPR
)
1022 op1
.low
+= op2
.low
- 1;
1024 res
.low
= op1
.low
/ op2
.low
, res
.high
= 0;
1028 /* ... fall through ... */
1030 case ROUND_DIV_EXPR
:
1031 if (double_int_zero_p (op2
))
1033 if (double_int_one_p (op2
))
1038 if (double_int_equal_p (op1
, op2
)
1039 && ! double_int_zero_p (op1
))
1041 res
= double_int_one
;
1044 overflow
= div_and_round_double (code
, uns
,
1045 op1
.low
, op1
.high
, op2
.low
, op2
.high
,
1046 &res
.low
, &res
.high
,
1047 &tmp
.low
, &tmp
.high
);
1050 case TRUNC_MOD_EXPR
:
1051 case FLOOR_MOD_EXPR
: case CEIL_MOD_EXPR
:
1052 /* This is a shortcut for a common special case. */
1053 if (op2
.high
== 0 && (HOST_WIDE_INT
) op2
.low
> 0
1054 && !TREE_OVERFLOW (arg1
)
1055 && !TREE_OVERFLOW (arg2
)
1056 && op1
.high
== 0 && (HOST_WIDE_INT
) op1
.low
>= 0)
1058 if (code
== CEIL_MOD_EXPR
)
1059 op1
.low
+= op2
.low
- 1;
1060 res
.low
= op1
.low
% op2
.low
, res
.high
= 0;
1064 /* ... fall through ... */
1066 case ROUND_MOD_EXPR
:
1067 if (double_int_zero_p (op2
))
1069 overflow
= div_and_round_double (code
, uns
,
1070 op1
.low
, op1
.high
, op2
.low
, op2
.high
,
1071 &tmp
.low
, &tmp
.high
,
1072 &res
.low
, &res
.high
);
1076 res
= double_int_min (op1
, op2
, uns
);
1080 res
= double_int_max (op1
, op2
, uns
);
1087 t
= force_fit_type_double (TREE_TYPE (arg1
), res
, overflowable
,
1089 | TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
));
1095 int_const_binop (enum tree_code code
, const_tree arg1
, const_tree arg2
)
1097 return int_const_binop_1 (code
, arg1
, arg2
, 1);
1100 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1101 constant. We assume ARG1 and ARG2 have the same data type, or at least
1102 are the same kind of constant and the same machine mode. Return zero if
1103 combining the constants is not allowed in the current operating mode. */
1106 const_binop (enum tree_code code
, tree arg1
, tree arg2
)
1108 /* Sanity check for the recursive cases. */
1115 if (TREE_CODE (arg1
) == INTEGER_CST
)
1116 return int_const_binop (code
, arg1
, arg2
);
1118 if (TREE_CODE (arg1
) == REAL_CST
)
1120 enum machine_mode mode
;
1123 REAL_VALUE_TYPE value
;
1124 REAL_VALUE_TYPE result
;
1128 /* The following codes are handled by real_arithmetic. */
1143 d1
= TREE_REAL_CST (arg1
);
1144 d2
= TREE_REAL_CST (arg2
);
1146 type
= TREE_TYPE (arg1
);
1147 mode
= TYPE_MODE (type
);
1149 /* Don't perform operation if we honor signaling NaNs and
1150 either operand is a NaN. */
1151 if (HONOR_SNANS (mode
)
1152 && (REAL_VALUE_ISNAN (d1
) || REAL_VALUE_ISNAN (d2
)))
1155 /* Don't perform operation if it would raise a division
1156 by zero exception. */
1157 if (code
== RDIV_EXPR
1158 && REAL_VALUES_EQUAL (d2
, dconst0
)
1159 && (flag_trapping_math
|| ! MODE_HAS_INFINITIES (mode
)))
1162 /* If either operand is a NaN, just return it. Otherwise, set up
1163 for floating-point trap; we return an overflow. */
1164 if (REAL_VALUE_ISNAN (d1
))
1166 else if (REAL_VALUE_ISNAN (d2
))
1169 inexact
= real_arithmetic (&value
, code
, &d1
, &d2
);
1170 real_convert (&result
, mode
, &value
);
1172 /* Don't constant fold this floating point operation if
1173 the result has overflowed and flag_trapping_math. */
1174 if (flag_trapping_math
1175 && MODE_HAS_INFINITIES (mode
)
1176 && REAL_VALUE_ISINF (result
)
1177 && !REAL_VALUE_ISINF (d1
)
1178 && !REAL_VALUE_ISINF (d2
))
1181 /* Don't constant fold this floating point operation if the
1182 result may dependent upon the run-time rounding mode and
1183 flag_rounding_math is set, or if GCC's software emulation
1184 is unable to accurately represent the result. */
1185 if ((flag_rounding_math
1186 || (MODE_COMPOSITE_P (mode
) && !flag_unsafe_math_optimizations
))
1187 && (inexact
|| !real_identical (&result
, &value
)))
1190 t
= build_real (type
, result
);
1192 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
);
1196 if (TREE_CODE (arg1
) == FIXED_CST
)
1198 FIXED_VALUE_TYPE f1
;
1199 FIXED_VALUE_TYPE f2
;
1200 FIXED_VALUE_TYPE result
;
1205 /* The following codes are handled by fixed_arithmetic. */
1211 case TRUNC_DIV_EXPR
:
1212 f2
= TREE_FIXED_CST (arg2
);
1217 f2
.data
.high
= TREE_INT_CST_HIGH (arg2
);
1218 f2
.data
.low
= TREE_INT_CST_LOW (arg2
);
1226 f1
= TREE_FIXED_CST (arg1
);
1227 type
= TREE_TYPE (arg1
);
1228 sat_p
= TYPE_SATURATING (type
);
1229 overflow_p
= fixed_arithmetic (&result
, code
, &f1
, &f2
, sat_p
);
1230 t
= build_fixed (type
, result
);
1231 /* Propagate overflow flags. */
1232 if (overflow_p
| TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
))
1233 TREE_OVERFLOW (t
) = 1;
1237 if (TREE_CODE (arg1
) == COMPLEX_CST
)
1239 tree type
= TREE_TYPE (arg1
);
1240 tree r1
= TREE_REALPART (arg1
);
1241 tree i1
= TREE_IMAGPART (arg1
);
1242 tree r2
= TREE_REALPART (arg2
);
1243 tree i2
= TREE_IMAGPART (arg2
);
1250 real
= const_binop (code
, r1
, r2
);
1251 imag
= const_binop (code
, i1
, i2
);
1255 if (COMPLEX_FLOAT_TYPE_P (type
))
1256 return do_mpc_arg2 (arg1
, arg2
, type
,
1257 /* do_nonfinite= */ folding_initializer
,
1260 real
= const_binop (MINUS_EXPR
,
1261 const_binop (MULT_EXPR
, r1
, r2
),
1262 const_binop (MULT_EXPR
, i1
, i2
));
1263 imag
= const_binop (PLUS_EXPR
,
1264 const_binop (MULT_EXPR
, r1
, i2
),
1265 const_binop (MULT_EXPR
, i1
, r2
));
1269 if (COMPLEX_FLOAT_TYPE_P (type
))
1270 return do_mpc_arg2 (arg1
, arg2
, type
,
1271 /* do_nonfinite= */ folding_initializer
,
1274 case TRUNC_DIV_EXPR
:
1276 case FLOOR_DIV_EXPR
:
1277 case ROUND_DIV_EXPR
:
1278 if (flag_complex_method
== 0)
1280 /* Keep this algorithm in sync with
1281 tree-complex.c:expand_complex_div_straight().
1283 Expand complex division to scalars, straightforward algorithm.
1284 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1288 = const_binop (PLUS_EXPR
,
1289 const_binop (MULT_EXPR
, r2
, r2
),
1290 const_binop (MULT_EXPR
, i2
, i2
));
1292 = const_binop (PLUS_EXPR
,
1293 const_binop (MULT_EXPR
, r1
, r2
),
1294 const_binop (MULT_EXPR
, i1
, i2
));
1296 = const_binop (MINUS_EXPR
,
1297 const_binop (MULT_EXPR
, i1
, r2
),
1298 const_binop (MULT_EXPR
, r1
, i2
));
1300 real
= const_binop (code
, t1
, magsquared
);
1301 imag
= const_binop (code
, t2
, magsquared
);
1305 /* Keep this algorithm in sync with
1306 tree-complex.c:expand_complex_div_wide().
1308 Expand complex division to scalars, modified algorithm to minimize
1309 overflow with wide input ranges. */
1310 tree compare
= fold_build2 (LT_EXPR
, boolean_type_node
,
1311 fold_abs_const (r2
, TREE_TYPE (type
)),
1312 fold_abs_const (i2
, TREE_TYPE (type
)));
1314 if (integer_nonzerop (compare
))
1316 /* In the TRUE branch, we compute
1318 div = (br * ratio) + bi;
1319 tr = (ar * ratio) + ai;
1320 ti = (ai * ratio) - ar;
1323 tree ratio
= const_binop (code
, r2
, i2
);
1324 tree div
= const_binop (PLUS_EXPR
, i2
,
1325 const_binop (MULT_EXPR
, r2
, ratio
));
1326 real
= const_binop (MULT_EXPR
, r1
, ratio
);
1327 real
= const_binop (PLUS_EXPR
, real
, i1
);
1328 real
= const_binop (code
, real
, div
);
1330 imag
= const_binop (MULT_EXPR
, i1
, ratio
);
1331 imag
= const_binop (MINUS_EXPR
, imag
, r1
);
1332 imag
= const_binop (code
, imag
, div
);
1336 /* In the FALSE branch, we compute
1338 divisor = (d * ratio) + c;
1339 tr = (b * ratio) + a;
1340 ti = b - (a * ratio);
1343 tree ratio
= const_binop (code
, i2
, r2
);
1344 tree div
= const_binop (PLUS_EXPR
, r2
,
1345 const_binop (MULT_EXPR
, i2
, ratio
));
1347 real
= const_binop (MULT_EXPR
, i1
, ratio
);
1348 real
= const_binop (PLUS_EXPR
, real
, r1
);
1349 real
= const_binop (code
, real
, div
);
1351 imag
= const_binop (MULT_EXPR
, r1
, ratio
);
1352 imag
= const_binop (MINUS_EXPR
, i1
, imag
);
1353 imag
= const_binop (code
, imag
, div
);
1363 return build_complex (type
, real
, imag
);
1366 if (TREE_CODE (arg1
) == VECTOR_CST
1367 && TREE_CODE (arg2
) == VECTOR_CST
)
1369 tree type
= TREE_TYPE(arg1
);
1370 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
1371 tree
*elts
= XALLOCAVEC (tree
, count
);
1373 for (i
= 0; i
< count
; i
++)
1375 tree elem1
= VECTOR_CST_ELT (arg1
, i
);
1376 tree elem2
= VECTOR_CST_ELT (arg2
, i
);
1378 elts
[i
] = const_binop (code
, elem1
, elem2
);
1380 /* It is possible that const_binop cannot handle the given
1381 code and return NULL_TREE */
1382 if(elts
[i
] == NULL_TREE
)
1386 return build_vector (type
, elts
);
1391 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1392 indicates which particular sizetype to create. */
1395 size_int_kind (HOST_WIDE_INT number
, enum size_type_kind kind
)
1397 return build_int_cst (sizetype_tab
[(int) kind
], number
);
1400 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1401 is a tree code. The type of the result is taken from the operands.
1402 Both must be equivalent integer types, ala int_binop_types_match_p.
1403 If the operands are constant, so is the result. */
1406 size_binop_loc (location_t loc
, enum tree_code code
, tree arg0
, tree arg1
)
1408 tree type
= TREE_TYPE (arg0
);
1410 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
1411 return error_mark_node
;
1413 gcc_assert (int_binop_types_match_p (code
, TREE_TYPE (arg0
),
1416 /* Handle the special case of two integer constants faster. */
1417 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
1419 /* And some specific cases even faster than that. */
1420 if (code
== PLUS_EXPR
)
1422 if (integer_zerop (arg0
) && !TREE_OVERFLOW (arg0
))
1424 if (integer_zerop (arg1
) && !TREE_OVERFLOW (arg1
))
1427 else if (code
== MINUS_EXPR
)
1429 if (integer_zerop (arg1
) && !TREE_OVERFLOW (arg1
))
1432 else if (code
== MULT_EXPR
)
1434 if (integer_onep (arg0
) && !TREE_OVERFLOW (arg0
))
1438 /* Handle general case of two integer constants. For sizetype
1439 constant calculations we always want to know about overflow,
1440 even in the unsigned case. */
1441 return int_const_binop_1 (code
, arg0
, arg1
, -1);
1444 return fold_build2_loc (loc
, code
, type
, arg0
, arg1
);
1447 /* Given two values, either both of sizetype or both of bitsizetype,
1448 compute the difference between the two values. Return the value
1449 in signed type corresponding to the type of the operands. */
1452 size_diffop_loc (location_t loc
, tree arg0
, tree arg1
)
1454 tree type
= TREE_TYPE (arg0
);
1457 gcc_assert (int_binop_types_match_p (MINUS_EXPR
, TREE_TYPE (arg0
),
1460 /* If the type is already signed, just do the simple thing. */
1461 if (!TYPE_UNSIGNED (type
))
1462 return size_binop_loc (loc
, MINUS_EXPR
, arg0
, arg1
);
1464 if (type
== sizetype
)
1466 else if (type
== bitsizetype
)
1467 ctype
= sbitsizetype
;
1469 ctype
= signed_type_for (type
);
1471 /* If either operand is not a constant, do the conversions to the signed
1472 type and subtract. The hardware will do the right thing with any
1473 overflow in the subtraction. */
1474 if (TREE_CODE (arg0
) != INTEGER_CST
|| TREE_CODE (arg1
) != INTEGER_CST
)
1475 return size_binop_loc (loc
, MINUS_EXPR
,
1476 fold_convert_loc (loc
, ctype
, arg0
),
1477 fold_convert_loc (loc
, ctype
, arg1
));
1479 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1480 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1481 overflow) and negate (which can't either). Special-case a result
1482 of zero while we're here. */
1483 if (tree_int_cst_equal (arg0
, arg1
))
1484 return build_int_cst (ctype
, 0);
1485 else if (tree_int_cst_lt (arg1
, arg0
))
1486 return fold_convert_loc (loc
, ctype
,
1487 size_binop_loc (loc
, MINUS_EXPR
, arg0
, arg1
));
1489 return size_binop_loc (loc
, MINUS_EXPR
, build_int_cst (ctype
, 0),
1490 fold_convert_loc (loc
, ctype
,
1491 size_binop_loc (loc
,
1496 /* A subroutine of fold_convert_const handling conversions of an
1497 INTEGER_CST to another integer type. */
1500 fold_convert_const_int_from_int (tree type
, const_tree arg1
)
1504 /* Given an integer constant, make new constant with new type,
1505 appropriately sign-extended or truncated. */
1506 t
= force_fit_type_double (type
, tree_to_double_int (arg1
),
1507 !POINTER_TYPE_P (TREE_TYPE (arg1
)),
1508 (TREE_INT_CST_HIGH (arg1
) < 0
1509 && (TYPE_UNSIGNED (type
)
1510 < TYPE_UNSIGNED (TREE_TYPE (arg1
))))
1511 | TREE_OVERFLOW (arg1
));
1516 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1517 to an integer type. */
1520 fold_convert_const_int_from_real (enum tree_code code
, tree type
, const_tree arg1
)
1525 /* The following code implements the floating point to integer
1526 conversion rules required by the Java Language Specification,
1527 that IEEE NaNs are mapped to zero and values that overflow
1528 the target precision saturate, i.e. values greater than
1529 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1530 are mapped to INT_MIN. These semantics are allowed by the
1531 C and C++ standards that simply state that the behavior of
1532 FP-to-integer conversion is unspecified upon overflow. */
1536 REAL_VALUE_TYPE x
= TREE_REAL_CST (arg1
);
1540 case FIX_TRUNC_EXPR
:
1541 real_trunc (&r
, VOIDmode
, &x
);
1548 /* If R is NaN, return zero and show we have an overflow. */
1549 if (REAL_VALUE_ISNAN (r
))
1552 val
= double_int_zero
;
1555 /* See if R is less than the lower bound or greater than the
1560 tree lt
= TYPE_MIN_VALUE (type
);
1561 REAL_VALUE_TYPE l
= real_value_from_int_cst (NULL_TREE
, lt
);
1562 if (REAL_VALUES_LESS (r
, l
))
1565 val
= tree_to_double_int (lt
);
1571 tree ut
= TYPE_MAX_VALUE (type
);
1574 REAL_VALUE_TYPE u
= real_value_from_int_cst (NULL_TREE
, ut
);
1575 if (REAL_VALUES_LESS (u
, r
))
1578 val
= tree_to_double_int (ut
);
1584 real_to_integer2 ((HOST_WIDE_INT
*) &val
.low
, &val
.high
, &r
);
1586 t
= force_fit_type_double (type
, val
, -1, overflow
| TREE_OVERFLOW (arg1
));
1590 /* A subroutine of fold_convert_const handling conversions of a
1591 FIXED_CST to an integer type. */
1594 fold_convert_const_int_from_fixed (tree type
, const_tree arg1
)
1597 double_int temp
, temp_trunc
;
1600 /* Right shift FIXED_CST to temp by fbit. */
1601 temp
= TREE_FIXED_CST (arg1
).data
;
1602 mode
= TREE_FIXED_CST (arg1
).mode
;
1603 if (GET_MODE_FBIT (mode
) < HOST_BITS_PER_DOUBLE_INT
)
1605 temp
= double_int_rshift (temp
, GET_MODE_FBIT (mode
),
1606 HOST_BITS_PER_DOUBLE_INT
,
1607 SIGNED_FIXED_POINT_MODE_P (mode
));
1609 /* Left shift temp to temp_trunc by fbit. */
1610 temp_trunc
= double_int_lshift (temp
, GET_MODE_FBIT (mode
),
1611 HOST_BITS_PER_DOUBLE_INT
,
1612 SIGNED_FIXED_POINT_MODE_P (mode
));
1616 temp
= double_int_zero
;
1617 temp_trunc
= double_int_zero
;
1620 /* If FIXED_CST is negative, we need to round the value toward 0.
1621 By checking if the fractional bits are not zero to add 1 to temp. */
1622 if (SIGNED_FIXED_POINT_MODE_P (mode
)
1623 && double_int_negative_p (temp_trunc
)
1624 && !double_int_equal_p (TREE_FIXED_CST (arg1
).data
, temp_trunc
))
1625 temp
= double_int_add (temp
, double_int_one
);
1627 /* Given a fixed-point constant, make new constant with new type,
1628 appropriately sign-extended or truncated. */
1629 t
= force_fit_type_double (type
, temp
, -1,
1630 (double_int_negative_p (temp
)
1631 && (TYPE_UNSIGNED (type
)
1632 < TYPE_UNSIGNED (TREE_TYPE (arg1
))))
1633 | TREE_OVERFLOW (arg1
));
1638 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1639 to another floating point type. */
1642 fold_convert_const_real_from_real (tree type
, const_tree arg1
)
1644 REAL_VALUE_TYPE value
;
1647 real_convert (&value
, TYPE_MODE (type
), &TREE_REAL_CST (arg1
));
1648 t
= build_real (type
, value
);
1650 /* If converting an infinity or NAN to a representation that doesn't
1651 have one, set the overflow bit so that we can produce some kind of
1652 error message at the appropriate point if necessary. It's not the
1653 most user-friendly message, but it's better than nothing. */
1654 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1
))
1655 && !MODE_HAS_INFINITIES (TYPE_MODE (type
)))
1656 TREE_OVERFLOW (t
) = 1;
1657 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1
))
1658 && !MODE_HAS_NANS (TYPE_MODE (type
)))
1659 TREE_OVERFLOW (t
) = 1;
1660 /* Regular overflow, conversion produced an infinity in a mode that
1661 can't represent them. */
1662 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type
))
1663 && REAL_VALUE_ISINF (value
)
1664 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1
)))
1665 TREE_OVERFLOW (t
) = 1;
1667 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
);
1671 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1672 to a floating point type. */
1675 fold_convert_const_real_from_fixed (tree type
, const_tree arg1
)
1677 REAL_VALUE_TYPE value
;
1680 real_convert_from_fixed (&value
, TYPE_MODE (type
), &TREE_FIXED_CST (arg1
));
1681 t
= build_real (type
, value
);
1683 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
);
1687 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1688 to another fixed-point type. */
1691 fold_convert_const_fixed_from_fixed (tree type
, const_tree arg1
)
1693 FIXED_VALUE_TYPE value
;
1697 overflow_p
= fixed_convert (&value
, TYPE_MODE (type
), &TREE_FIXED_CST (arg1
),
1698 TYPE_SATURATING (type
));
1699 t
= build_fixed (type
, value
);
1701 /* Propagate overflow flags. */
1702 if (overflow_p
| TREE_OVERFLOW (arg1
))
1703 TREE_OVERFLOW (t
) = 1;
1707 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
1708 to a fixed-point type. */
1711 fold_convert_const_fixed_from_int (tree type
, const_tree arg1
)
1713 FIXED_VALUE_TYPE value
;
1717 overflow_p
= fixed_convert_from_int (&value
, TYPE_MODE (type
),
1718 TREE_INT_CST (arg1
),
1719 TYPE_UNSIGNED (TREE_TYPE (arg1
)),
1720 TYPE_SATURATING (type
));
1721 t
= build_fixed (type
, value
);
1723 /* Propagate overflow flags. */
1724 if (overflow_p
| TREE_OVERFLOW (arg1
))
1725 TREE_OVERFLOW (t
) = 1;
1729 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1730 to a fixed-point type. */
1733 fold_convert_const_fixed_from_real (tree type
, const_tree arg1
)
1735 FIXED_VALUE_TYPE value
;
1739 overflow_p
= fixed_convert_from_real (&value
, TYPE_MODE (type
),
1740 &TREE_REAL_CST (arg1
),
1741 TYPE_SATURATING (type
));
1742 t
= build_fixed (type
, value
);
1744 /* Propagate overflow flags. */
1745 if (overflow_p
| TREE_OVERFLOW (arg1
))
1746 TREE_OVERFLOW (t
) = 1;
1750 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1751 type TYPE. If no simplification can be done return NULL_TREE. */
1754 fold_convert_const (enum tree_code code
, tree type
, tree arg1
)
1756 if (TREE_TYPE (arg1
) == type
)
1759 if (POINTER_TYPE_P (type
) || INTEGRAL_TYPE_P (type
)
1760 || TREE_CODE (type
) == OFFSET_TYPE
)
1762 if (TREE_CODE (arg1
) == INTEGER_CST
)
1763 return fold_convert_const_int_from_int (type
, arg1
);
1764 else if (TREE_CODE (arg1
) == REAL_CST
)
1765 return fold_convert_const_int_from_real (code
, type
, arg1
);
1766 else if (TREE_CODE (arg1
) == FIXED_CST
)
1767 return fold_convert_const_int_from_fixed (type
, arg1
);
1769 else if (TREE_CODE (type
) == REAL_TYPE
)
1771 if (TREE_CODE (arg1
) == INTEGER_CST
)
1772 return build_real_from_int_cst (type
, arg1
);
1773 else if (TREE_CODE (arg1
) == REAL_CST
)
1774 return fold_convert_const_real_from_real (type
, arg1
);
1775 else if (TREE_CODE (arg1
) == FIXED_CST
)
1776 return fold_convert_const_real_from_fixed (type
, arg1
);
1778 else if (TREE_CODE (type
) == FIXED_POINT_TYPE
)
1780 if (TREE_CODE (arg1
) == FIXED_CST
)
1781 return fold_convert_const_fixed_from_fixed (type
, arg1
);
1782 else if (TREE_CODE (arg1
) == INTEGER_CST
)
1783 return fold_convert_const_fixed_from_int (type
, arg1
);
1784 else if (TREE_CODE (arg1
) == REAL_CST
)
1785 return fold_convert_const_fixed_from_real (type
, arg1
);
1790 /* Construct a vector of zero elements of vector type TYPE. */
1793 build_zero_vector (tree type
)
1797 t
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), integer_zero_node
);
1798 return build_vector_from_val (type
, t
);
1801 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
1804 fold_convertible_p (const_tree type
, const_tree arg
)
1806 tree orig
= TREE_TYPE (arg
);
1811 if (TREE_CODE (arg
) == ERROR_MARK
1812 || TREE_CODE (type
) == ERROR_MARK
1813 || TREE_CODE (orig
) == ERROR_MARK
)
1816 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig
))
1819 switch (TREE_CODE (type
))
1821 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
1822 case POINTER_TYPE
: case REFERENCE_TYPE
:
1824 if (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
1825 || TREE_CODE (orig
) == OFFSET_TYPE
)
1827 return (TREE_CODE (orig
) == VECTOR_TYPE
1828 && tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
1831 case FIXED_POINT_TYPE
:
1835 return TREE_CODE (type
) == TREE_CODE (orig
);
1842 /* Convert expression ARG to type TYPE. Used by the middle-end for
1843 simple conversions in preference to calling the front-end's convert. */
1846 fold_convert_loc (location_t loc
, tree type
, tree arg
)
1848 tree orig
= TREE_TYPE (arg
);
1854 if (TREE_CODE (arg
) == ERROR_MARK
1855 || TREE_CODE (type
) == ERROR_MARK
1856 || TREE_CODE (orig
) == ERROR_MARK
)
1857 return error_mark_node
;
1859 switch (TREE_CODE (type
))
1862 case REFERENCE_TYPE
:
1863 /* Handle conversions between pointers to different address spaces. */
1864 if (POINTER_TYPE_P (orig
)
1865 && (TYPE_ADDR_SPACE (TREE_TYPE (type
))
1866 != TYPE_ADDR_SPACE (TREE_TYPE (orig
))))
1867 return fold_build1_loc (loc
, ADDR_SPACE_CONVERT_EXPR
, type
, arg
);
1870 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
1872 if (TREE_CODE (arg
) == INTEGER_CST
)
1874 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
1875 if (tem
!= NULL_TREE
)
1878 if (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
1879 || TREE_CODE (orig
) == OFFSET_TYPE
)
1880 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
1881 if (TREE_CODE (orig
) == COMPLEX_TYPE
)
1882 return fold_convert_loc (loc
, type
,
1883 fold_build1_loc (loc
, REALPART_EXPR
,
1884 TREE_TYPE (orig
), arg
));
1885 gcc_assert (TREE_CODE (orig
) == VECTOR_TYPE
1886 && tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
1887 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
1890 if (TREE_CODE (arg
) == INTEGER_CST
)
1892 tem
= fold_convert_const (FLOAT_EXPR
, type
, arg
);
1893 if (tem
!= NULL_TREE
)
1896 else if (TREE_CODE (arg
) == REAL_CST
)
1898 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
1899 if (tem
!= NULL_TREE
)
1902 else if (TREE_CODE (arg
) == FIXED_CST
)
1904 tem
= fold_convert_const (FIXED_CONVERT_EXPR
, type
, arg
);
1905 if (tem
!= NULL_TREE
)
1909 switch (TREE_CODE (orig
))
1912 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
1913 case POINTER_TYPE
: case REFERENCE_TYPE
:
1914 return fold_build1_loc (loc
, FLOAT_EXPR
, type
, arg
);
1917 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
1919 case FIXED_POINT_TYPE
:
1920 return fold_build1_loc (loc
, FIXED_CONVERT_EXPR
, type
, arg
);
1923 tem
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
1924 return fold_convert_loc (loc
, type
, tem
);
1930 case FIXED_POINT_TYPE
:
1931 if (TREE_CODE (arg
) == FIXED_CST
|| TREE_CODE (arg
) == INTEGER_CST
1932 || TREE_CODE (arg
) == REAL_CST
)
1934 tem
= fold_convert_const (FIXED_CONVERT_EXPR
, type
, arg
);
1935 if (tem
!= NULL_TREE
)
1936 goto fold_convert_exit
;
1939 switch (TREE_CODE (orig
))
1941 case FIXED_POINT_TYPE
:
1946 return fold_build1_loc (loc
, FIXED_CONVERT_EXPR
, type
, arg
);
1949 tem
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
1950 return fold_convert_loc (loc
, type
, tem
);
1957 switch (TREE_CODE (orig
))
1960 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
1961 case POINTER_TYPE
: case REFERENCE_TYPE
:
1963 case FIXED_POINT_TYPE
:
1964 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
1965 fold_convert_loc (loc
, TREE_TYPE (type
), arg
),
1966 fold_convert_loc (loc
, TREE_TYPE (type
),
1967 integer_zero_node
));
1972 if (TREE_CODE (arg
) == COMPLEX_EXPR
)
1974 rpart
= fold_convert_loc (loc
, TREE_TYPE (type
),
1975 TREE_OPERAND (arg
, 0));
1976 ipart
= fold_convert_loc (loc
, TREE_TYPE (type
),
1977 TREE_OPERAND (arg
, 1));
1978 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
, ipart
);
1981 arg
= save_expr (arg
);
1982 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
1983 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, TREE_TYPE (orig
), arg
);
1984 rpart
= fold_convert_loc (loc
, TREE_TYPE (type
), rpart
);
1985 ipart
= fold_convert_loc (loc
, TREE_TYPE (type
), ipart
);
1986 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
, ipart
);
1994 if (integer_zerop (arg
))
1995 return build_zero_vector (type
);
1996 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
1997 gcc_assert (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
1998 || TREE_CODE (orig
) == VECTOR_TYPE
);
1999 return fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, type
, arg
);
2002 tem
= fold_ignored_result (arg
);
2003 return fold_build1_loc (loc
, NOP_EXPR
, type
, tem
);
2006 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig
))
2007 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2011 protected_set_expr_location_unshare (tem
, loc
);
/* NOTE(review): this region was mangled during extraction; most of the
   switch's case labels were dropped.  The surviving fragments are kept
   byte-identical below.  Presumably the full label list covers all lvalue
   tree codes (decls, reference expressions, increment/decrement, etc.) --
   confirm against upstream fold-const.c before relying on it.  */
2015 /* Return false if expr can be assumed not to be an lvalue, true
2019 maybe_lvalue_p (const_tree x
)
2021 /* We only need to wrap lvalue tree codes. */
2022 switch (TREE_CODE (x
))
2035 case ARRAY_RANGE_REF
:
2041 case PREINCREMENT_EXPR
:
2042 case PREDECREMENT_EXPR
:
2044 case TRY_CATCH_EXPR
:
2045 case WITH_CLEANUP_EXPR
:
2054 /* Assume the worst for front-end tree codes. */
2055 if ((int)TREE_CODE (x
) >= NUM_TREE_CODES
)
2063 /* Return an expr equal to X but certainly not valid as an lvalue. */
2066 non_lvalue_loc (location_t loc
, tree x
)
2068 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2073 if (! maybe_lvalue_p (x
))
2075 return build1_loc (loc
, NON_LVALUE_EXPR
, TREE_TYPE (x
), x
);
2078 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2079 Zero means allow extended lvalues. */
2081 int pedantic_lvalues
;
2083 /* When pedantic, return an expr equal to X but certainly not valid as a
2084 pedantic lvalue. Otherwise, return X. */
2087 pedantic_non_lvalue_loc (location_t loc
, tree x
)
2089 if (pedantic_lvalues
)
2090 return non_lvalue_loc (loc
, x
);
2092 return protected_set_expr_location_unshare (x
, loc
);
2095 /* Given a tree comparison code, return the code that is the logical inverse.
2096 It is generally not safe to do this for floating-point comparisons, except
2097 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2098 ERROR_MARK in this case. */
2101 invert_tree_comparison (enum tree_code code
, bool honor_nans
)
2103 if (honor_nans
&& flag_trapping_math
&& code
!= EQ_EXPR
&& code
!= NE_EXPR
2104 && code
!= ORDERED_EXPR
&& code
!= UNORDERED_EXPR
)
2114 return honor_nans
? UNLE_EXPR
: LE_EXPR
;
2116 return honor_nans
? UNLT_EXPR
: LT_EXPR
;
2118 return honor_nans
? UNGE_EXPR
: GE_EXPR
;
2120 return honor_nans
? UNGT_EXPR
: GT_EXPR
;
2134 return UNORDERED_EXPR
;
2135 case UNORDERED_EXPR
:
2136 return ORDERED_EXPR
;
/* NOTE(review): the body of this function was almost entirely lost in
   extraction -- only one case label survives.  Presumably the full switch
   maps each comparison code to its operand-swapped counterpart (GT<->LT,
   GE<->LE, symmetric codes unchanged); confirm against upstream
   fold-const.c.  Surviving fragments kept byte-identical below.  */
2142 /* Similar, but return the comparison that results if the operands are
2143 swapped. This is safe for floating-point. */
2146 swap_tree_comparison (enum tree_code code
)
2153 case UNORDERED_EXPR
:
2179 /* Convert a comparison tree code from an enum tree_code representation
2180 into a compcode bit-based encoding. This function is the inverse of
2181 compcode_to_comparison. */
2183 static enum comparison_code
2184 comparison_to_compcode (enum tree_code code
)
2201 return COMPCODE_ORD
;
2202 case UNORDERED_EXPR
:
2203 return COMPCODE_UNORD
;
2205 return COMPCODE_UNLT
;
2207 return COMPCODE_UNEQ
;
2209 return COMPCODE_UNLE
;
2211 return COMPCODE_UNGT
;
2213 return COMPCODE_LTGT
;
2215 return COMPCODE_UNGE
;
2221 /* Convert a compcode bit-based encoding of a comparison operator back
2222 to GCC's enum tree_code representation. This function is the
2223 inverse of comparison_to_compcode. */
2225 static enum tree_code
2226 compcode_to_comparison (enum comparison_code code
)
2243 return ORDERED_EXPR
;
2244 case COMPCODE_UNORD
:
2245 return UNORDERED_EXPR
;
2263 /* Return a tree for the comparison which is the combination of
2264 doing the AND or OR (depending on CODE) of the two operations LCODE
2265 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2266 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2267 if this makes the transformation invalid. */
2270 combine_comparisons (location_t loc
,
2271 enum tree_code code
, enum tree_code lcode
,
2272 enum tree_code rcode
, tree truth_type
,
2273 tree ll_arg
, tree lr_arg
)
2275 bool honor_nans
= HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg
)));
2276 enum comparison_code lcompcode
= comparison_to_compcode (lcode
);
2277 enum comparison_code rcompcode
= comparison_to_compcode (rcode
);
2282 case TRUTH_AND_EXPR
: case TRUTH_ANDIF_EXPR
:
2283 compcode
= lcompcode
& rcompcode
;
2286 case TRUTH_OR_EXPR
: case TRUTH_ORIF_EXPR
:
2287 compcode
= lcompcode
| rcompcode
;
2296 /* Eliminate unordered comparisons, as well as LTGT and ORD
2297 which are not used unless the mode has NaNs. */
2298 compcode
&= ~COMPCODE_UNORD
;
2299 if (compcode
== COMPCODE_LTGT
)
2300 compcode
= COMPCODE_NE
;
2301 else if (compcode
== COMPCODE_ORD
)
2302 compcode
= COMPCODE_TRUE
;
2304 else if (flag_trapping_math
)
2306 /* Check that the original operation and the optimized ones will trap
2307 under the same condition. */
2308 bool ltrap
= (lcompcode
& COMPCODE_UNORD
) == 0
2309 && (lcompcode
!= COMPCODE_EQ
)
2310 && (lcompcode
!= COMPCODE_ORD
);
2311 bool rtrap
= (rcompcode
& COMPCODE_UNORD
) == 0
2312 && (rcompcode
!= COMPCODE_EQ
)
2313 && (rcompcode
!= COMPCODE_ORD
);
2314 bool trap
= (compcode
& COMPCODE_UNORD
) == 0
2315 && (compcode
!= COMPCODE_EQ
)
2316 && (compcode
!= COMPCODE_ORD
);
2318 /* In a short-circuited boolean expression the LHS might be
2319 such that the RHS, if evaluated, will never trap. For
2320 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2321 if neither x nor y is NaN. (This is a mixed blessing: for
2322 example, the expression above will never trap, hence
2323 optimizing it to x < y would be invalid). */
2324 if ((code
== TRUTH_ORIF_EXPR
&& (lcompcode
& COMPCODE_UNORD
))
2325 || (code
== TRUTH_ANDIF_EXPR
&& !(lcompcode
& COMPCODE_UNORD
)))
2328 /* If the comparison was short-circuited, and only the RHS
2329 trapped, we may now generate a spurious trap. */
2331 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
2334 /* If we changed the conditions that cause a trap, we lose. */
2335 if ((ltrap
|| rtrap
) != trap
)
2339 if (compcode
== COMPCODE_TRUE
)
2340 return constant_boolean_node (true, truth_type
);
2341 else if (compcode
== COMPCODE_FALSE
)
2342 return constant_boolean_node (false, truth_type
);
2345 enum tree_code tcode
;
2347 tcode
= compcode_to_comparison ((enum comparison_code
) compcode
);
2348 return fold_build2_loc (loc
, tcode
, truth_type
, ll_arg
, lr_arg
);
2352 /* Return nonzero if two operands (typically of the same tree node)
2353 are necessarily equal. If either argument has side-effects this
2354 function returns zero. FLAGS modifies behavior as follows:
2356 If OEP_ONLY_CONST is set, only return nonzero for constants.
2357 This function tests whether the operands are indistinguishable;
2358 it does not test whether they are equal using C's == operation.
2359 The distinction is important for IEEE floating point, because
2360 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2361 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2363 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2364 even though it may hold multiple values during a function.
2365 This is because a GCC tree node guarantees that nothing else is
2366 executed between the evaluation of its "operands" (which may often
2367 be evaluated in arbitrary order). Hence if the operands themselves
2368 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2369 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2370 unset means assuming isochronic (or instantaneous) tree equivalence.
2371 Unless comparing arbitrary expression trees, such as from different
2372 statements, this flag can usually be left unset.
2374 If OEP_PURE_SAME is set, then pure functions with identical arguments
2375 are considered the same. It is used when the caller has other ways
2376 to ensure that global memory is unchanged in between. */
2379 operand_equal_p (const_tree arg0
, const_tree arg1
, unsigned int flags
)
2381 /* If either is ERROR_MARK, they aren't equal. */
2382 if (TREE_CODE (arg0
) == ERROR_MARK
|| TREE_CODE (arg1
) == ERROR_MARK
2383 || TREE_TYPE (arg0
) == error_mark_node
2384 || TREE_TYPE (arg1
) == error_mark_node
)
2387 /* Similar, if either does not have a type (like a released SSA name),
2388 they aren't equal. */
2389 if (!TREE_TYPE (arg0
) || !TREE_TYPE (arg1
))
2392 /* Check equality of integer constants before bailing out due to
2393 precision differences. */
2394 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
2395 return tree_int_cst_equal (arg0
, arg1
);
2397 /* If both types don't have the same signedness, then we can't consider
2398 them equal. We must check this before the STRIP_NOPS calls
2399 because they may change the signedness of the arguments. As pointers
2400 strictly don't have a signedness, require either two pointers or
2401 two non-pointers as well. */
2402 if (TYPE_UNSIGNED (TREE_TYPE (arg0
)) != TYPE_UNSIGNED (TREE_TYPE (arg1
))
2403 || POINTER_TYPE_P (TREE_TYPE (arg0
)) != POINTER_TYPE_P (TREE_TYPE (arg1
)))
2406 /* We cannot consider pointers to different address space equal. */
2407 if (POINTER_TYPE_P (TREE_TYPE (arg0
)) && POINTER_TYPE_P (TREE_TYPE (arg1
))
2408 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0
)))
2409 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1
)))))
2412 /* If both types don't have the same precision, then it is not safe
2414 if (TYPE_PRECISION (TREE_TYPE (arg0
)) != TYPE_PRECISION (TREE_TYPE (arg1
)))
2420 /* In case both args are comparisons but with different comparison
2421 code, try to swap the comparison operands of one arg to produce
2422 a match and compare that variant. */
2423 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
2424 && COMPARISON_CLASS_P (arg0
)
2425 && COMPARISON_CLASS_P (arg1
))
2427 enum tree_code swap_code
= swap_tree_comparison (TREE_CODE (arg1
));
2429 if (TREE_CODE (arg0
) == swap_code
)
2430 return operand_equal_p (TREE_OPERAND (arg0
, 0),
2431 TREE_OPERAND (arg1
, 1), flags
)
2432 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2433 TREE_OPERAND (arg1
, 0), flags
);
2436 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
2437 /* This is needed for conversions and for COMPONENT_REF.
2438 Might as well play it safe and always test this. */
2439 || TREE_CODE (TREE_TYPE (arg0
)) == ERROR_MARK
2440 || TREE_CODE (TREE_TYPE (arg1
)) == ERROR_MARK
2441 || TYPE_MODE (TREE_TYPE (arg0
)) != TYPE_MODE (TREE_TYPE (arg1
)))
2444 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2445 We don't care about side effects in that case because the SAVE_EXPR
2446 takes care of that for us. In all other cases, two expressions are
2447 equal if they have no side effects. If we have two identical
2448 expressions with side effects that should be treated the same due
2449 to the only side effects being identical SAVE_EXPR's, that will
2450 be detected in the recursive calls below.
2451 If we are taking an invariant address of two identical objects
2452 they are necessarily equal as well. */
2453 if (arg0
== arg1
&& ! (flags
& OEP_ONLY_CONST
)
2454 && (TREE_CODE (arg0
) == SAVE_EXPR
2455 || (flags
& OEP_CONSTANT_ADDRESS_OF
)
2456 || (! TREE_SIDE_EFFECTS (arg0
) && ! TREE_SIDE_EFFECTS (arg1
))))
2459 /* Next handle constant cases, those for which we can return 1 even
2460 if ONLY_CONST is set. */
2461 if (TREE_CONSTANT (arg0
) && TREE_CONSTANT (arg1
))
2462 switch (TREE_CODE (arg0
))
2465 return tree_int_cst_equal (arg0
, arg1
);
2468 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0
),
2469 TREE_FIXED_CST (arg1
));
2472 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0
),
2473 TREE_REAL_CST (arg1
)))
2477 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
))))
2479 /* If we do not distinguish between signed and unsigned zero,
2480 consider them equal. */
2481 if (real_zerop (arg0
) && real_zerop (arg1
))
2490 if (VECTOR_CST_NELTS (arg0
) != VECTOR_CST_NELTS (arg1
))
2493 for (i
= 0; i
< VECTOR_CST_NELTS (arg0
); ++i
)
2495 if (!operand_equal_p (VECTOR_CST_ELT (arg0
, i
),
2496 VECTOR_CST_ELT (arg1
, i
), flags
))
2503 return (operand_equal_p (TREE_REALPART (arg0
), TREE_REALPART (arg1
),
2505 && operand_equal_p (TREE_IMAGPART (arg0
), TREE_IMAGPART (arg1
),
2509 return (TREE_STRING_LENGTH (arg0
) == TREE_STRING_LENGTH (arg1
)
2510 && ! memcmp (TREE_STRING_POINTER (arg0
),
2511 TREE_STRING_POINTER (arg1
),
2512 TREE_STRING_LENGTH (arg0
)));
2515 return operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0),
2516 TREE_CONSTANT (arg0
) && TREE_CONSTANT (arg1
)
2517 ? OEP_CONSTANT_ADDRESS_OF
: 0);
2522 if (flags
& OEP_ONLY_CONST
)
2525 /* Define macros to test an operand from arg0 and arg1 for equality and a
2526 variant that allows null and views null as being different from any
2527 non-null value. In the latter case, if either is null, the both
2528 must be; otherwise, do the normal comparison. */
2529 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2530 TREE_OPERAND (arg1, N), flags)
2532 #define OP_SAME_WITH_NULL(N) \
2533 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2534 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2536 switch (TREE_CODE_CLASS (TREE_CODE (arg0
)))
2539 /* Two conversions are equal only if signedness and modes match. */
2540 switch (TREE_CODE (arg0
))
2543 case FIX_TRUNC_EXPR
:
2544 if (TYPE_UNSIGNED (TREE_TYPE (arg0
))
2545 != TYPE_UNSIGNED (TREE_TYPE (arg1
)))
2555 case tcc_comparison
:
2557 if (OP_SAME (0) && OP_SAME (1))
2560 /* For commutative ops, allow the other order. */
2561 return (commutative_tree_code (TREE_CODE (arg0
))
2562 && operand_equal_p (TREE_OPERAND (arg0
, 0),
2563 TREE_OPERAND (arg1
, 1), flags
)
2564 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2565 TREE_OPERAND (arg1
, 0), flags
));
2568 /* If either of the pointer (or reference) expressions we are
2569 dereferencing contain a side effect, these cannot be equal. */
2570 if (TREE_SIDE_EFFECTS (arg0
)
2571 || TREE_SIDE_EFFECTS (arg1
))
2574 switch (TREE_CODE (arg0
))
2581 case TARGET_MEM_REF
:
2582 /* Require equal extra operands and then fall through to MEM_REF
2583 handling of the two common operands. */
2584 if (!OP_SAME_WITH_NULL (2)
2585 || !OP_SAME_WITH_NULL (3)
2586 || !OP_SAME_WITH_NULL (4))
2590 /* Require equal access sizes, and similar pointer types.
2591 We can have incomplete types for array references of
2592 variable-sized arrays from the Fortran frontent
2594 return ((TYPE_SIZE (TREE_TYPE (arg0
)) == TYPE_SIZE (TREE_TYPE (arg1
))
2595 || (TYPE_SIZE (TREE_TYPE (arg0
))
2596 && TYPE_SIZE (TREE_TYPE (arg1
))
2597 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0
)),
2598 TYPE_SIZE (TREE_TYPE (arg1
)), flags
)))
2599 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0
, 1)))
2600 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1
, 1))))
2601 && OP_SAME (0) && OP_SAME (1));
2604 case ARRAY_RANGE_REF
:
2605 /* Operands 2 and 3 may be null.
2606 Compare the array index by value if it is constant first as we
2607 may have different types but same value here. */
2609 && (tree_int_cst_equal (TREE_OPERAND (arg0
, 1),
2610 TREE_OPERAND (arg1
, 1))
2612 && OP_SAME_WITH_NULL (2)
2613 && OP_SAME_WITH_NULL (3));
2616 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2617 may be NULL when we're called to compare MEM_EXPRs. */
2618 return OP_SAME_WITH_NULL (0)
2620 && OP_SAME_WITH_NULL (2);
2623 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2629 case tcc_expression
:
2630 switch (TREE_CODE (arg0
))
2633 case TRUTH_NOT_EXPR
:
2636 case TRUTH_ANDIF_EXPR
:
2637 case TRUTH_ORIF_EXPR
:
2638 return OP_SAME (0) && OP_SAME (1);
2641 case WIDEN_MULT_PLUS_EXPR
:
2642 case WIDEN_MULT_MINUS_EXPR
:
2645 /* The multiplcation operands are commutative. */
2648 case TRUTH_AND_EXPR
:
2650 case TRUTH_XOR_EXPR
:
2651 if (OP_SAME (0) && OP_SAME (1))
2654 /* Otherwise take into account this is a commutative operation. */
2655 return (operand_equal_p (TREE_OPERAND (arg0
, 0),
2656 TREE_OPERAND (arg1
, 1), flags
)
2657 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2658 TREE_OPERAND (arg1
, 0), flags
));
2663 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2670 switch (TREE_CODE (arg0
))
2673 /* If the CALL_EXPRs call different functions, then they
2674 clearly can not be equal. */
2675 if (! operand_equal_p (CALL_EXPR_FN (arg0
), CALL_EXPR_FN (arg1
),
2680 unsigned int cef
= call_expr_flags (arg0
);
2681 if (flags
& OEP_PURE_SAME
)
2682 cef
&= ECF_CONST
| ECF_PURE
;
2689 /* Now see if all the arguments are the same. */
2691 const_call_expr_arg_iterator iter0
, iter1
;
2693 for (a0
= first_const_call_expr_arg (arg0
, &iter0
),
2694 a1
= first_const_call_expr_arg (arg1
, &iter1
);
2696 a0
= next_const_call_expr_arg (&iter0
),
2697 a1
= next_const_call_expr_arg (&iter1
))
2698 if (! operand_equal_p (a0
, a1
, flags
))
2701 /* If we get here and both argument lists are exhausted
2702 then the CALL_EXPRs are equal. */
2703 return ! (a0
|| a1
);
2709 case tcc_declaration
:
2710 /* Consider __builtin_sqrt equal to sqrt. */
2711 return (TREE_CODE (arg0
) == FUNCTION_DECL
2712 && DECL_BUILT_IN (arg0
) && DECL_BUILT_IN (arg1
)
2713 && DECL_BUILT_IN_CLASS (arg0
) == DECL_BUILT_IN_CLASS (arg1
)
2714 && DECL_FUNCTION_CODE (arg0
) == DECL_FUNCTION_CODE (arg1
));
2721 #undef OP_SAME_WITH_NULL
2724 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2725 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2727 When in doubt, return 0. */
2730 operand_equal_for_comparison_p (tree arg0
, tree arg1
, tree other
)
2732 int unsignedp1
, unsignedpo
;
2733 tree primarg0
, primarg1
, primother
;
2734 unsigned int correct_width
;
2736 if (operand_equal_p (arg0
, arg1
, 0))
2739 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
2740 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1
)))
2743 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2744 and see if the inner values are the same. This removes any
2745 signedness comparison, which doesn't matter here. */
2746 primarg0
= arg0
, primarg1
= arg1
;
2747 STRIP_NOPS (primarg0
);
2748 STRIP_NOPS (primarg1
);
2749 if (operand_equal_p (primarg0
, primarg1
, 0))
2752 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2753 actual comparison operand, ARG0.
2755 First throw away any conversions to wider types
2756 already present in the operands. */
2758 primarg1
= get_narrower (arg1
, &unsignedp1
);
2759 primother
= get_narrower (other
, &unsignedpo
);
2761 correct_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
2762 if (unsignedp1
== unsignedpo
2763 && TYPE_PRECISION (TREE_TYPE (primarg1
)) < correct_width
2764 && TYPE_PRECISION (TREE_TYPE (primother
)) < correct_width
)
2766 tree type
= TREE_TYPE (arg0
);
2768 /* Make sure shorter operand is extended the right way
2769 to match the longer operand. */
2770 primarg1
= fold_convert (signed_or_unsigned_type_for
2771 (unsignedp1
, TREE_TYPE (primarg1
)), primarg1
);
2773 if (operand_equal_p (arg0
, fold_convert (type
, primarg1
), 0))
2780 /* See if ARG is an expression that is either a comparison or is performing
2781 arithmetic on comparisons. The comparisons must only be comparing
2782 two different values, which will be stored in *CVAL1 and *CVAL2; if
2783 they are nonzero it means that some operands have already been found.
2784 No variables may be used anywhere else in the expression except in the
2785 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2786 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2788 If this is true, return 1. Otherwise, return zero. */
2791 twoval_comparison_p (tree arg
, tree
*cval1
, tree
*cval2
, int *save_p
)
2793 enum tree_code code
= TREE_CODE (arg
);
2794 enum tree_code_class tclass
= TREE_CODE_CLASS (code
);
2796 /* We can handle some of the tcc_expression cases here. */
2797 if (tclass
== tcc_expression
&& code
== TRUTH_NOT_EXPR
)
2799 else if (tclass
== tcc_expression
2800 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
2801 || code
== COMPOUND_EXPR
))
2802 tclass
= tcc_binary
;
2804 else if (tclass
== tcc_expression
&& code
== SAVE_EXPR
2805 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg
, 0)))
2807 /* If we've already found a CVAL1 or CVAL2, this expression is
2808 two complex to handle. */
2809 if (*cval1
|| *cval2
)
2819 return twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
);
2822 return (twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
)
2823 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
2824 cval1
, cval2
, save_p
));
2829 case tcc_expression
:
2830 if (code
== COND_EXPR
)
2831 return (twoval_comparison_p (TREE_OPERAND (arg
, 0),
2832 cval1
, cval2
, save_p
)
2833 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
2834 cval1
, cval2
, save_p
)
2835 && twoval_comparison_p (TREE_OPERAND (arg
, 2),
2836 cval1
, cval2
, save_p
));
2839 case tcc_comparison
:
2840 /* First see if we can handle the first operand, then the second. For
2841 the second operand, we know *CVAL1 can't be zero. It must be that
2842 one side of the comparison is each of the values; test for the
2843 case where this isn't true by failing if the two operands
2846 if (operand_equal_p (TREE_OPERAND (arg
, 0),
2847 TREE_OPERAND (arg
, 1), 0))
2851 *cval1
= TREE_OPERAND (arg
, 0);
2852 else if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 0), 0))
2854 else if (*cval2
== 0)
2855 *cval2
= TREE_OPERAND (arg
, 0);
2856 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 0), 0))
2861 if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 1), 0))
2863 else if (*cval2
== 0)
2864 *cval2
= TREE_OPERAND (arg
, 1);
2865 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 1), 0))
2877 /* ARG is a tree that is known to contain just arithmetic operations and
2878 comparisons. Evaluate the operations in the tree substituting NEW0 for
2879 any occurrence of OLD0 as an operand of a comparison and likewise for
2883 eval_subst (location_t loc
, tree arg
, tree old0
, tree new0
,
2884 tree old1
, tree new1
)
2886 tree type
= TREE_TYPE (arg
);
2887 enum tree_code code
= TREE_CODE (arg
);
2888 enum tree_code_class tclass
= TREE_CODE_CLASS (code
);
2890 /* We can handle some of the tcc_expression cases here. */
2891 if (tclass
== tcc_expression
&& code
== TRUTH_NOT_EXPR
)
2893 else if (tclass
== tcc_expression
2894 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
2895 tclass
= tcc_binary
;
2900 return fold_build1_loc (loc
, code
, type
,
2901 eval_subst (loc
, TREE_OPERAND (arg
, 0),
2902 old0
, new0
, old1
, new1
));
2905 return fold_build2_loc (loc
, code
, type
,
2906 eval_subst (loc
, TREE_OPERAND (arg
, 0),
2907 old0
, new0
, old1
, new1
),
2908 eval_subst (loc
, TREE_OPERAND (arg
, 1),
2909 old0
, new0
, old1
, new1
));
2911 case tcc_expression
:
2915 return eval_subst (loc
, TREE_OPERAND (arg
, 0), old0
, new0
,
2919 return eval_subst (loc
, TREE_OPERAND (arg
, 1), old0
, new0
,
2923 return fold_build3_loc (loc
, code
, type
,
2924 eval_subst (loc
, TREE_OPERAND (arg
, 0),
2925 old0
, new0
, old1
, new1
),
2926 eval_subst (loc
, TREE_OPERAND (arg
, 1),
2927 old0
, new0
, old1
, new1
),
2928 eval_subst (loc
, TREE_OPERAND (arg
, 2),
2929 old0
, new0
, old1
, new1
));
2933 /* Fall through - ??? */
2935 case tcc_comparison
:
2937 tree arg0
= TREE_OPERAND (arg
, 0);
2938 tree arg1
= TREE_OPERAND (arg
, 1);
2940 /* We need to check both for exact equality and tree equality. The
2941 former will be true if the operand has a side-effect. In that
2942 case, we know the operand occurred exactly once. */
2944 if (arg0
== old0
|| operand_equal_p (arg0
, old0
, 0))
2946 else if (arg0
== old1
|| operand_equal_p (arg0
, old1
, 0))
2949 if (arg1
== old0
|| operand_equal_p (arg1
, old0
, 0))
2951 else if (arg1
== old1
|| operand_equal_p (arg1
, old1
, 0))
2954 return fold_build2_loc (loc
, code
, type
, arg0
, arg1
);
2962 /* Return a tree for the case when the result of an expression is RESULT
2963 converted to TYPE and OMITTED was previously an operand of the expression
2964 but is now not needed (e.g., we folded OMITTED * 0).
2966 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2967 the conversion of RESULT to TYPE. */
2970 omit_one_operand_loc (location_t loc
, tree type
, tree result
, tree omitted
)
2972 tree t
= fold_convert_loc (loc
, type
, result
);
2974 /* If the resulting operand is an empty statement, just return the omitted
2975 statement casted to void. */
2976 if (IS_EMPTY_STMT (t
) && TREE_SIDE_EFFECTS (omitted
))
2977 return build1_loc (loc
, NOP_EXPR
, void_type_node
,
2978 fold_ignored_result (omitted
));
2980 if (TREE_SIDE_EFFECTS (omitted
))
2981 return build2_loc (loc
, COMPOUND_EXPR
, type
,
2982 fold_ignored_result (omitted
), t
);
2984 return non_lvalue_loc (loc
, t
);
2987 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2990 pedantic_omit_one_operand_loc (location_t loc
, tree type
, tree result
,
2993 tree t
= fold_convert_loc (loc
, type
, result
);
2995 /* If the resulting operand is an empty statement, just return the omitted
2996 statement casted to void. */
2997 if (IS_EMPTY_STMT (t
) && TREE_SIDE_EFFECTS (omitted
))
2998 return build1_loc (loc
, NOP_EXPR
, void_type_node
,
2999 fold_ignored_result (omitted
));
3001 if (TREE_SIDE_EFFECTS (omitted
))
3002 return build2_loc (loc
, COMPOUND_EXPR
, type
,
3003 fold_ignored_result (omitted
), t
);
3005 return pedantic_non_lvalue_loc (loc
, t
);
3008 /* Return a tree for the case when the result of an expression is RESULT
3009 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3010 of the expression but are now not needed.
3012 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3013 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3014 evaluated before OMITTED2. Otherwise, if neither has side effects,
3015 just do the conversion of RESULT to TYPE. */
3018 omit_two_operands_loc (location_t loc
, tree type
, tree result
,
3019 tree omitted1
, tree omitted2
)
3021 tree t
= fold_convert_loc (loc
, type
, result
);
3023 if (TREE_SIDE_EFFECTS (omitted2
))
3024 t
= build2_loc (loc
, COMPOUND_EXPR
, type
, omitted2
, t
);
3025 if (TREE_SIDE_EFFECTS (omitted1
))
3026 t
= build2_loc (loc
, COMPOUND_EXPR
, type
, omitted1
, t
);
3028 return TREE_CODE (t
) != COMPOUND_EXPR
? non_lvalue_loc (loc
, t
) : t
;
3032 /* Return a simplified tree node for the truth-negation of ARG. This
3033 never alters ARG itself. We assume that ARG is an operation that
3034 returns a truth value (0 or 1).
3036 FIXME: one would think we would fold the result, but it causes
3037 problems with the dominator optimizer. */
3040 fold_truth_not_expr (location_t loc
, tree arg
)
3042 tree type
= TREE_TYPE (arg
);
3043 enum tree_code code
= TREE_CODE (arg
);
3044 location_t loc1
, loc2
;
3046 /* If this is a comparison, we can simply invert it, except for
3047 floating-point non-equality comparisons, in which case we just
3048 enclose a TRUTH_NOT_EXPR around what we have. */
3050 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
3052 tree op_type
= TREE_TYPE (TREE_OPERAND (arg
, 0));
3053 if (FLOAT_TYPE_P (op_type
)
3054 && flag_trapping_math
3055 && code
!= ORDERED_EXPR
&& code
!= UNORDERED_EXPR
3056 && code
!= NE_EXPR
&& code
!= EQ_EXPR
)
3059 code
= invert_tree_comparison (code
, HONOR_NANS (TYPE_MODE (op_type
)));
3060 if (code
== ERROR_MARK
)
3063 return build2_loc (loc
, code
, type
, TREE_OPERAND (arg
, 0),
3064 TREE_OPERAND (arg
, 1));
3070 return constant_boolean_node (integer_zerop (arg
), type
);
3072 case TRUTH_AND_EXPR
:
3073 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3074 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3075 return build2_loc (loc
, TRUTH_OR_EXPR
, type
,
3076 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3077 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3080 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3081 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3082 return build2_loc (loc
, TRUTH_AND_EXPR
, type
,
3083 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3084 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3086 case TRUTH_XOR_EXPR
:
3087 /* Here we can invert either operand. We invert the first operand
3088 unless the second operand is a TRUTH_NOT_EXPR in which case our
3089 result is the XOR of the first operand with the inside of the
3090 negation of the second operand. */
3092 if (TREE_CODE (TREE_OPERAND (arg
, 1)) == TRUTH_NOT_EXPR
)
3093 return build2_loc (loc
, TRUTH_XOR_EXPR
, type
, TREE_OPERAND (arg
, 0),
3094 TREE_OPERAND (TREE_OPERAND (arg
, 1), 0));
3096 return build2_loc (loc
, TRUTH_XOR_EXPR
, type
,
3097 invert_truthvalue_loc (loc
, TREE_OPERAND (arg
, 0)),
3098 TREE_OPERAND (arg
, 1));
3100 case TRUTH_ANDIF_EXPR
:
3101 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3102 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3103 return build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
3104 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3105 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3107 case TRUTH_ORIF_EXPR
:
3108 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3109 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3110 return build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
3111 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3112 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3114 case TRUTH_NOT_EXPR
:
3115 return TREE_OPERAND (arg
, 0);
3119 tree arg1
= TREE_OPERAND (arg
, 1);
3120 tree arg2
= TREE_OPERAND (arg
, 2);
3122 loc1
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3123 loc2
= expr_location_or (TREE_OPERAND (arg
, 2), loc
);
3125 /* A COND_EXPR may have a throw as one operand, which
3126 then has void type. Just leave void operands
3128 return build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg
, 0),
3129 VOID_TYPE_P (TREE_TYPE (arg1
))
3130 ? arg1
: invert_truthvalue_loc (loc1
, arg1
),
3131 VOID_TYPE_P (TREE_TYPE (arg2
))
3132 ? arg2
: invert_truthvalue_loc (loc2
, arg2
));
3136 loc1
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3137 return build2_loc (loc
, COMPOUND_EXPR
, type
,
3138 TREE_OPERAND (arg
, 0),
3139 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 1)));
3141 case NON_LVALUE_EXPR
:
3142 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3143 return invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0));
3146 if (TREE_CODE (TREE_TYPE (arg
)) == BOOLEAN_TYPE
)
3147 return build1_loc (loc
, TRUTH_NOT_EXPR
, type
, arg
);
3149 /* ... fall through ... */
3152 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3153 return build1_loc (loc
, TREE_CODE (arg
), type
,
3154 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)));
3157 if (!integer_onep (TREE_OPERAND (arg
, 1)))
3159 return build2_loc (loc
, EQ_EXPR
, type
, arg
, build_int_cst (type
, 0));
3162 return build1_loc (loc
, TRUTH_NOT_EXPR
, type
, arg
);
3164 case CLEANUP_POINT_EXPR
:
3165 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3166 return build1_loc (loc
, CLEANUP_POINT_EXPR
, type
,
3167 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)));
3174 /* Return a simplified tree node for the truth-negation of ARG. This
3175 never alters ARG itself. We assume that ARG is an operation that
3176 returns a truth value (0 or 1).
3178 FIXME: one would think we would fold the result, but it causes
3179 problems with the dominator optimizer. */
3182 invert_truthvalue_loc (location_t loc
, tree arg
)
3186 if (TREE_CODE (arg
) == ERROR_MARK
)
3189 tem
= fold_truth_not_expr (loc
, arg
);
3191 tem
= build1_loc (loc
, TRUTH_NOT_EXPR
, TREE_TYPE (arg
), arg
);
3196 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3197 operands are another bit-wise operation with a common input. If so,
3198 distribute the bit operations to save an operation and possibly two if
3199 constants are involved. For example, convert
3200 (A | B) & (A | C) into A | (B & C)
3201 Further simplification will occur if B and C are constants.
3203 If this optimization cannot be done, 0 will be returned. */
3206 distribute_bit_expr (location_t loc
, enum tree_code code
, tree type
,
3207 tree arg0
, tree arg1
)
3212 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
3213 || TREE_CODE (arg0
) == code
3214 || (TREE_CODE (arg0
) != BIT_AND_EXPR
3215 && TREE_CODE (arg0
) != BIT_IOR_EXPR
))
3218 if (operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0), 0))
3220 common
= TREE_OPERAND (arg0
, 0);
3221 left
= TREE_OPERAND (arg0
, 1);
3222 right
= TREE_OPERAND (arg1
, 1);
3224 else if (operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 1), 0))
3226 common
= TREE_OPERAND (arg0
, 0);
3227 left
= TREE_OPERAND (arg0
, 1);
3228 right
= TREE_OPERAND (arg1
, 0);
3230 else if (operand_equal_p (TREE_OPERAND (arg0
, 1), TREE_OPERAND (arg1
, 0), 0))
3232 common
= TREE_OPERAND (arg0
, 1);
3233 left
= TREE_OPERAND (arg0
, 0);
3234 right
= TREE_OPERAND (arg1
, 1);
3236 else if (operand_equal_p (TREE_OPERAND (arg0
, 1), TREE_OPERAND (arg1
, 1), 0))
3238 common
= TREE_OPERAND (arg0
, 1);
3239 left
= TREE_OPERAND (arg0
, 0);
3240 right
= TREE_OPERAND (arg1
, 0);
3245 common
= fold_convert_loc (loc
, type
, common
);
3246 left
= fold_convert_loc (loc
, type
, left
);
3247 right
= fold_convert_loc (loc
, type
, right
);
3248 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, common
,
3249 fold_build2_loc (loc
, code
, type
, left
, right
));
3252 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3253 with code CODE. This optimization is unsafe. */
3255 distribute_real_division (location_t loc
, enum tree_code code
, tree type
,
3256 tree arg0
, tree arg1
)
3258 bool mul0
= TREE_CODE (arg0
) == MULT_EXPR
;
3259 bool mul1
= TREE_CODE (arg1
) == MULT_EXPR
;
3261 /* (A / C) +- (B / C) -> (A +- B) / C. */
3263 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3264 TREE_OPERAND (arg1
, 1), 0))
3265 return fold_build2_loc (loc
, mul0
? MULT_EXPR
: RDIV_EXPR
, type
,
3266 fold_build2_loc (loc
, code
, type
,
3267 TREE_OPERAND (arg0
, 0),
3268 TREE_OPERAND (arg1
, 0)),
3269 TREE_OPERAND (arg0
, 1));
3271 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3272 if (operand_equal_p (TREE_OPERAND (arg0
, 0),
3273 TREE_OPERAND (arg1
, 0), 0)
3274 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
3275 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == REAL_CST
)
3277 REAL_VALUE_TYPE r0
, r1
;
3278 r0
= TREE_REAL_CST (TREE_OPERAND (arg0
, 1));
3279 r1
= TREE_REAL_CST (TREE_OPERAND (arg1
, 1));
3281 real_arithmetic (&r0
, RDIV_EXPR
, &dconst1
, &r0
);
3283 real_arithmetic (&r1
, RDIV_EXPR
, &dconst1
, &r1
);
3284 real_arithmetic (&r0
, code
, &r0
, &r1
);
3285 return fold_build2_loc (loc
, MULT_EXPR
, type
,
3286 TREE_OPERAND (arg0
, 0),
3287 build_real (type
, r0
));
3293 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3294 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3297 make_bit_field_ref (location_t loc
, tree inner
, tree type
,
3298 HOST_WIDE_INT bitsize
, HOST_WIDE_INT bitpos
, int unsignedp
)
3300 tree result
, bftype
;
3304 tree size
= TYPE_SIZE (TREE_TYPE (inner
));
3305 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner
))
3306 || POINTER_TYPE_P (TREE_TYPE (inner
)))
3307 && host_integerp (size
, 0)
3308 && tree_low_cst (size
, 0) == bitsize
)
3309 return fold_convert_loc (loc
, type
, inner
);
3313 if (TYPE_PRECISION (bftype
) != bitsize
3314 || TYPE_UNSIGNED (bftype
) == !unsignedp
)
3315 bftype
= build_nonstandard_integer_type (bitsize
, 0);
3317 result
= build3_loc (loc
, BIT_FIELD_REF
, bftype
, inner
,
3318 size_int (bitsize
), bitsize_int (bitpos
));
3321 result
= fold_convert_loc (loc
, type
, result
);
3326 /* Optimize a bit-field compare.
3328 There are two cases: First is a compare against a constant and the
3329 second is a comparison of two items where the fields are at the same
3330 bit position relative to the start of a chunk (byte, halfword, word)
3331 large enough to contain it. In these cases we can avoid the shift
3332 implicit in bitfield extractions.
3334 For constants, we emit a compare of the shifted constant with the
3335 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3336 compared. For two fields at the same position, we do the ANDs with the
3337 similar mask and compare the result of the ANDs.
3339 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3340 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3341 are the left and right operands of the comparison, respectively.
3343 If the optimization described above can be done, we return the resulting
3344 tree. Otherwise we return zero. */
3347 optimize_bit_field_compare (location_t loc
, enum tree_code code
,
3348 tree compare_type
, tree lhs
, tree rhs
)
3350 HOST_WIDE_INT lbitpos
, lbitsize
, rbitpos
, rbitsize
, nbitpos
, nbitsize
;
3351 tree type
= TREE_TYPE (lhs
);
3352 tree signed_type
, unsigned_type
;
3353 int const_p
= TREE_CODE (rhs
) == INTEGER_CST
;
3354 enum machine_mode lmode
, rmode
, nmode
;
3355 int lunsignedp
, runsignedp
;
3356 int lvolatilep
= 0, rvolatilep
= 0;
3357 tree linner
, rinner
= NULL_TREE
;
3361 /* In the strict volatile bitfields case, doing code changes here may prevent
3362 other optimizations, in particular in a SLOW_BYTE_ACCESS setting. */
3363 if (flag_strict_volatile_bitfields
> 0)
3366 /* Get all the information about the extractions being done. If the bit size
3367 if the same as the size of the underlying object, we aren't doing an
3368 extraction at all and so can do nothing. We also don't want to
3369 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3370 then will no longer be able to replace it. */
3371 linner
= get_inner_reference (lhs
, &lbitsize
, &lbitpos
, &offset
, &lmode
,
3372 &lunsignedp
, &lvolatilep
, false);
3373 if (linner
== lhs
|| lbitsize
== GET_MODE_BITSIZE (lmode
) || lbitsize
< 0
3374 || offset
!= 0 || TREE_CODE (linner
) == PLACEHOLDER_EXPR
)
3379 /* If this is not a constant, we can only do something if bit positions,
3380 sizes, and signedness are the same. */
3381 rinner
= get_inner_reference (rhs
, &rbitsize
, &rbitpos
, &offset
, &rmode
,
3382 &runsignedp
, &rvolatilep
, false);
3384 if (rinner
== rhs
|| lbitpos
!= rbitpos
|| lbitsize
!= rbitsize
3385 || lunsignedp
!= runsignedp
|| offset
!= 0
3386 || TREE_CODE (rinner
) == PLACEHOLDER_EXPR
)
3390 /* See if we can find a mode to refer to this field. We should be able to,
3391 but fail if we can't. */
3393 && GET_MODE_BITSIZE (lmode
) > 0
3394 && flag_strict_volatile_bitfields
> 0)
3397 nmode
= get_best_mode (lbitsize
, lbitpos
, 0, 0,
3398 const_p
? TYPE_ALIGN (TREE_TYPE (linner
))
3399 : MIN (TYPE_ALIGN (TREE_TYPE (linner
)),
3400 TYPE_ALIGN (TREE_TYPE (rinner
))),
3401 word_mode
, lvolatilep
|| rvolatilep
);
3402 if (nmode
== VOIDmode
)
3405 /* Set signed and unsigned types of the precision of this mode for the
3407 signed_type
= lang_hooks
.types
.type_for_mode (nmode
, 0);
3408 unsigned_type
= lang_hooks
.types
.type_for_mode (nmode
, 1);
3410 /* Compute the bit position and size for the new reference and our offset
3411 within it. If the new reference is the same size as the original, we
3412 won't optimize anything, so return zero. */
3413 nbitsize
= GET_MODE_BITSIZE (nmode
);
3414 nbitpos
= lbitpos
& ~ (nbitsize
- 1);
3416 if (nbitsize
== lbitsize
)
3419 if (BYTES_BIG_ENDIAN
)
3420 lbitpos
= nbitsize
- lbitsize
- lbitpos
;
3422 /* Make the mask to be used against the extracted field. */
3423 mask
= build_int_cst_type (unsigned_type
, -1);
3424 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (nbitsize
- lbitsize
));
3425 mask
= const_binop (RSHIFT_EXPR
, mask
,
3426 size_int (nbitsize
- lbitsize
- lbitpos
));
3429 /* If not comparing with constant, just rework the comparison
3431 return fold_build2_loc (loc
, code
, compare_type
,
3432 fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
3433 make_bit_field_ref (loc
, linner
,
3438 fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
3439 make_bit_field_ref (loc
, rinner
,
3445 /* Otherwise, we are handling the constant case. See if the constant is too
3446 big for the field. Warn and return a tree of for 0 (false) if so. We do
3447 this not only for its own sake, but to avoid having to test for this
3448 error case below. If we didn't, we might generate wrong code.
3450 For unsigned fields, the constant shifted right by the field length should
3451 be all zero. For signed fields, the high-order bits should agree with
3456 if (! integer_zerop (const_binop (RSHIFT_EXPR
,
3457 fold_convert_loc (loc
,
3458 unsigned_type
, rhs
),
3459 size_int (lbitsize
))))
3461 warning (0, "comparison is always %d due to width of bit-field",
3463 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
3468 tree tem
= const_binop (RSHIFT_EXPR
,
3469 fold_convert_loc (loc
, signed_type
, rhs
),
3470 size_int (lbitsize
- 1));
3471 if (! integer_zerop (tem
) && ! integer_all_onesp (tem
))
3473 warning (0, "comparison is always %d due to width of bit-field",
3475 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
3479 /* Single-bit compares should always be against zero. */
3480 if (lbitsize
== 1 && ! integer_zerop (rhs
))
3482 code
= code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
;
3483 rhs
= build_int_cst (type
, 0);
3486 /* Make a new bitfield reference, shift the constant over the
3487 appropriate number of bits and mask it with the computed mask
3488 (in case this was a signed field). If we changed it, make a new one. */
3489 lhs
= make_bit_field_ref (loc
, linner
, unsigned_type
, nbitsize
, nbitpos
, 1);
3492 TREE_SIDE_EFFECTS (lhs
) = 1;
3493 TREE_THIS_VOLATILE (lhs
) = 1;
3496 rhs
= const_binop (BIT_AND_EXPR
,
3497 const_binop (LSHIFT_EXPR
,
3498 fold_convert_loc (loc
, unsigned_type
, rhs
),
3499 size_int (lbitpos
)),
3502 lhs
= build2_loc (loc
, code
, compare_type
,
3503 build2 (BIT_AND_EXPR
, unsigned_type
, lhs
, mask
), rhs
);
3507 /* Subroutine for fold_truth_andor_1: decode a field reference.
3509 If EXP is a comparison reference, we return the innermost reference.
3511 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3512 set to the starting bit number.
3514 If the innermost field can be completely contained in a mode-sized
3515 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3517 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3518 otherwise it is not changed.
3520 *PUNSIGNEDP is set to the signedness of the field.
3522 *PMASK is set to the mask used. This is either contained in a
3523 BIT_AND_EXPR or derived from the width of the field.
3525 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3527 Return 0 if this is not a component reference or is one that we can't
3528 do anything with. */
3531 decode_field_reference (location_t loc
, tree exp
, HOST_WIDE_INT
*pbitsize
,
3532 HOST_WIDE_INT
*pbitpos
, enum machine_mode
*pmode
,
3533 int *punsignedp
, int *pvolatilep
,
3534 tree
*pmask
, tree
*pand_mask
)
3536 tree outer_type
= 0;
3538 tree mask
, inner
, offset
;
3540 unsigned int precision
;
3542 /* All the optimizations using this function assume integer fields.
3543 There are problems with FP fields since the type_for_size call
3544 below can fail for, e.g., XFmode. */
3545 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp
)))
3548 /* We are interested in the bare arrangement of bits, so strip everything
3549 that doesn't affect the machine mode. However, record the type of the
3550 outermost expression if it may matter below. */
3551 if (CONVERT_EXPR_P (exp
)
3552 || TREE_CODE (exp
) == NON_LVALUE_EXPR
)
3553 outer_type
= TREE_TYPE (exp
);
3556 if (TREE_CODE (exp
) == BIT_AND_EXPR
)
3558 and_mask
= TREE_OPERAND (exp
, 1);
3559 exp
= TREE_OPERAND (exp
, 0);
3560 STRIP_NOPS (exp
); STRIP_NOPS (and_mask
);
3561 if (TREE_CODE (and_mask
) != INTEGER_CST
)
3565 inner
= get_inner_reference (exp
, pbitsize
, pbitpos
, &offset
, pmode
,
3566 punsignedp
, pvolatilep
, false);
3567 if ((inner
== exp
&& and_mask
== 0)
3568 || *pbitsize
< 0 || offset
!= 0
3569 || TREE_CODE (inner
) == PLACEHOLDER_EXPR
)
3572 /* If the number of bits in the reference is the same as the bitsize of
3573 the outer type, then the outer type gives the signedness. Otherwise
3574 (in case of a small bitfield) the signedness is unchanged. */
3575 if (outer_type
&& *pbitsize
== TYPE_PRECISION (outer_type
))
3576 *punsignedp
= TYPE_UNSIGNED (outer_type
);
3578 /* Compute the mask to access the bitfield. */
3579 unsigned_type
= lang_hooks
.types
.type_for_size (*pbitsize
, 1);
3580 precision
= TYPE_PRECISION (unsigned_type
);
3582 mask
= build_int_cst_type (unsigned_type
, -1);
3584 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
));
3585 mask
= const_binop (RSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
));
3587 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3589 mask
= fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
3590 fold_convert_loc (loc
, unsigned_type
, and_mask
), mask
);
3593 *pand_mask
= and_mask
;
3597 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3601 all_ones_mask_p (const_tree mask
, int size
)
3603 tree type
= TREE_TYPE (mask
);
3604 unsigned int precision
= TYPE_PRECISION (type
);
3607 tmask
= build_int_cst_type (signed_type_for (type
), -1);
3610 tree_int_cst_equal (mask
,
3611 const_binop (RSHIFT_EXPR
,
3612 const_binop (LSHIFT_EXPR
, tmask
,
3613 size_int (precision
- size
)),
3614 size_int (precision
- size
)));
3617 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3618 represents the sign bit of EXP's type. If EXP represents a sign
3619 or zero extension, also test VAL against the unextended type.
3620 The return value is the (sub)expression whose sign bit is VAL,
3621 or NULL_TREE otherwise. */
3624 sign_bit_p (tree exp
, const_tree val
)
3626 unsigned HOST_WIDE_INT mask_lo
, lo
;
3627 HOST_WIDE_INT mask_hi
, hi
;
3631 /* Tree EXP must have an integral type. */
3632 t
= TREE_TYPE (exp
);
3633 if (! INTEGRAL_TYPE_P (t
))
3636 /* Tree VAL must be an integer constant. */
3637 if (TREE_CODE (val
) != INTEGER_CST
3638 || TREE_OVERFLOW (val
))
3641 width
= TYPE_PRECISION (t
);
3642 if (width
> HOST_BITS_PER_WIDE_INT
)
3644 hi
= (unsigned HOST_WIDE_INT
) 1 << (width
- HOST_BITS_PER_WIDE_INT
- 1);
3647 mask_hi
= ((unsigned HOST_WIDE_INT
) -1
3648 >> (HOST_BITS_PER_DOUBLE_INT
- width
));
3654 lo
= (unsigned HOST_WIDE_INT
) 1 << (width
- 1);
3657 mask_lo
= ((unsigned HOST_WIDE_INT
) -1
3658 >> (HOST_BITS_PER_WIDE_INT
- width
));
3661 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3662 treat VAL as if it were unsigned. */
3663 if ((TREE_INT_CST_HIGH (val
) & mask_hi
) == hi
3664 && (TREE_INT_CST_LOW (val
) & mask_lo
) == lo
)
3667 /* Handle extension from a narrower type. */
3668 if (TREE_CODE (exp
) == NOP_EXPR
3669 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))) < width
)
3670 return sign_bit_p (TREE_OPERAND (exp
, 0), val
);
3675 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3676 to be evaluated unconditionally. */
3679 simple_operand_p (const_tree exp
)
3681 /* Strip any conversions that don't change the machine mode. */
3684 return (CONSTANT_CLASS_P (exp
)
3685 || TREE_CODE (exp
) == SSA_NAME
3687 && ! TREE_ADDRESSABLE (exp
)
3688 && ! TREE_THIS_VOLATILE (exp
)
3689 && ! DECL_NONLOCAL (exp
)
3690 /* Don't regard global variables as simple. They may be
3691 allocated in ways unknown to the compiler (shared memory,
3692 #pragma weak, etc). */
3693 && ! TREE_PUBLIC (exp
)
3694 && ! DECL_EXTERNAL (exp
)
3695 /* Loading a static variable is unduly expensive, but global
3696 registers aren't expensive. */
3697 && (! TREE_STATIC (exp
) || DECL_REGISTER (exp
))));
3700 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3701 to be evaluated unconditionally.
3702 I addition to simple_operand_p, we assume that comparisons, conversions,
3703 and logic-not operations are simple, if their operands are simple, too. */
3706 simple_operand_p_2 (tree exp
)
3708 enum tree_code code
;
3710 if (TREE_SIDE_EFFECTS (exp
)
3711 || tree_could_trap_p (exp
))
3714 while (CONVERT_EXPR_P (exp
))
3715 exp
= TREE_OPERAND (exp
, 0);
3717 code
= TREE_CODE (exp
);
3719 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
3720 return (simple_operand_p (TREE_OPERAND (exp
, 0))
3721 && simple_operand_p (TREE_OPERAND (exp
, 1)));
3723 if (code
== TRUTH_NOT_EXPR
)
3724 return simple_operand_p_2 (TREE_OPERAND (exp
, 0));
3726 return simple_operand_p (exp
);
3730 /* The following functions are subroutines to fold_range_test and allow it to
3731 try to change a logical combination of comparisons into a range test.
3734 X == 2 || X == 3 || X == 4 || X == 5
3738 (unsigned) (X - 2) <= 3
3740 We describe each set of comparisons as being either inside or outside
3741 a range, using a variable named like IN_P, and then describe the
3742 range with a lower and upper bound. If one of the bounds is omitted,
3743 it represents either the highest or lowest value of the type.
3745 In the comments below, we represent a range by two numbers in brackets
3746 preceded by a "+" to designate being inside that range, or a "-" to
3747 designate being outside that range, so the condition can be inverted by
3748 flipping the prefix. An omitted bound is represented by a "-". For
3749 example, "- [-, 10]" means being outside the range starting at the lowest
3750 possible value and ending at 10, in other words, being greater than 10.
3751 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3754 We set up things so that the missing bounds are handled in a consistent
3755 manner so neither a missing bound nor "true" and "false" need to be
3756 handled using a special case. */
3758 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3759 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3760 and UPPER1_P are nonzero if the respective argument is an upper bound
3761 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3762 must be specified for a comparison. ARG1 will be converted to ARG0's
3763 type if both are specified. */
3766 range_binop (enum tree_code code
, tree type
, tree arg0
, int upper0_p
,
3767 tree arg1
, int upper1_p
)
3773 /* If neither arg represents infinity, do the normal operation.
3774 Else, if not a comparison, return infinity. Else handle the special
3775 comparison rules. Note that most of the cases below won't occur, but
3776 are handled for consistency. */
3778 if (arg0
!= 0 && arg1
!= 0)
3780 tem
= fold_build2 (code
, type
!= 0 ? type
: TREE_TYPE (arg0
),
3781 arg0
, fold_convert (TREE_TYPE (arg0
), arg1
));
3783 return TREE_CODE (tem
) == INTEGER_CST
? tem
: 0;
3786 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
3789 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3790 for neither. In real maths, we cannot assume open ended ranges are
3791 the same. But, this is computer arithmetic, where numbers are finite.
3792 We can therefore make the transformation of any unbounded range with
3793 the value Z, Z being greater than any representable number. This permits
3794 us to treat unbounded ranges as equal. */
3795 sgn0
= arg0
!= 0 ? 0 : (upper0_p
? 1 : -1);
3796 sgn1
= arg1
!= 0 ? 0 : (upper1_p
? 1 : -1);
3800 result
= sgn0
== sgn1
;
3803 result
= sgn0
!= sgn1
;
3806 result
= sgn0
< sgn1
;
3809 result
= sgn0
<= sgn1
;
3812 result
= sgn0
> sgn1
;
3815 result
= sgn0
>= sgn1
;
3821 return constant_boolean_node (result
, type
);
3824 /* Helper routine for make_range. Perform one step for it, return
3825 new expression if the loop should continue or NULL_TREE if it should
3829 make_range_step (location_t loc
, enum tree_code code
, tree arg0
, tree arg1
,
3830 tree exp_type
, tree
*p_low
, tree
*p_high
, int *p_in_p
,
3831 bool *strict_overflow_p
)
3833 tree arg0_type
= TREE_TYPE (arg0
);
3834 tree n_low
, n_high
, low
= *p_low
, high
= *p_high
;
3835 int in_p
= *p_in_p
, n_in_p
;
3839 case TRUTH_NOT_EXPR
:
3843 case EQ_EXPR
: case NE_EXPR
:
3844 case LT_EXPR
: case LE_EXPR
: case GE_EXPR
: case GT_EXPR
:
3845 /* We can only do something if the range is testing for zero
3846 and if the second operand is an integer constant. Note that
3847 saying something is "in" the range we make is done by
3848 complementing IN_P since it will set in the initial case of
3849 being not equal to zero; "out" is leaving it alone. */
3850 if (low
== NULL_TREE
|| high
== NULL_TREE
3851 || ! integer_zerop (low
) || ! integer_zerop (high
)
3852 || TREE_CODE (arg1
) != INTEGER_CST
)
3857 case NE_EXPR
: /* - [c, c] */
3860 case EQ_EXPR
: /* + [c, c] */
3861 in_p
= ! in_p
, low
= high
= arg1
;
3863 case GT_EXPR
: /* - [-, c] */
3864 low
= 0, high
= arg1
;
3866 case GE_EXPR
: /* + [c, -] */
3867 in_p
= ! in_p
, low
= arg1
, high
= 0;
3869 case LT_EXPR
: /* - [c, -] */
3870 low
= arg1
, high
= 0;
3872 case LE_EXPR
: /* + [-, c] */
3873 in_p
= ! in_p
, low
= 0, high
= arg1
;
3879 /* If this is an unsigned comparison, we also know that EXP is
3880 greater than or equal to zero. We base the range tests we make
3881 on that fact, so we record it here so we can parse existing
3882 range tests. We test arg0_type since often the return type
3883 of, e.g. EQ_EXPR, is boolean. */
3884 if (TYPE_UNSIGNED (arg0_type
) && (low
== 0 || high
== 0))
3886 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
,
3888 build_int_cst (arg0_type
, 0),
3892 in_p
= n_in_p
, low
= n_low
, high
= n_high
;
3894 /* If the high bound is missing, but we have a nonzero low
3895 bound, reverse the range so it goes from zero to the low bound
3897 if (high
== 0 && low
&& ! integer_zerop (low
))
3900 high
= range_binop (MINUS_EXPR
, NULL_TREE
, low
, 0,
3901 integer_one_node
, 0);
3902 low
= build_int_cst (arg0_type
, 0);
3912 /* (-x) IN [a,b] -> x in [-b, -a] */
3913 n_low
= range_binop (MINUS_EXPR
, exp_type
,
3914 build_int_cst (exp_type
, 0),
3916 n_high
= range_binop (MINUS_EXPR
, exp_type
,
3917 build_int_cst (exp_type
, 0),
3919 if (n_high
!= 0 && TREE_OVERFLOW (n_high
))
3925 return build2_loc (loc
, MINUS_EXPR
, exp_type
, negate_expr (arg0
),
3926 build_int_cst (exp_type
, 1));
3930 if (TREE_CODE (arg1
) != INTEGER_CST
)
3933 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3934 move a constant to the other side. */
3935 if (!TYPE_UNSIGNED (arg0_type
)
3936 && !TYPE_OVERFLOW_UNDEFINED (arg0_type
))
3939 /* If EXP is signed, any overflow in the computation is undefined,
3940 so we don't worry about it so long as our computations on
3941 the bounds don't overflow. For unsigned, overflow is defined
3942 and this is exactly the right thing. */
3943 n_low
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
3944 arg0_type
, low
, 0, arg1
, 0);
3945 n_high
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
3946 arg0_type
, high
, 1, arg1
, 0);
3947 if ((n_low
!= 0 && TREE_OVERFLOW (n_low
))
3948 || (n_high
!= 0 && TREE_OVERFLOW (n_high
)))
3951 if (TYPE_OVERFLOW_UNDEFINED (arg0_type
))
3952 *strict_overflow_p
= true;
3955 /* Check for an unsigned range which has wrapped around the maximum
3956 value thus making n_high < n_low, and normalize it. */
3957 if (n_low
&& n_high
&& tree_int_cst_lt (n_high
, n_low
))
3959 low
= range_binop (PLUS_EXPR
, arg0_type
, n_high
, 0,
3960 integer_one_node
, 0);
3961 high
= range_binop (MINUS_EXPR
, arg0_type
, n_low
, 0,
3962 integer_one_node
, 0);
3964 /* If the range is of the form +/- [ x+1, x ], we won't
3965 be able to normalize it. But then, it represents the
3966 whole range or the empty set, so make it
3968 if (tree_int_cst_equal (n_low
, low
)
3969 && tree_int_cst_equal (n_high
, high
))
3975 low
= n_low
, high
= n_high
;
3983 case NON_LVALUE_EXPR
:
3984 if (TYPE_PRECISION (arg0_type
) > TYPE_PRECISION (exp_type
))
3987 if (! INTEGRAL_TYPE_P (arg0_type
)
3988 || (low
!= 0 && ! int_fits_type_p (low
, arg0_type
))
3989 || (high
!= 0 && ! int_fits_type_p (high
, arg0_type
)))
3992 n_low
= low
, n_high
= high
;
3995 n_low
= fold_convert_loc (loc
, arg0_type
, n_low
);
3998 n_high
= fold_convert_loc (loc
, arg0_type
, n_high
);
4000 /* If we're converting arg0 from an unsigned type, to exp,
4001 a signed type, we will be doing the comparison as unsigned.
4002 The tests above have already verified that LOW and HIGH
4005 So we have to ensure that we will handle large unsigned
4006 values the same way that the current signed bounds treat
4009 if (!TYPE_UNSIGNED (exp_type
) && TYPE_UNSIGNED (arg0_type
))
4013 /* For fixed-point modes, we need to pass the saturating flag
4014 as the 2nd parameter. */
4015 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type
)))
4017 = lang_hooks
.types
.type_for_mode (TYPE_MODE (arg0_type
),
4018 TYPE_SATURATING (arg0_type
));
4021 = lang_hooks
.types
.type_for_mode (TYPE_MODE (arg0_type
), 1);
4023 /* A range without an upper bound is, naturally, unbounded.
4024 Since convert would have cropped a very large value, use
4025 the max value for the destination type. */
4027 = TYPE_MAX_VALUE (equiv_type
) ? TYPE_MAX_VALUE (equiv_type
)
4028 : TYPE_MAX_VALUE (arg0_type
);
4030 if (TYPE_PRECISION (exp_type
) == TYPE_PRECISION (arg0_type
))
4031 high_positive
= fold_build2_loc (loc
, RSHIFT_EXPR
, arg0_type
,
4032 fold_convert_loc (loc
, arg0_type
,
4034 build_int_cst (arg0_type
, 1));
4036 /* If the low bound is specified, "and" the range with the
4037 range for which the original unsigned value will be
4041 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
, 1, n_low
, n_high
,
4042 1, fold_convert_loc (loc
, arg0_type
,
4047 in_p
= (n_in_p
== in_p
);
4051 /* Otherwise, "or" the range with the range of the input
4052 that will be interpreted as negative. */
4053 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
, 0, n_low
, n_high
,
4054 1, fold_convert_loc (loc
, arg0_type
,
4059 in_p
= (in_p
!= n_in_p
);
4073 /* Given EXP, a logical expression, set the range it is testing into
4074 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4075 actually being tested. *PLOW and *PHIGH will be made of the same
4076 type as the returned expression. If EXP is not a comparison, we
4077 will most likely not be returning a useful value and range. Set
4078 *STRICT_OVERFLOW_P to true if the return value is only valid
4079 because signed overflow is undefined; otherwise, do not change
4080 *STRICT_OVERFLOW_P. */
4083 make_range (tree exp
, int *pin_p
, tree
*plow
, tree
*phigh
,
4084 bool *strict_overflow_p
)
4086 enum tree_code code
;
4087 tree arg0
, arg1
= NULL_TREE
;
4088 tree exp_type
, nexp
;
4091 location_t loc
= EXPR_LOCATION (exp
);
4093 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4094 and see if we can refine the range. Some of the cases below may not
4095 happen, but it doesn't seem worth worrying about this. We "continue"
4096 the outer loop when we've changed something; otherwise we "break"
4097 the switch, which will "break" the while. */
4100 low
= high
= build_int_cst (TREE_TYPE (exp
), 0);
4104 code
= TREE_CODE (exp
);
4105 exp_type
= TREE_TYPE (exp
);
4108 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
)))
4110 if (TREE_OPERAND_LENGTH (exp
) > 0)
4111 arg0
= TREE_OPERAND (exp
, 0);
4112 if (TREE_CODE_CLASS (code
) == tcc_binary
4113 || TREE_CODE_CLASS (code
) == tcc_comparison
4114 || (TREE_CODE_CLASS (code
) == tcc_expression
4115 && TREE_OPERAND_LENGTH (exp
) > 1))
4116 arg1
= TREE_OPERAND (exp
, 1);
4118 if (arg0
== NULL_TREE
)
4121 nexp
= make_range_step (loc
, code
, arg0
, arg1
, exp_type
, &low
,
4122 &high
, &in_p
, strict_overflow_p
);
4123 if (nexp
== NULL_TREE
)
4128 /* If EXP is a constant, we can evaluate whether this is true or false. */
4129 if (TREE_CODE (exp
) == INTEGER_CST
)
4131 in_p
= in_p
== (integer_onep (range_binop (GE_EXPR
, integer_type_node
,
4133 && integer_onep (range_binop (LE_EXPR
, integer_type_node
,
4139 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
4143 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4144 type, TYPE, return an expression to test if EXP is in (or out of, depending
4145 on IN_P) the range. Return 0 if the test couldn't be created. */
4148 build_range_check (location_t loc
, tree type
, tree exp
, int in_p
,
4149 tree low
, tree high
)
4151 tree etype
= TREE_TYPE (exp
), value
;
4153 #ifdef HAVE_canonicalize_funcptr_for_compare
4154 /* Disable this optimization for function pointer expressions
4155 on targets that require function pointer canonicalization. */
4156 if (HAVE_canonicalize_funcptr_for_compare
4157 && TREE_CODE (etype
) == POINTER_TYPE
4158 && TREE_CODE (TREE_TYPE (etype
)) == FUNCTION_TYPE
)
4164 value
= build_range_check (loc
, type
, exp
, 1, low
, high
);
4166 return invert_truthvalue_loc (loc
, value
);
4171 if (low
== 0 && high
== 0)
4172 return build_int_cst (type
, 1);
4175 return fold_build2_loc (loc
, LE_EXPR
, type
, exp
,
4176 fold_convert_loc (loc
, etype
, high
));
4179 return fold_build2_loc (loc
, GE_EXPR
, type
, exp
,
4180 fold_convert_loc (loc
, etype
, low
));
4182 if (operand_equal_p (low
, high
, 0))
4183 return fold_build2_loc (loc
, EQ_EXPR
, type
, exp
,
4184 fold_convert_loc (loc
, etype
, low
));
4186 if (integer_zerop (low
))
4188 if (! TYPE_UNSIGNED (etype
))
4190 etype
= unsigned_type_for (etype
);
4191 high
= fold_convert_loc (loc
, etype
, high
);
4192 exp
= fold_convert_loc (loc
, etype
, exp
);
4194 return build_range_check (loc
, type
, exp
, 1, 0, high
);
4197 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4198 if (integer_onep (low
) && TREE_CODE (high
) == INTEGER_CST
)
4200 unsigned HOST_WIDE_INT lo
;
4204 prec
= TYPE_PRECISION (etype
);
4205 if (prec
<= HOST_BITS_PER_WIDE_INT
)
4208 lo
= ((unsigned HOST_WIDE_INT
) 1 << (prec
- 1)) - 1;
4212 hi
= ((HOST_WIDE_INT
) 1 << (prec
- HOST_BITS_PER_WIDE_INT
- 1)) - 1;
4213 lo
= (unsigned HOST_WIDE_INT
) -1;
4216 if (TREE_INT_CST_HIGH (high
) == hi
&& TREE_INT_CST_LOW (high
) == lo
)
4218 if (TYPE_UNSIGNED (etype
))
4220 tree signed_etype
= signed_type_for (etype
);
4221 if (TYPE_PRECISION (signed_etype
) != TYPE_PRECISION (etype
))
4223 = build_nonstandard_integer_type (TYPE_PRECISION (etype
), 0);
4225 etype
= signed_etype
;
4226 exp
= fold_convert_loc (loc
, etype
, exp
);
4228 return fold_build2_loc (loc
, GT_EXPR
, type
, exp
,
4229 build_int_cst (etype
, 0));
4233 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4234 This requires wrap-around arithmetics for the type of the expression.
4235 First make sure that arithmetics in this type is valid, then make sure
4236 that it wraps around. */
4237 if (TREE_CODE (etype
) == ENUMERAL_TYPE
|| TREE_CODE (etype
) == BOOLEAN_TYPE
)
4238 etype
= lang_hooks
.types
.type_for_size (TYPE_PRECISION (etype
),
4239 TYPE_UNSIGNED (etype
));
4241 if (TREE_CODE (etype
) == INTEGER_TYPE
&& !TYPE_OVERFLOW_WRAPS (etype
))
4243 tree utype
, minv
, maxv
;
4245 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4246 for the type in question, as we rely on this here. */
4247 utype
= unsigned_type_for (etype
);
4248 maxv
= fold_convert_loc (loc
, utype
, TYPE_MAX_VALUE (etype
));
4249 maxv
= range_binop (PLUS_EXPR
, NULL_TREE
, maxv
, 1,
4250 integer_one_node
, 1);
4251 minv
= fold_convert_loc (loc
, utype
, TYPE_MIN_VALUE (etype
));
4253 if (integer_zerop (range_binop (NE_EXPR
, integer_type_node
,
4260 high
= fold_convert_loc (loc
, etype
, high
);
4261 low
= fold_convert_loc (loc
, etype
, low
);
4262 exp
= fold_convert_loc (loc
, etype
, exp
);
4264 value
= const_binop (MINUS_EXPR
, high
, low
);
4267 if (POINTER_TYPE_P (etype
))
4269 if (value
!= 0 && !TREE_OVERFLOW (value
))
4271 low
= fold_build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (low
), low
);
4272 return build_range_check (loc
, type
,
4273 fold_build_pointer_plus_loc (loc
, exp
, low
),
4274 1, build_int_cst (etype
, 0), value
);
4279 if (value
!= 0 && !TREE_OVERFLOW (value
))
4280 return build_range_check (loc
, type
,
4281 fold_build2_loc (loc
, MINUS_EXPR
, etype
, exp
, low
),
4282 1, build_int_cst (etype
, 0), value
);
4287 /* Return the predecessor of VAL in its type, handling the infinite case. */
4290 range_predecessor (tree val
)
4292 tree type
= TREE_TYPE (val
);
4294 if (INTEGRAL_TYPE_P (type
)
4295 && operand_equal_p (val
, TYPE_MIN_VALUE (type
), 0))
4298 return range_binop (MINUS_EXPR
, NULL_TREE
, val
, 0, integer_one_node
, 0);
4301 /* Return the successor of VAL in its type, handling the infinite case. */
4304 range_successor (tree val
)
4306 tree type
= TREE_TYPE (val
);
4308 if (INTEGRAL_TYPE_P (type
)
4309 && operand_equal_p (val
, TYPE_MAX_VALUE (type
), 0))
4312 return range_binop (PLUS_EXPR
, NULL_TREE
, val
, 0, integer_one_node
, 0);
4315 /* Given two ranges, see if we can merge them into one. Return 1 if we
4316 can, 0 if we can't. Set the output range into the specified parameters. */
4319 merge_ranges (int *pin_p
, tree
*plow
, tree
*phigh
, int in0_p
, tree low0
,
4320 tree high0
, int in1_p
, tree low1
, tree high1
)
4328 int lowequal
= ((low0
== 0 && low1
== 0)
4329 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4330 low0
, 0, low1
, 0)));
4331 int highequal
= ((high0
== 0 && high1
== 0)
4332 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4333 high0
, 1, high1
, 1)));
4335 /* Make range 0 be the range that starts first, or ends last if they
4336 start at the same value. Swap them if it isn't. */
4337 if (integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4340 && integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4341 high1
, 1, high0
, 1))))
4343 temp
= in0_p
, in0_p
= in1_p
, in1_p
= temp
;
4344 tem
= low0
, low0
= low1
, low1
= tem
;
4345 tem
= high0
, high0
= high1
, high1
= tem
;
4348 /* Now flag two cases, whether the ranges are disjoint or whether the
4349 second range is totally subsumed in the first. Note that the tests
4350 below are simplified by the ones above. */
4351 no_overlap
= integer_onep (range_binop (LT_EXPR
, integer_type_node
,
4352 high0
, 1, low1
, 0));
4353 subset
= integer_onep (range_binop (LE_EXPR
, integer_type_node
,
4354 high1
, 1, high0
, 1));
4356 /* We now have four cases, depending on whether we are including or
4357 excluding the two ranges. */
4360 /* If they don't overlap, the result is false. If the second range
4361 is a subset it is the result. Otherwise, the range is from the start
4362 of the second to the end of the first. */
4364 in_p
= 0, low
= high
= 0;
4366 in_p
= 1, low
= low1
, high
= high1
;
4368 in_p
= 1, low
= low1
, high
= high0
;
4371 else if (in0_p
&& ! in1_p
)
4373 /* If they don't overlap, the result is the first range. If they are
4374 equal, the result is false. If the second range is a subset of the
4375 first, and the ranges begin at the same place, we go from just after
4376 the end of the second range to the end of the first. If the second
4377 range is not a subset of the first, or if it is a subset and both
4378 ranges end at the same place, the range starts at the start of the
4379 first range and ends just before the second range.
4380 Otherwise, we can't describe this as a single range. */
4382 in_p
= 1, low
= low0
, high
= high0
;
4383 else if (lowequal
&& highequal
)
4384 in_p
= 0, low
= high
= 0;
4385 else if (subset
&& lowequal
)
4387 low
= range_successor (high1
);
4392 /* We are in the weird situation where high0 > high1 but
4393 high1 has no successor. Punt. */
4397 else if (! subset
|| highequal
)
4400 high
= range_predecessor (low1
);
4404 /* low0 < low1 but low1 has no predecessor. Punt. */
4412 else if (! in0_p
&& in1_p
)
4414 /* If they don't overlap, the result is the second range. If the second
4415 is a subset of the first, the result is false. Otherwise,
4416 the range starts just after the first range and ends at the
4417 end of the second. */
4419 in_p
= 1, low
= low1
, high
= high1
;
4420 else if (subset
|| highequal
)
4421 in_p
= 0, low
= high
= 0;
4424 low
= range_successor (high0
);
4429 /* high1 > high0 but high0 has no successor. Punt. */
4437 /* The case where we are excluding both ranges. Here the complex case
4438 is if they don't overlap. In that case, the only time we have a
4439 range is if they are adjacent. If the second is a subset of the
4440 first, the result is the first. Otherwise, the range to exclude
4441 starts at the beginning of the first range and ends at the end of the
4445 if (integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4446 range_successor (high0
),
4448 in_p
= 0, low
= low0
, high
= high1
;
4451 /* Canonicalize - [min, x] into - [-, x]. */
4452 if (low0
&& TREE_CODE (low0
) == INTEGER_CST
)
4453 switch (TREE_CODE (TREE_TYPE (low0
)))
4456 if (TYPE_PRECISION (TREE_TYPE (low0
))
4457 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0
))))
4461 if (tree_int_cst_equal (low0
,
4462 TYPE_MIN_VALUE (TREE_TYPE (low0
))))
4466 if (TYPE_UNSIGNED (TREE_TYPE (low0
))
4467 && integer_zerop (low0
))
4474 /* Canonicalize - [x, max] into - [x, -]. */
4475 if (high1
&& TREE_CODE (high1
) == INTEGER_CST
)
4476 switch (TREE_CODE (TREE_TYPE (high1
)))
4479 if (TYPE_PRECISION (TREE_TYPE (high1
))
4480 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1
))))
4484 if (tree_int_cst_equal (high1
,
4485 TYPE_MAX_VALUE (TREE_TYPE (high1
))))
4489 if (TYPE_UNSIGNED (TREE_TYPE (high1
))
4490 && integer_zerop (range_binop (PLUS_EXPR
, NULL_TREE
,
4492 integer_one_node
, 1)))
4499 /* The ranges might be also adjacent between the maximum and
4500 minimum values of the given type. For
4501 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4502 return + [x + 1, y - 1]. */
4503 if (low0
== 0 && high1
== 0)
4505 low
= range_successor (high0
);
4506 high
= range_predecessor (low1
);
4507 if (low
== 0 || high
== 0)
4517 in_p
= 0, low
= low0
, high
= high0
;
4519 in_p
= 0, low
= low0
, high
= high1
;
4522 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
4527 /* Subroutine of fold, looking inside expressions of the form
4528 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4529 of the COND_EXPR. This function is being used also to optimize
4530 A op B ? C : A, by reversing the comparison first.
4532 Return a folded expression whose code is not a COND_EXPR
4533 anymore, or NULL_TREE if no folding opportunity is found. */
4536 fold_cond_expr_with_comparison (location_t loc
, tree type
,
4537 tree arg0
, tree arg1
, tree arg2
)
4539 enum tree_code comp_code
= TREE_CODE (arg0
);
4540 tree arg00
= TREE_OPERAND (arg0
, 0);
4541 tree arg01
= TREE_OPERAND (arg0
, 1);
4542 tree arg1_type
= TREE_TYPE (arg1
);
4548 /* If we have A op 0 ? A : -A, consider applying the following
4551 A == 0? A : -A same as -A
4552 A != 0? A : -A same as A
4553 A >= 0? A : -A same as abs (A)
4554 A > 0? A : -A same as abs (A)
4555 A <= 0? A : -A same as -abs (A)
4556 A < 0? A : -A same as -abs (A)
4558 None of these transformations work for modes with signed
4559 zeros. If A is +/-0, the first two transformations will
4560 change the sign of the result (from +0 to -0, or vice
4561 versa). The last four will fix the sign of the result,
4562 even though the original expressions could be positive or
4563 negative, depending on the sign of A.
4565 Note that all these transformations are correct if A is
4566 NaN, since the two alternatives (A and -A) are also NaNs. */
4567 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type
))
4568 && (FLOAT_TYPE_P (TREE_TYPE (arg01
))
4569 ? real_zerop (arg01
)
4570 : integer_zerop (arg01
))
4571 && ((TREE_CODE (arg2
) == NEGATE_EXPR
4572 && operand_equal_p (TREE_OPERAND (arg2
, 0), arg1
, 0))
4573 /* In the case that A is of the form X-Y, '-A' (arg2) may
4574 have already been folded to Y-X, check for that. */
4575 || (TREE_CODE (arg1
) == MINUS_EXPR
4576 && TREE_CODE (arg2
) == MINUS_EXPR
4577 && operand_equal_p (TREE_OPERAND (arg1
, 0),
4578 TREE_OPERAND (arg2
, 1), 0)
4579 && operand_equal_p (TREE_OPERAND (arg1
, 1),
4580 TREE_OPERAND (arg2
, 0), 0))))
4585 tem
= fold_convert_loc (loc
, arg1_type
, arg1
);
4586 return pedantic_non_lvalue_loc (loc
,
4587 fold_convert_loc (loc
, type
,
4588 negate_expr (tem
)));
4591 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
4594 if (flag_trapping_math
)
4599 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
4600 arg1
= fold_convert_loc (loc
, signed_type_for
4601 (TREE_TYPE (arg1
)), arg1
);
4602 tem
= fold_build1_loc (loc
, ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
4603 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
4606 if (flag_trapping_math
)
4610 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
4611 arg1
= fold_convert_loc (loc
, signed_type_for
4612 (TREE_TYPE (arg1
)), arg1
);
4613 tem
= fold_build1_loc (loc
, ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
4614 return negate_expr (fold_convert_loc (loc
, type
, tem
));
4616 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
4620 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4621 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4622 both transformations are correct when A is NaN: A != 0
4623 is then true, and A == 0 is false. */
4625 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type
))
4626 && integer_zerop (arg01
) && integer_zerop (arg2
))
4628 if (comp_code
== NE_EXPR
)
4629 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
4630 else if (comp_code
== EQ_EXPR
)
4631 return build_int_cst (type
, 0);
4634 /* Try some transformations of A op B ? A : B.
4636 A == B? A : B same as B
4637 A != B? A : B same as A
4638 A >= B? A : B same as max (A, B)
4639 A > B? A : B same as max (B, A)
4640 A <= B? A : B same as min (A, B)
4641 A < B? A : B same as min (B, A)
4643 As above, these transformations don't work in the presence
4644 of signed zeros. For example, if A and B are zeros of
4645 opposite sign, the first two transformations will change
4646 the sign of the result. In the last four, the original
4647 expressions give different results for (A=+0, B=-0) and
4648 (A=-0, B=+0), but the transformed expressions do not.
4650 The first two transformations are correct if either A or B
4651 is a NaN. In the first transformation, the condition will
4652 be false, and B will indeed be chosen. In the case of the
4653 second transformation, the condition A != B will be true,
4654 and A will be chosen.
4656 The conversions to max() and min() are not correct if B is
4657 a number and A is not. The conditions in the original
4658 expressions will be false, so all four give B. The min()
4659 and max() versions would give a NaN instead. */
4660 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type
))
4661 && operand_equal_for_comparison_p (arg01
, arg2
, arg00
)
4662 /* Avoid these transformations if the COND_EXPR may be used
4663 as an lvalue in the C++ front-end. PR c++/19199. */
4665 || (strcmp (lang_hooks
.name
, "GNU C++") != 0
4666 && strcmp (lang_hooks
.name
, "GNU Objective-C++") != 0)
4667 || ! maybe_lvalue_p (arg1
)
4668 || ! maybe_lvalue_p (arg2
)))
4670 tree comp_op0
= arg00
;
4671 tree comp_op1
= arg01
;
4672 tree comp_type
= TREE_TYPE (comp_op0
);
4674 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4675 if (TYPE_MAIN_VARIANT (comp_type
) == TYPE_MAIN_VARIANT (type
))
4685 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg2
));
4687 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
4692 /* In C++ a ?: expression can be an lvalue, so put the
4693 operand which will be used if they are equal first
4694 so that we can convert this back to the
4695 corresponding COND_EXPR. */
4696 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4698 comp_op0
= fold_convert_loc (loc
, comp_type
, comp_op0
);
4699 comp_op1
= fold_convert_loc (loc
, comp_type
, comp_op1
);
4700 tem
= (comp_code
== LE_EXPR
|| comp_code
== UNLE_EXPR
)
4701 ? fold_build2_loc (loc
, MIN_EXPR
, comp_type
, comp_op0
, comp_op1
)
4702 : fold_build2_loc (loc
, MIN_EXPR
, comp_type
,
4703 comp_op1
, comp_op0
);
4704 return pedantic_non_lvalue_loc (loc
,
4705 fold_convert_loc (loc
, type
, tem
));
4712 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4714 comp_op0
= fold_convert_loc (loc
, comp_type
, comp_op0
);
4715 comp_op1
= fold_convert_loc (loc
, comp_type
, comp_op1
);
4716 tem
= (comp_code
== GE_EXPR
|| comp_code
== UNGE_EXPR
)
4717 ? fold_build2_loc (loc
, MAX_EXPR
, comp_type
, comp_op0
, comp_op1
)
4718 : fold_build2_loc (loc
, MAX_EXPR
, comp_type
,
4719 comp_op1
, comp_op0
);
4720 return pedantic_non_lvalue_loc (loc
,
4721 fold_convert_loc (loc
, type
, tem
));
4725 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4726 return pedantic_non_lvalue_loc (loc
,
4727 fold_convert_loc (loc
, type
, arg2
));
4730 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4731 return pedantic_non_lvalue_loc (loc
,
4732 fold_convert_loc (loc
, type
, arg1
));
4735 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
4740 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4741 we might still be able to simplify this. For example,
4742 if C1 is one less or one more than C2, this might have started
4743 out as a MIN or MAX and been transformed by this function.
4744 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4746 if (INTEGRAL_TYPE_P (type
)
4747 && TREE_CODE (arg01
) == INTEGER_CST
4748 && TREE_CODE (arg2
) == INTEGER_CST
)
4752 if (TREE_CODE (arg1
) == INTEGER_CST
)
4754 /* We can replace A with C1 in this case. */
4755 arg1
= fold_convert_loc (loc
, type
, arg01
);
4756 return fold_build3_loc (loc
, COND_EXPR
, type
, arg0
, arg1
, arg2
);
4759 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4760 MIN_EXPR, to preserve the signedness of the comparison. */
4761 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
),
4763 && operand_equal_p (arg01
,
4764 const_binop (PLUS_EXPR
, arg2
,
4765 build_int_cst (type
, 1)),
4768 tem
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (arg00
), arg00
,
4769 fold_convert_loc (loc
, TREE_TYPE (arg00
),
4771 return pedantic_non_lvalue_loc (loc
,
4772 fold_convert_loc (loc
, type
, tem
));
4777 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4779 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
),
4781 && operand_equal_p (arg01
,
4782 const_binop (MINUS_EXPR
, arg2
,
4783 build_int_cst (type
, 1)),
4786 tem
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (arg00
), arg00
,
4787 fold_convert_loc (loc
, TREE_TYPE (arg00
),
4789 return pedantic_non_lvalue_loc (loc
,
4790 fold_convert_loc (loc
, type
, tem
));
4795 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4796 MAX_EXPR, to preserve the signedness of the comparison. */
4797 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
),
4799 && operand_equal_p (arg01
,
4800 const_binop (MINUS_EXPR
, arg2
,
4801 build_int_cst (type
, 1)),
4804 tem
= fold_build2_loc (loc
, MAX_EXPR
, TREE_TYPE (arg00
), arg00
,
4805 fold_convert_loc (loc
, TREE_TYPE (arg00
),
4807 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
4812 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4813 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
),
4815 && operand_equal_p (arg01
,
4816 const_binop (PLUS_EXPR
, arg2
,
4817 build_int_cst (type
, 1)),
4820 tem
= fold_build2_loc (loc
, MAX_EXPR
, TREE_TYPE (arg00
), arg00
,
4821 fold_convert_loc (loc
, TREE_TYPE (arg00
),
4823 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
4837 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4838 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4839 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4843 /* EXP is some logical combination of boolean tests. See if we can
4844 merge it into some range test. Return the new tree if so. */
4847 fold_range_test (location_t loc
, enum tree_code code
, tree type
,
4850 int or_op
= (code
== TRUTH_ORIF_EXPR
4851 || code
== TRUTH_OR_EXPR
);
4852 int in0_p
, in1_p
, in_p
;
4853 tree low0
, low1
, low
, high0
, high1
, high
;
4854 bool strict_overflow_p
= false;
4855 tree lhs
= make_range (op0
, &in0_p
, &low0
, &high0
, &strict_overflow_p
);
4856 tree rhs
= make_range (op1
, &in1_p
, &low1
, &high1
, &strict_overflow_p
);
4858 const char * const warnmsg
= G_("assuming signed overflow does not occur "
4859 "when simplifying range test");
4861 /* If this is an OR operation, invert both sides; we will invert
4862 again at the end. */
4864 in0_p
= ! in0_p
, in1_p
= ! in1_p
;
4866 /* If both expressions are the same, if we can merge the ranges, and we
4867 can build the range test, return it or it inverted. If one of the
4868 ranges is always true or always false, consider it to be the same
4869 expression as the other. */
4870 if ((lhs
== 0 || rhs
== 0 || operand_equal_p (lhs
, rhs
, 0))
4871 && merge_ranges (&in_p
, &low
, &high
, in0_p
, low0
, high0
,
4873 && 0 != (tem
= (build_range_check (loc
, type
,
4875 : rhs
!= 0 ? rhs
: integer_zero_node
,
4878 if (strict_overflow_p
)
4879 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
4880 return or_op
? invert_truthvalue_loc (loc
, tem
) : tem
;
4883 /* On machines where the branch cost is expensive, if this is a
4884 short-circuited branch and the underlying object on both sides
4885 is the same, make a non-short-circuit operation. */
4886 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4887 && lhs
!= 0 && rhs
!= 0
4888 && (code
== TRUTH_ANDIF_EXPR
4889 || code
== TRUTH_ORIF_EXPR
)
4890 && operand_equal_p (lhs
, rhs
, 0))
4892 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4893 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4894 which cases we can't do this. */
4895 if (simple_operand_p (lhs
))
4896 return build2_loc (loc
, code
== TRUTH_ANDIF_EXPR
4897 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
4900 else if (!lang_hooks
.decls
.global_bindings_p ()
4901 && !CONTAINS_PLACEHOLDER_P (lhs
))
4903 tree common
= save_expr (lhs
);
4905 if (0 != (lhs
= build_range_check (loc
, type
, common
,
4906 or_op
? ! in0_p
: in0_p
,
4908 && (0 != (rhs
= build_range_check (loc
, type
, common
,
4909 or_op
? ! in1_p
: in1_p
,
4912 if (strict_overflow_p
)
4913 fold_overflow_warning (warnmsg
,
4914 WARN_STRICT_OVERFLOW_COMPARISON
);
4915 return build2_loc (loc
, code
== TRUTH_ANDIF_EXPR
4916 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
4925 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
4926 bit value.  Arrange things so the extra bits will be set to zero if and
4927 only if C is signed-extended to its full width.  If MASK is nonzero,
4928 it is an INTEGER_CST that should be AND'ed with the extra bits.  */
/* NOTE(review): corrupted extraction — each original line is split across
   several physical lines; gaps in the embedded numbering (4929-4930, 4932,
   4935-4939, 4942, 4945, 4954, 4957, 4963) mark lines lost by extraction,
   including the `static tree` return type, the declaration of `temp`,
   the early `return c;`, and the `if (mask != 0)` guard.  Code is kept
   byte-for-byte; only comments are added.  */
4931 unextend (tree c
, int p
, int unsignedp
, tree mask
)
4933 tree type
= TREE_TYPE (c
);
4934 int modesize
= GET_MODE_BITSIZE (TYPE_MODE (type
));
/* No adjustment needed when C already occupies the full mode width or is
   unsigned; the body of this early return (orig. 4938) is missing here.  */
4937 if (p
== modesize
|| unsignedp
)
4940 /* We work by getting just the sign bit into the low-order bit, then
4941 into the high-order bit, then sign-extend.  We then XOR that value
4943 temp
= const_binop (RSHIFT_EXPR
, c
, size_int (p
- 1));
4944 temp
= const_binop (BIT_AND_EXPR
, temp
, size_int (1));
4946 /* We must use a signed type in order to get an arithmetic right shift.
4947 However, we must also avoid introducing accidental overflows, so that
4948 a subsequent call to integer_zerop will work.  Hence we must
4949 do the type conversion here.  At this point, the constant is either
4950 zero or one, and the conversion to a signed type can never overflow.
4951 We could get an overflow if this conversion is done anywhere else.  */
4952 if (TYPE_UNSIGNED (type
))
4953 temp
= fold_convert (signed_type_for (type
), temp
);
4955 temp
= const_binop (LSHIFT_EXPR
, temp
, size_int (modesize
- 1));
4956 temp
= const_binop (RSHIFT_EXPR
, temp
, size_int (modesize
- p
- 1));
/* NOTE(review): the guard `if (mask != 0)` (orig. 4957) is missing from
   this extraction — upstream, the AND below applies only when MASK is
   nonzero.  */
4958 temp
= const_binop (BIT_AND_EXPR
, temp
,
4959 fold_convert (TREE_TYPE (c
), mask
));
4960 /* If necessary, convert the type back to match the type of C.  */
4961 if (TYPE_UNSIGNED (type
))
4962 temp
= fold_convert (type
, temp
);
4964 return fold_convert (type
, const_binop (BIT_XOR_EXPR
, c
, temp
));
4967 /* For an expression that has the form
4971 we can drop one of the inner expressions and simplify to
4975 LOC is the location of the resulting expression.  OP is the inner
4976 logical operation; the left-hand side in the examples above, while CMPOP
4977 is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
4978 removing a condition that guards another, as in
4979 (A != NULL && A->...) || A == NULL
4980 which we must not transform.  If RHS_ONLY is true, only eliminate the
4981 right-most operand of the inner logical operation.  */
/* NOTE(review): corrupted extraction — gaps in the embedded numbering
   (e.g. 4985-4986, 4998-4999, 5001-5002, 5007-5008, 5016-5017, 5026, 5030,
   5033-5034) mark lines lost by extraction, including the function's return
   type and final parameter, the `return NULL_TREE;` early exits, the
   lhs/rhs replacement assignments, and the closing return operands.  Code
   is kept byte-for-byte; only comments are added.  */
4984 merge_truthop_with_opposite_arm (location_t loc
, tree op
, tree cmpop
,
4987 tree type
= TREE_TYPE (cmpop
);
4988 enum tree_code code
= TREE_CODE (cmpop
);
4989 enum tree_code truthop_code
= TREE_CODE (op
);
4990 tree lhs
= TREE_OPERAND (op
, 0);
4991 tree rhs
= TREE_OPERAND (op
, 1);
4992 tree orig_lhs
= lhs
, orig_rhs
= rhs
;
4993 enum tree_code rhs_code
= TREE_CODE (rhs
);
4994 enum tree_code lhs_code
= TREE_CODE (lhs
);
4995 enum tree_code inv_code
;
/* Bail out (bodies missing here) when OP or CMPOP has side effects, or
   CMPOP is not a comparison.  */
4997 if (TREE_SIDE_EFFECTS (op
) || TREE_SIDE_EFFECTS (cmpop
))
5000 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
/* Recurse into the right operand when it is the same logical operation.  */
5003 if (rhs_code
== truthop_code
)
5005 tree newrhs
= merge_truthop_with_opposite_arm (loc
, rhs
, cmpop
, rhs_only
);
5006 if (newrhs
!= NULL_TREE
)
5009 rhs_code
= TREE_CODE (rhs
);
/* Recurse into the left operand, but only when RHS_ONLY does not forbid
   touching it.  */
5012 if (lhs_code
== truthop_code
&& !rhs_only
)
5014 tree newlhs
= merge_truthop_with_opposite_arm (loc
, lhs
, cmpop
, false);
5015 if (newlhs
!= NULL_TREE
)
5018 lhs_code
= TREE_CODE (lhs
);
/* Drop an inner comparison that is exactly the inverse of CMPOP.  */
5022 inv_code
= invert_tree_comparison (code
, HONOR_NANS (TYPE_MODE (type
)));
5023 if (inv_code
== rhs_code
5024 && operand_equal_p (TREE_OPERAND (rhs
, 0), TREE_OPERAND (cmpop
, 0), 0)
5025 && operand_equal_p (TREE_OPERAND (rhs
, 1), TREE_OPERAND (cmpop
, 1), 0))
5027 if (!rhs_only
&& inv_code
== lhs_code
5028 && operand_equal_p (TREE_OPERAND (lhs
, 0), TREE_OPERAND (cmpop
, 0), 0)
5029 && operand_equal_p (TREE_OPERAND (lhs
, 1), TREE_OPERAND (cmpop
, 1), 0))
5031 if (rhs
!= orig_rhs
|| lhs
!= orig_lhs
)
5032 return fold_build2_loc (loc
, truthop_code
, TREE_TYPE (cmpop
),
5037 /* Find ways of folding logical expressions of LHS and RHS:
5038 Try to merge two comparisons to the same innermost item.
5039 Look for range tests like "ch >= '0' && ch <= '9'".
5040 Look for combinations of simple terms on machines with expensive branches
5041 and evaluate the RHS unconditionally.
5043 For example, if we have p->a == 2 && p->b == 4 and we can make an
5044 object large enough to span both A and B, we can do this with a comparison
5045 against the object ANDed with the a mask.
5047 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5048 operations to do this with one comparison.
5050 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5051 function and the one above.
5053 CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
5054 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5056 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5059 We return the simplified tree or 0 if no optimization is possible.  */
/* NOTE(review): corrupted extraction — each original line is split across
   several physical lines, and every gap in the embedded original line
   numbers marks lines lost by the extraction (function return type, the
   lhs/rhs parameters, all brace-only lines, several `return 0;` bail-outs,
   parts of conditions and argument lists).  The code is preserved
   byte-for-byte below; only comments are added.  Restore from the upstream
   file before attempting to compile.  */
5062 fold_truth_andor_1 (location_t loc
, enum tree_code code
, tree truth_type
,
5065 /* If this is the "or" of two comparisons, we can do something if
5066 the comparisons are NE_EXPR.  If this is the "and", we can do something
5067 if the comparisons are EQ_EXPR.  I.e.,
5068 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5070 WANTED_CODE is this operation code.  For single bit fields, we can
5071 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5072 comparison for one-bit fields.  */
5074 enum tree_code wanted_code
;
5075 enum tree_code lcode
, rcode
;
5076 tree ll_arg
, lr_arg
, rl_arg
, rr_arg
;
5077 tree ll_inner
, lr_inner
, rl_inner
, rr_inner
;
5078 HOST_WIDE_INT ll_bitsize
, ll_bitpos
, lr_bitsize
, lr_bitpos
;
5079 HOST_WIDE_INT rl_bitsize
, rl_bitpos
, rr_bitsize
, rr_bitpos
;
5080 HOST_WIDE_INT xll_bitpos
, xlr_bitpos
, xrl_bitpos
, xrr_bitpos
;
5081 HOST_WIDE_INT lnbitsize
, lnbitpos
, rnbitsize
, rnbitpos
;
5082 int ll_unsignedp
, lr_unsignedp
, rl_unsignedp
, rr_unsignedp
;
5083 enum machine_mode ll_mode
, lr_mode
, rl_mode
, rr_mode
;
5084 enum machine_mode lnmode
, rnmode
;
5085 tree ll_mask
, lr_mask
, rl_mask
, rr_mask
;
5086 tree ll_and_mask
, lr_and_mask
, rl_and_mask
, rr_and_mask
;
5087 tree l_const
, r_const
;
5088 tree lntype
, rntype
, result
;
5089 HOST_WIDE_INT first_bit
, end_bit
;
5092 /* Start by getting the comparison codes.  Fail if anything is volatile.
5093 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5094 it were surrounded with a NE_EXPR.  */
5096 if (TREE_SIDE_EFFECTS (lhs
) || TREE_SIDE_EFFECTS (rhs
))
5099 lcode
= TREE_CODE (lhs
);
5100 rcode
= TREE_CODE (rhs
);
5102 if (lcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (lhs
, 1)))
5104 lhs
= build2 (NE_EXPR
, truth_type
, lhs
,
5105 build_int_cst (TREE_TYPE (lhs
), 0));
5109 if (rcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (rhs
, 1)))
5111 rhs
= build2 (NE_EXPR
, truth_type
, rhs
,
5112 build_int_cst (TREE_TYPE (rhs
), 0));
5116 if (TREE_CODE_CLASS (lcode
) != tcc_comparison
5117 || TREE_CODE_CLASS (rcode
) != tcc_comparison
)
5120 ll_arg
= TREE_OPERAND (lhs
, 0);
5121 lr_arg
= TREE_OPERAND (lhs
, 1);
5122 rl_arg
= TREE_OPERAND (rhs
, 0);
5123 rr_arg
= TREE_OPERAND (rhs
, 1);
5125 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
5126 if (simple_operand_p (ll_arg
)
5127 && simple_operand_p (lr_arg
))
5129 if (operand_equal_p (ll_arg
, rl_arg
, 0)
5130 && operand_equal_p (lr_arg
, rr_arg
, 0))
5132 result
= combine_comparisons (loc
, code
, lcode
, rcode
,
5133 truth_type
, ll_arg
, lr_arg
);
5137 else if (operand_equal_p (ll_arg
, rr_arg
, 0)
5138 && operand_equal_p (lr_arg
, rl_arg
, 0))
5140 result
= combine_comparisons (loc
, code
, lcode
,
5141 swap_tree_comparison (rcode
),
5142 truth_type
, ll_arg
, lr_arg
);
/* Canonicalize CODE to the non-short-circuit form for the rest of the
   function.  */
5148 code
= ((code
== TRUTH_AND_EXPR
|| code
== TRUTH_ANDIF_EXPR
)
5149 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
);
5151 /* If the RHS can be evaluated unconditionally and its operands are
5152 simple, it wins to evaluate the RHS unconditionally on machines
5153 with expensive branches.  In this case, this isn't a comparison
5154 that can be merged.  */
5156 if (BRANCH_COST (optimize_function_for_speed_p (cfun
),
5158 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg
))
5159 && simple_operand_p (rl_arg
)
5160 && simple_operand_p (rr_arg
))
5162 /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
5163 if (code
== TRUTH_OR_EXPR
5164 && lcode
== NE_EXPR
&& integer_zerop (lr_arg
)
5165 && rcode
== NE_EXPR
&& integer_zerop (rr_arg
)
5166 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
)
5167 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg
)))
5168 return build2_loc (loc
, NE_EXPR
, truth_type
,
5169 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
5171 build_int_cst (TREE_TYPE (ll_arg
), 0));
5173 /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
5174 if (code
== TRUTH_AND_EXPR
5175 && lcode
== EQ_EXPR
&& integer_zerop (lr_arg
)
5176 && rcode
== EQ_EXPR
&& integer_zerop (rr_arg
)
5177 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
)
5178 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg
)))
5179 return build2_loc (loc
, EQ_EXPR
, truth_type
,
5180 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
5182 build_int_cst (TREE_TYPE (ll_arg
), 0));
5185 /* See if the comparisons can be merged.  Then get all the parameters for
5188 if ((lcode
!= EQ_EXPR
&& lcode
!= NE_EXPR
)
5189 || (rcode
!= EQ_EXPR
&& rcode
!= NE_EXPR
))
/* Decompose each comparison operand into inner object, bit position/size,
   mode, signedness and mask.  */
5193 ll_inner
= decode_field_reference (loc
, ll_arg
,
5194 &ll_bitsize
, &ll_bitpos
, &ll_mode
,
5195 &ll_unsignedp
, &volatilep
, &ll_mask
,
5197 lr_inner
= decode_field_reference (loc
, lr_arg
,
5198 &lr_bitsize
, &lr_bitpos
, &lr_mode
,
5199 &lr_unsignedp
, &volatilep
, &lr_mask
,
5201 rl_inner
= decode_field_reference (loc
, rl_arg
,
5202 &rl_bitsize
, &rl_bitpos
, &rl_mode
,
5203 &rl_unsignedp
, &volatilep
, &rl_mask
,
5205 rr_inner
= decode_field_reference (loc
, rr_arg
,
5206 &rr_bitsize
, &rr_bitpos
, &rr_mode
,
5207 &rr_unsignedp
, &volatilep
, &rr_mask
,
5210 /* It must be true that the inner operation on the lhs of each
5211 comparison must be the same if we are to be able to do anything.
5212 Then see if we have constants.  If not, the same must be true for
5214 if (volatilep
|| ll_inner
== 0 || rl_inner
== 0
5215 || ! operand_equal_p (ll_inner
, rl_inner
, 0))
5218 if (TREE_CODE (lr_arg
) == INTEGER_CST
5219 && TREE_CODE (rr_arg
) == INTEGER_CST
)
5220 l_const
= lr_arg
, r_const
= rr_arg
;
5221 else if (lr_inner
== 0 || rr_inner
== 0
5222 || ! operand_equal_p (lr_inner
, rr_inner
, 0))
5225 l_const
= r_const
= 0;
5227 /* If either comparison code is not correct for our logical operation,
5228 fail.  However, we can convert a one-bit comparison against zero into
5229 the opposite comparison against that bit being set in the field.  */
5231 wanted_code
= (code
== TRUTH_AND_EXPR
? EQ_EXPR
: NE_EXPR
);
5232 if (lcode
!= wanted_code
)
5234 if (l_const
&& integer_zerop (l_const
) && integer_pow2p (ll_mask
))
5236 /* Make the left operand unsigned, since we are only interested
5237 in the value of one bit.  Otherwise we are doing the wrong
5246 /* This is analogous to the code for l_const above.  */
5247 if (rcode
!= wanted_code
)
5249 if (r_const
&& integer_zerop (r_const
) && integer_pow2p (rl_mask
))
5258 /* See if we can find a mode that contains both fields being compared on
5259 the left.  If we can't, fail.  Otherwise, update all constants and masks
5260 to be relative to a field of that size.  */
5261 first_bit
= MIN (ll_bitpos
, rl_bitpos
);
5262 end_bit
= MAX (ll_bitpos
+ ll_bitsize
, rl_bitpos
+ rl_bitsize
);
5263 lnmode
= get_best_mode (end_bit
- first_bit
, first_bit
, 0, 0,
5264 TYPE_ALIGN (TREE_TYPE (ll_inner
)), word_mode
,
5266 if (lnmode
== VOIDmode
)
5269 lnbitsize
= GET_MODE_BITSIZE (lnmode
);
5270 lnbitpos
= first_bit
& ~ (lnbitsize
- 1);
5271 lntype
= lang_hooks
.types
.type_for_size (lnbitsize
, 1);
5272 xll_bitpos
= ll_bitpos
- lnbitpos
, xrl_bitpos
= rl_bitpos
- lnbitpos
;
5274 if (BYTES_BIG_ENDIAN
)
5276 xll_bitpos
= lnbitsize
- xll_bitpos
- ll_bitsize
;
5277 xrl_bitpos
= lnbitsize
- xrl_bitpos
- rl_bitsize
;
5280 ll_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
, lntype
, ll_mask
),
5281 size_int (xll_bitpos
));
5282 rl_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
, lntype
, rl_mask
),
5283 size_int (xrl_bitpos
));
/* Shift the left-hand constants into position and diagnose comparisons
   that can never (or always) be true.  */
5287 l_const
= fold_convert_loc (loc
, lntype
, l_const
);
5288 l_const
= unextend (l_const
, ll_bitsize
, ll_unsignedp
, ll_and_mask
);
5289 l_const
= const_binop (LSHIFT_EXPR
, l_const
, size_int (xll_bitpos
));
5290 if (! integer_zerop (const_binop (BIT_AND_EXPR
, l_const
,
5291 fold_build1_loc (loc
, BIT_NOT_EXPR
,
5294 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
5296 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
5301 r_const
= fold_convert_loc (loc
, lntype
, r_const
);
5302 r_const
= unextend (r_const
, rl_bitsize
, rl_unsignedp
, rl_and_mask
);
5303 r_const
= const_binop (LSHIFT_EXPR
, r_const
, size_int (xrl_bitpos
));
5304 if (! integer_zerop (const_binop (BIT_AND_EXPR
, r_const
,
5305 fold_build1_loc (loc
, BIT_NOT_EXPR
,
5308 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
5310 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
5314 /* If the right sides are not constant, do the same for it.  Also,
5315 disallow this optimization if a size or signedness mismatch occurs
5316 between the left and right sides.  */
5319 if (ll_bitsize
!= lr_bitsize
|| rl_bitsize
!= rr_bitsize
5320 || ll_unsignedp
!= lr_unsignedp
|| rl_unsignedp
!= rr_unsignedp
5321 /* Make sure the two fields on the right
5322 correspond to the left without being swapped.  */
5323 || ll_bitpos
- rl_bitpos
!= lr_bitpos
- rr_bitpos
)
5326 first_bit
= MIN (lr_bitpos
, rr_bitpos
);
5327 end_bit
= MAX (lr_bitpos
+ lr_bitsize
, rr_bitpos
+ rr_bitsize
);
5328 rnmode
= get_best_mode (end_bit
- first_bit
, first_bit
, 0, 0,
5329 TYPE_ALIGN (TREE_TYPE (lr_inner
)), word_mode
,
5331 if (rnmode
== VOIDmode
)
5334 rnbitsize
= GET_MODE_BITSIZE (rnmode
);
5335 rnbitpos
= first_bit
& ~ (rnbitsize
- 1);
5336 rntype
= lang_hooks
.types
.type_for_size (rnbitsize
, 1);
5337 xlr_bitpos
= lr_bitpos
- rnbitpos
, xrr_bitpos
= rr_bitpos
- rnbitpos
;
5339 if (BYTES_BIG_ENDIAN
)
5341 xlr_bitpos
= rnbitsize
- xlr_bitpos
- lr_bitsize
;
5342 xrr_bitpos
= rnbitsize
- xrr_bitpos
- rr_bitsize
;
5345 lr_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
,
5347 size_int (xlr_bitpos
));
5348 rr_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
,
5350 size_int (xrr_bitpos
));
5352 /* Make a mask that corresponds to both fields being compared.
5353 Do this for both items being compared.  If the operands are the
5354 same size and the bits being compared are in the same position
5355 then we can do this by masking both and comparing the masked
5357 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
);
5358 lr_mask
= const_binop (BIT_IOR_EXPR
, lr_mask
, rr_mask
);
5359 if (lnbitsize
== rnbitsize
&& xll_bitpos
== xlr_bitpos
)
5361 lhs
= make_bit_field_ref (loc
, ll_inner
, lntype
, lnbitsize
, lnbitpos
,
5362 ll_unsignedp
|| rl_unsignedp
);
5363 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
5364 lhs
= build2 (BIT_AND_EXPR
, lntype
, lhs
, ll_mask
);
5366 rhs
= make_bit_field_ref (loc
, lr_inner
, rntype
, rnbitsize
, rnbitpos
,
5367 lr_unsignedp
|| rr_unsignedp
);
5368 if (! all_ones_mask_p (lr_mask
, rnbitsize
))
5369 rhs
= build2 (BIT_AND_EXPR
, rntype
, rhs
, lr_mask
);
5371 return build2_loc (loc
, wanted_code
, truth_type
, lhs
, rhs
);
5374 /* There is still another way we can do something:  If both pairs of
5375 fields being compared are adjacent, we may be able to make a wider
5376 field containing them both.
5378 Note that we still must mask the lhs/rhs expressions.  Furthermore,
5379 the mask must be shifted to account for the shift done by
5380 make_bit_field_ref.  */
5381 if ((ll_bitsize
+ ll_bitpos
== rl_bitpos
5382 && lr_bitsize
+ lr_bitpos
== rr_bitpos
)
5383 || (ll_bitpos
== rl_bitpos
+ rl_bitsize
5384 && lr_bitpos
== rr_bitpos
+ rr_bitsize
))
5388 lhs
= make_bit_field_ref (loc
, ll_inner
, lntype
,
5389 ll_bitsize
+ rl_bitsize
,
5390 MIN (ll_bitpos
, rl_bitpos
), ll_unsignedp
);
5391 rhs
= make_bit_field_ref (loc
, lr_inner
, rntype
,
5392 lr_bitsize
+ rr_bitsize
,
5393 MIN (lr_bitpos
, rr_bitpos
), lr_unsignedp
);
5395 ll_mask
= const_binop (RSHIFT_EXPR
, ll_mask
,
5396 size_int (MIN (xll_bitpos
, xrl_bitpos
)));
5397 lr_mask
= const_binop (RSHIFT_EXPR
, lr_mask
,
5398 size_int (MIN (xlr_bitpos
, xrr_bitpos
)));
5400 /* Convert to the smaller type before masking out unwanted bits.  */
5402 if (lntype
!= rntype
)
5404 if (lnbitsize
> rnbitsize
)
5406 lhs
= fold_convert_loc (loc
, rntype
, lhs
);
5407 ll_mask
= fold_convert_loc (loc
, rntype
, ll_mask
);
5410 else if (lnbitsize
< rnbitsize
)
5412 rhs
= fold_convert_loc (loc
, lntype
, rhs
);
5413 lr_mask
= fold_convert_loc (loc
, lntype
, lr_mask
);
5418 if (! all_ones_mask_p (ll_mask
, ll_bitsize
+ rl_bitsize
))
5419 lhs
= build2 (BIT_AND_EXPR
, type
, lhs
, ll_mask
);
5421 if (! all_ones_mask_p (lr_mask
, lr_bitsize
+ rr_bitsize
))
5422 rhs
= build2 (BIT_AND_EXPR
, type
, rhs
, lr_mask
);
5424 return build2_loc (loc
, wanted_code
, truth_type
, lhs
, rhs
);
5430 /* Handle the case of comparisons with constants.  If there is something in
5431 common between the masks, those bits of the constants must be the same.
5432 If not, the condition is always false.  Test for this to avoid generating
5433 incorrect code below.  */
5434 result
= const_binop (BIT_AND_EXPR
, ll_mask
, rl_mask
);
5435 if (! integer_zerop (result
)
5436 && simple_cst_equal (const_binop (BIT_AND_EXPR
, result
, l_const
),
5437 const_binop (BIT_AND_EXPR
, result
, r_const
)) != 1)
5439 if (wanted_code
== NE_EXPR
)
5441 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5442 return constant_boolean_node (true, truth_type
);
5446 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5447 return constant_boolean_node (false, truth_type
);
5451 /* Construct the expression we will return.  First get the component
5452 reference we will make.  Unless the mask is all ones the width of
5453 that field, perform the mask operation.  Then compare with the
5455 result
= make_bit_field_ref (loc
, ll_inner
, lntype
, lnbitsize
, lnbitpos
,
5456 ll_unsignedp
|| rl_unsignedp
);
5458 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
);
5459 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
5460 result
= build2_loc (loc
, BIT_AND_EXPR
, lntype
, result
, ll_mask
);
5462 return build2_loc (loc
, wanted_code
, truth_type
, result
,
5463 const_binop (BIT_IOR_EXPR
, l_const
, r_const
));
5466 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
/* NOTE(review): corrupted extraction — the rest of this header comment
   (orig. 5467-5469), the function's return type and final parameters, all
   local declarations for `inner`, `minmax_const`, `comp_const` and `tem`,
   the switch scaffolding (`switch (code)` and `case EQ_EXPR:` /
   `case GT_EXPR:` labels), and all brace-only lines are missing, as the
   gaps in the embedded numbering show.  The surviving code is preserved
   byte-for-byte; only comments are added.  */
5470 optimize_minmax_comparison (location_t loc
, enum tree_code code
, tree type
,
5474 enum tree_code op_code
;
5477 int consts_equal
, consts_lt
;
5480 STRIP_SIGN_NOPS (arg0
);
5482 op_code
= TREE_CODE (arg0
);
5483 minmax_const
= TREE_OPERAND (arg0
, 1);
5484 comp_const
= fold_convert_loc (loc
, TREE_TYPE (arg0
), op1
);
5485 consts_equal
= tree_int_cst_equal (minmax_const
, comp_const
);
5486 consts_lt
= tree_int_cst_lt (minmax_const
, comp_const
);
5487 inner
= TREE_OPERAND (arg0
, 0);
5489 /* If something does not permit us to optimize, return the original tree.  */
5490 if ((op_code
!= MIN_EXPR
&& op_code
!= MAX_EXPR
)
5491 || TREE_CODE (comp_const
) != INTEGER_CST
5492 || TREE_OVERFLOW (comp_const
)
5493 || TREE_CODE (minmax_const
) != INTEGER_CST
5494 || TREE_OVERFLOW (minmax_const
))
5497 /* Now handle all the various comparison codes.  We only handle EQ_EXPR
5498 and GT_EXPR, doing the rest with recursive calls using logical
/* NE/LT/LE are handled by inverting the comparison and recursing.  */
5502 case NE_EXPR
:  case LT_EXPR
:  case LE_EXPR
:
5505 = optimize_minmax_comparison (loc
,
5506 invert_tree_comparison (code
, false),
5509 return invert_truthvalue_loc (loc
, tem
);
/* GE is rewritten as (EQ || GT) and both halves recursed on.  */
5515 fold_build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
5516 optimize_minmax_comparison
5517 (loc
, EQ_EXPR
, type
, arg0
, comp_const
),
5518 optimize_minmax_comparison
5519 (loc
, GT_EXPR
, type
, arg0
, comp_const
));
5522 if (op_code
== MAX_EXPR
&& consts_equal
)
5523 /* MAX (X, 0) == 0  ->  X <= 0  */
5524 return fold_build2_loc (loc
, LE_EXPR
, type
, inner
, comp_const
);
5526 else if (op_code
== MAX_EXPR
&& consts_lt
)
5527 /* MAX (X, 0) == 5  ->  X == 5   */
5528 return fold_build2_loc (loc
, EQ_EXPR
, type
, inner
, comp_const
);
5530 else if (op_code
== MAX_EXPR
)
5531 /* MAX (X, 0) == -1  ->  false  */
5532 return omit_one_operand_loc (loc
, type
, integer_zero_node
, inner
);
5534 else if (consts_equal
)
5535 /* MIN (X, 0) == 0  ->  X >= 0  */
5536 return fold_build2_loc (loc
, GE_EXPR
, type
, inner
, comp_const
);
5539 /* MIN (X, 0) == 5  ->  false  */
5540 return omit_one_operand_loc (loc
, type
, integer_zero_node
, inner
);
5543 /* MIN (X, 0) == -1  ->  X == -1  */
5544 return fold_build2_loc (loc
, EQ_EXPR
, type
, inner
, comp_const
);
5547 if (op_code
== MAX_EXPR
&& (consts_equal
|| consts_lt
))
5548 /* MAX (X, 0) > 0  ->  X > 0
5549 MAX (X, 0) > 5  ->  X > 5  */
5550 return fold_build2_loc (loc
, GT_EXPR
, type
, inner
, comp_const
);
5552 else if (op_code
== MAX_EXPR
)
5553 /* MAX (X, 0) > -1  ->  true  */
5554 return omit_one_operand_loc (loc
, type
, integer_one_node
, inner
);
5556 else if (op_code
== MIN_EXPR
&& (consts_equal
|| consts_lt
))
5557 /* MIN (X, 0) > 0  ->  false
5558 MIN (X, 0) > 5  ->  false  */
5559 return omit_one_operand_loc (loc
, type
, integer_zero_node
, inner
);
5562 /* MIN (X, 0) > -1  ->  X > -1  */
5563 return fold_build2_loc (loc
, GT_EXPR
, type
, inner
, comp_const
);
5570 /* T is an integer expression that is being multiplied, divided, or taken a
5571 modulus (CODE says which and what kind of divide or modulus) by a
5572 constant C.  See if we can eliminate that operation by folding it with
5573 other operations already in T.  WIDE_TYPE, if non-null, is a type that
5574 should be used for the computation if wider than our type.
5576 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5577 (X * 2) + (Y * 4).  We must, however, be assured that either the original
5578 expression would not overflow or that overflow is undefined for the type
5579 in the language in question.
5581 If we return a non-null expression, it is an equivalent form of the
5582 original computation, but need not be in the original type.
5584 We set *STRICT_OVERFLOW_P to true if the return values depends on
5585 signed overflow being undefined.  Otherwise we do not change
5586 *STRICT_OVERFLOW_P.  */
/* NOTE(review): corrupted extraction — the `static tree` return type
   (orig. 5588), the depth-limiting scaffolding around the call below
   (orig. 5596-5603: the static depth counter, its increment, and the
   depth > 3 bail-out implied by the comment), and the trailing
   decrement/`return ret;` (orig. 5605-5608) are missing, as the gaps in
   the embedded numbering show.  Surviving code is byte-for-byte; only
   comments are added.  */
5589 extract_muldiv (tree t
, tree c
, enum tree_code code
, tree wide_type
,
5590 bool *strict_overflow_p
)
5592 /* To avoid exponential search depth, refuse to allow recursion past
5593 three levels.  Beyond that (1) it's highly unlikely that we'll find
5594 something interesting and (2) we've probably processed it before
5595 when we built the inner expression.  */
5604 ret
= extract_muldiv_1 (t
, c
, code
, wide_type
, strict_overflow_p
);
5611 extract_muldiv_1 (tree t
, tree c
, enum tree_code code
, tree wide_type
,
5612 bool *strict_overflow_p
)
5614 tree type
= TREE_TYPE (t
);
5615 enum tree_code tcode
= TREE_CODE (t
);
5616 tree ctype
= (wide_type
!= 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type
))
5617 > GET_MODE_SIZE (TYPE_MODE (type
)))
5618 ? wide_type
: type
);
5620 int same_p
= tcode
== code
;
5621 tree op0
= NULL_TREE
, op1
= NULL_TREE
;
5622 bool sub_strict_overflow_p
;
5624 /* Don't deal with constants of zero here; they confuse the code below. */
5625 if (integer_zerop (c
))
5628 if (TREE_CODE_CLASS (tcode
) == tcc_unary
)
5629 op0
= TREE_OPERAND (t
, 0);
5631 if (TREE_CODE_CLASS (tcode
) == tcc_binary
)
5632 op0
= TREE_OPERAND (t
, 0), op1
= TREE_OPERAND (t
, 1);
5634 /* Note that we need not handle conditional operations here since fold
5635 already handles those cases. So just do arithmetic here. */
5639 /* For a constant, we can always simplify if we are a multiply
5640 or (for divide and modulus) if it is a multiple of our constant. */
5641 if (code
== MULT_EXPR
5642 || integer_zerop (const_binop (TRUNC_MOD_EXPR
, t
, c
)))
5643 return const_binop (code
, fold_convert (ctype
, t
),
5644 fold_convert (ctype
, c
));
5647 CASE_CONVERT
: case NON_LVALUE_EXPR
:
5648 /* If op0 is an expression ... */
5649 if ((COMPARISON_CLASS_P (op0
)
5650 || UNARY_CLASS_P (op0
)
5651 || BINARY_CLASS_P (op0
)
5652 || VL_EXP_CLASS_P (op0
)
5653 || EXPRESSION_CLASS_P (op0
))
5654 /* ... and has wrapping overflow, and its type is smaller
5655 than ctype, then we cannot pass through as widening. */
5656 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0
))
5657 && (TYPE_PRECISION (ctype
)
5658 > TYPE_PRECISION (TREE_TYPE (op0
))))
5659 /* ... or this is a truncation (t is narrower than op0),
5660 then we cannot pass through this narrowing. */
5661 || (TYPE_PRECISION (type
)
5662 < TYPE_PRECISION (TREE_TYPE (op0
)))
5663 /* ... or signedness changes for division or modulus,
5664 then we cannot pass through this conversion. */
5665 || (code
!= MULT_EXPR
5666 && (TYPE_UNSIGNED (ctype
)
5667 != TYPE_UNSIGNED (TREE_TYPE (op0
))))
5668 /* ... or has undefined overflow while the converted to
5669 type has not, we cannot do the operation in the inner type
5670 as that would introduce undefined overflow. */
5671 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0
))
5672 && !TYPE_OVERFLOW_UNDEFINED (type
))))
5675 /* Pass the constant down and see if we can make a simplification. If
5676 we can, replace this expression with the inner simplification for
5677 possible later conversion to our or some other type. */
5678 if ((t2
= fold_convert (TREE_TYPE (op0
), c
)) != 0
5679 && TREE_CODE (t2
) == INTEGER_CST
5680 && !TREE_OVERFLOW (t2
)
5681 && (0 != (t1
= extract_muldiv (op0
, t2
, code
,
5683 ? ctype
: NULL_TREE
,
5684 strict_overflow_p
))))
5689 /* If widening the type changes it from signed to unsigned, then we
5690 must avoid building ABS_EXPR itself as unsigned. */
5691 if (TYPE_UNSIGNED (ctype
) && !TYPE_UNSIGNED (type
))
5693 tree cstype
= (*signed_type_for
) (ctype
);
5694 if ((t1
= extract_muldiv (op0
, c
, code
, cstype
, strict_overflow_p
))
5697 t1
= fold_build1 (tcode
, cstype
, fold_convert (cstype
, t1
));
5698 return fold_convert (ctype
, t1
);
5702 /* If the constant is negative, we cannot simplify this. */
5703 if (tree_int_cst_sgn (c
) == -1)
5707 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
, strict_overflow_p
))
5709 return fold_build1 (tcode
, ctype
, fold_convert (ctype
, t1
));
5712 case MIN_EXPR
: case MAX_EXPR
:
5713 /* If widening the type changes the signedness, then we can't perform
5714 this optimization as that changes the result. */
5715 if (TYPE_UNSIGNED (ctype
) != TYPE_UNSIGNED (type
))
5718 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5719 sub_strict_overflow_p
= false;
5720 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
,
5721 &sub_strict_overflow_p
)) != 0
5722 && (t2
= extract_muldiv (op1
, c
, code
, wide_type
,
5723 &sub_strict_overflow_p
)) != 0)
5725 if (tree_int_cst_sgn (c
) < 0)
5726 tcode
= (tcode
== MIN_EXPR
? MAX_EXPR
: MIN_EXPR
);
5727 if (sub_strict_overflow_p
)
5728 *strict_overflow_p
= true;
5729 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
5730 fold_convert (ctype
, t2
));
5734 case LSHIFT_EXPR
: case RSHIFT_EXPR
:
5735 /* If the second operand is constant, this is a multiplication
5736 or floor division, by a power of two, so we can treat it that
5737 way unless the multiplier or divisor overflows. Signed
5738 left-shift overflow is implementation-defined rather than
5739 undefined in C90, so do not convert signed left shift into
5741 if (TREE_CODE (op1
) == INTEGER_CST
5742 && (tcode
== RSHIFT_EXPR
|| TYPE_UNSIGNED (TREE_TYPE (op0
)))
5743 /* const_binop may not detect overflow correctly,
5744 so check for it explicitly here. */
5745 && TYPE_PRECISION (TREE_TYPE (size_one_node
)) > TREE_INT_CST_LOW (op1
)
5746 && TREE_INT_CST_HIGH (op1
) == 0
5747 && 0 != (t1
= fold_convert (ctype
,
5748 const_binop (LSHIFT_EXPR
,
5751 && !TREE_OVERFLOW (t1
))
5752 return extract_muldiv (build2 (tcode
== LSHIFT_EXPR
5753 ? MULT_EXPR
: FLOOR_DIV_EXPR
,
5755 fold_convert (ctype
, op0
),
5757 c
, code
, wide_type
, strict_overflow_p
);
5760 case PLUS_EXPR
: case MINUS_EXPR
:
5761 /* See if we can eliminate the operation on both sides. If we can, we
5762 can return a new PLUS or MINUS. If we can't, the only remaining
5763 cases where we can do anything are if the second operand is a
5765 sub_strict_overflow_p
= false;
5766 t1
= extract_muldiv (op0
, c
, code
, wide_type
, &sub_strict_overflow_p
);
5767 t2
= extract_muldiv (op1
, c
, code
, wide_type
, &sub_strict_overflow_p
);
5768 if (t1
!= 0 && t2
!= 0
5769 && (code
== MULT_EXPR
5770 /* If not multiplication, we can only do this if both operands
5771 are divisible by c. */
5772 || (multiple_of_p (ctype
, op0
, c
)
5773 && multiple_of_p (ctype
, op1
, c
))))
5775 if (sub_strict_overflow_p
)
5776 *strict_overflow_p
= true;
5777 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
5778 fold_convert (ctype
, t2
));
5781 /* If this was a subtraction, negate OP1 and set it to be an addition.
5782 This simplifies the logic below. */
5783 if (tcode
== MINUS_EXPR
)
5785 tcode
= PLUS_EXPR
, op1
= negate_expr (op1
);
5786 /* If OP1 was not easily negatable, the constant may be OP0. */
5787 if (TREE_CODE (op0
) == INTEGER_CST
)
5798 if (TREE_CODE (op1
) != INTEGER_CST
)
5801 /* If either OP1 or C are negative, this optimization is not safe for
5802 some of the division and remainder types while for others we need
5803 to change the code. */
5804 if (tree_int_cst_sgn (op1
) < 0 || tree_int_cst_sgn (c
) < 0)
5806 if (code
== CEIL_DIV_EXPR
)
5807 code
= FLOOR_DIV_EXPR
;
5808 else if (code
== FLOOR_DIV_EXPR
)
5809 code
= CEIL_DIV_EXPR
;
5810 else if (code
!= MULT_EXPR
5811 && code
!= CEIL_MOD_EXPR
&& code
!= FLOOR_MOD_EXPR
)
5815 /* If it's a multiply or a division/modulus operation of a multiple
5816 of our constant, do the operation and verify it doesn't overflow. */
5817 if (code
== MULT_EXPR
5818 || integer_zerop (const_binop (TRUNC_MOD_EXPR
, op1
, c
)))
5820 op1
= const_binop (code
, fold_convert (ctype
, op1
),
5821 fold_convert (ctype
, c
));
5822 /* We allow the constant to overflow with wrapping semantics. */
5824 || (TREE_OVERFLOW (op1
) && !TYPE_OVERFLOW_WRAPS (ctype
)))
5830 /* If we have an unsigned type is not a sizetype, we cannot widen
5831 the operation since it will change the result if the original
5832 computation overflowed. */
5833 if (TYPE_UNSIGNED (ctype
)
5837 /* If we were able to eliminate our operation from the first side,
5838 apply our operation to the second side and reform the PLUS. */
5839 if (t1
!= 0 && (TREE_CODE (t1
) != code
|| code
== MULT_EXPR
))
5840 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
), op1
);
5842 /* The last case is if we are a multiply. In that case, we can
5843 apply the distributive law to commute the multiply and addition
5844 if the multiplication of the constants doesn't overflow. */
5845 if (code
== MULT_EXPR
)
5846 return fold_build2 (tcode
, ctype
,
5847 fold_build2 (code
, ctype
,
5848 fold_convert (ctype
, op0
),
5849 fold_convert (ctype
, c
)),
5855 /* We have a special case here if we are doing something like
5856 (C * 8) % 4 since we know that's zero. */
5857 if ((code
== TRUNC_MOD_EXPR
|| code
== CEIL_MOD_EXPR
5858 || code
== FLOOR_MOD_EXPR
|| code
== ROUND_MOD_EXPR
)
5859 /* If the multiplication can overflow we cannot optimize this. */
5860 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t
))
5861 && TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
5862 && integer_zerop (const_binop (TRUNC_MOD_EXPR
, op1
, c
)))
5864 *strict_overflow_p
= true;
5865 return omit_one_operand (type
, integer_zero_node
, op0
);
5868 /* ... fall through ... */
5870 case TRUNC_DIV_EXPR
: case CEIL_DIV_EXPR
: case FLOOR_DIV_EXPR
:
5871 case ROUND_DIV_EXPR
: case EXACT_DIV_EXPR
:
5872 /* If we can extract our operation from the LHS, do so and return a
5873 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5874 do something only if the second operand is a constant. */
5876 && (t1
= extract_muldiv (op0
, c
, code
, wide_type
,
5877 strict_overflow_p
)) != 0)
5878 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
5879 fold_convert (ctype
, op1
));
5880 else if (tcode
== MULT_EXPR
&& code
== MULT_EXPR
5881 && (t1
= extract_muldiv (op1
, c
, code
, wide_type
,
5882 strict_overflow_p
)) != 0)
5883 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
5884 fold_convert (ctype
, t1
));
5885 else if (TREE_CODE (op1
) != INTEGER_CST
)
5888 /* If these are the same operation types, we can associate them
5889 assuming no overflow. */
5894 mul
= double_int_mul_with_sign
5896 (tree_to_double_int (op1
),
5897 TYPE_PRECISION (ctype
), TYPE_UNSIGNED (ctype
)),
5899 (tree_to_double_int (c
),
5900 TYPE_PRECISION (ctype
), TYPE_UNSIGNED (ctype
)),
5901 false, &overflow_p
);
5902 overflow_p
= ((!TYPE_UNSIGNED (ctype
) && overflow_p
)
5903 | TREE_OVERFLOW (c
) | TREE_OVERFLOW (op1
));
5904 if (!double_int_fits_to_tree_p (ctype
, mul
)
5905 && ((TYPE_UNSIGNED (ctype
) && tcode
!= MULT_EXPR
)
5906 || !TYPE_UNSIGNED (ctype
)))
5909 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
5910 double_int_to_tree (ctype
, mul
));
5913 /* If these operations "cancel" each other, we have the main
5914 optimizations of this pass, which occur when either constant is a
5915 multiple of the other, in which case we replace this with either an
5916 operation or CODE or TCODE.
5918 If we have an unsigned type, we cannot do this since it will change
5919 the result if the original computation overflowed. */
5920 if (TYPE_OVERFLOW_UNDEFINED (ctype
)
5921 && ((code
== MULT_EXPR
&& tcode
== EXACT_DIV_EXPR
)
5922 || (tcode
== MULT_EXPR
5923 && code
!= TRUNC_MOD_EXPR
&& code
!= CEIL_MOD_EXPR
5924 && code
!= FLOOR_MOD_EXPR
&& code
!= ROUND_MOD_EXPR
5925 && code
!= MULT_EXPR
)))
5927 if (integer_zerop (const_binop (TRUNC_MOD_EXPR
, op1
, c
)))
5929 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
5930 *strict_overflow_p
= true;
5931 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
5932 fold_convert (ctype
,
5933 const_binop (TRUNC_DIV_EXPR
,
5936 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR
, c
, op1
)))
5938 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
5939 *strict_overflow_p
= true;
5940 return fold_build2 (code
, ctype
, fold_convert (ctype
, op0
),
5941 fold_convert (ctype
,
5942 const_binop (TRUNC_DIV_EXPR
,
5955 /* Return a node which has the indicated constant VALUE (either 0 or
5956 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
5957 and is of the indicated TYPE. */
5960 constant_boolean_node (bool value
, tree type
)
5962 if (type
== integer_type_node
)
5963 return value
? integer_one_node
: integer_zero_node
;
5964 else if (type
== boolean_type_node
)
5965 return value
? boolean_true_node
: boolean_false_node
;
5966 else if (TREE_CODE (type
) == VECTOR_TYPE
)
5967 return build_vector_from_val (type
,
5968 build_int_cst (TREE_TYPE (type
),
5971 return fold_convert (type
, value
? integer_one_node
: integer_zero_node
);
5975 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5976 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5977 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5978 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5979 COND is the first argument to CODE; otherwise (as in the example
5980 given here), it is the second argument. TYPE is the type of the
5981 original expression. Return NULL_TREE if no simplification is
5985 fold_binary_op_with_conditional_arg (location_t loc
,
5986 enum tree_code code
,
5987 tree type
, tree op0
, tree op1
,
5988 tree cond
, tree arg
, int cond_first_p
)
5990 tree cond_type
= cond_first_p
? TREE_TYPE (op0
) : TREE_TYPE (op1
);
5991 tree arg_type
= cond_first_p
? TREE_TYPE (op1
) : TREE_TYPE (op0
);
5992 tree test
, true_value
, false_value
;
5993 tree lhs
= NULL_TREE
;
5994 tree rhs
= NULL_TREE
;
5996 if (TREE_CODE (cond
) == COND_EXPR
)
5998 test
= TREE_OPERAND (cond
, 0);
5999 true_value
= TREE_OPERAND (cond
, 1);
6000 false_value
= TREE_OPERAND (cond
, 2);
6001 /* If this operand throws an expression, then it does not make
6002 sense to try to perform a logical or arithmetic operation
6004 if (VOID_TYPE_P (TREE_TYPE (true_value
)))
6006 if (VOID_TYPE_P (TREE_TYPE (false_value
)))
6011 tree testtype
= TREE_TYPE (cond
);
6013 true_value
= constant_boolean_node (true, testtype
);
6014 false_value
= constant_boolean_node (false, testtype
);
6017 /* This transformation is only worthwhile if we don't have to wrap ARG
6018 in a SAVE_EXPR and the operation can be simplified on at least one
6019 of the branches once its pushed inside the COND_EXPR. */
6020 if (!TREE_CONSTANT (arg
)
6021 && (TREE_SIDE_EFFECTS (arg
)
6022 || TREE_CONSTANT (true_value
) || TREE_CONSTANT (false_value
)))
6025 arg
= fold_convert_loc (loc
, arg_type
, arg
);
6028 true_value
= fold_convert_loc (loc
, cond_type
, true_value
);
6030 lhs
= fold_build2_loc (loc
, code
, type
, true_value
, arg
);
6032 lhs
= fold_build2_loc (loc
, code
, type
, arg
, true_value
);
6036 false_value
= fold_convert_loc (loc
, cond_type
, false_value
);
6038 rhs
= fold_build2_loc (loc
, code
, type
, false_value
, arg
);
6040 rhs
= fold_build2_loc (loc
, code
, type
, arg
, false_value
);
6043 /* Check that we have simplified at least one of the branches. */
6044 if (!TREE_CONSTANT (arg
) && !TREE_CONSTANT (lhs
) && !TREE_CONSTANT (rhs
))
6047 return fold_build3_loc (loc
, COND_EXPR
, type
, test
, lhs
, rhs
);
6051 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6053 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6054 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6055 ADDEND is the same as X.
6057 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6058 and finite. The problematic cases are when X is zero, and its mode
6059 has signed zeros. In the case of rounding towards -infinity,
6060 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6061 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6064 fold_real_zero_addition_p (const_tree type
, const_tree addend
, int negate
)
6066 if (!real_zerop (addend
))
6069 /* Don't allow the fold with -fsignaling-nans. */
6070 if (HONOR_SNANS (TYPE_MODE (type
)))
6073 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6074 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type
)))
6077 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6078 if (TREE_CODE (addend
) == REAL_CST
6079 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend
)))
6082 /* The mode has signed zeros, and we have to honor their sign.
6083 In this situation, there is only one case we can return true for.
6084 X - 0 is the same as X unless rounding towards -infinity is
6086 return negate
&& !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
));
6089 /* Subroutine of fold() that checks comparisons of built-in math
6090 functions against real constants.
6092 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6093 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6094 is the type of the result and ARG0 and ARG1 are the operands of the
6095 comparison. ARG1 must be a TREE_REAL_CST.
6097 The function returns the constant folded tree if a simplification
6098 can be made, and NULL_TREE otherwise. */
6101 fold_mathfn_compare (location_t loc
,
6102 enum built_in_function fcode
, enum tree_code code
,
6103 tree type
, tree arg0
, tree arg1
)
6107 if (BUILTIN_SQRT_P (fcode
))
6109 tree arg
= CALL_EXPR_ARG (arg0
, 0);
6110 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (arg0
));
6112 c
= TREE_REAL_CST (arg1
);
6113 if (REAL_VALUE_NEGATIVE (c
))
6115 /* sqrt(x) < y is always false, if y is negative. */
6116 if (code
== EQ_EXPR
|| code
== LT_EXPR
|| code
== LE_EXPR
)
6117 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
6119 /* sqrt(x) > y is always true, if y is negative and we
6120 don't care about NaNs, i.e. negative values of x. */
6121 if (code
== NE_EXPR
|| !HONOR_NANS (mode
))
6122 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg
);
6124 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6125 return fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6126 build_real (TREE_TYPE (arg
), dconst0
));
6128 else if (code
== GT_EXPR
|| code
== GE_EXPR
)
6132 REAL_ARITHMETIC (c2
, MULT_EXPR
, c
, c
);
6133 real_convert (&c2
, mode
, &c2
);
6135 if (REAL_VALUE_ISINF (c2
))
6137 /* sqrt(x) > y is x == +Inf, when y is very large. */
6138 if (HONOR_INFINITIES (mode
))
6139 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg
,
6140 build_real (TREE_TYPE (arg
), c2
));
6142 /* sqrt(x) > y is always false, when y is very large
6143 and we don't care about infinities. */
6144 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
6147 /* sqrt(x) > c is the same as x > c*c. */
6148 return fold_build2_loc (loc
, code
, type
, arg
,
6149 build_real (TREE_TYPE (arg
), c2
));
6151 else if (code
== LT_EXPR
|| code
== LE_EXPR
)
6155 REAL_ARITHMETIC (c2
, MULT_EXPR
, c
, c
);
6156 real_convert (&c2
, mode
, &c2
);
6158 if (REAL_VALUE_ISINF (c2
))
6160 /* sqrt(x) < y is always true, when y is a very large
6161 value and we don't care about NaNs or Infinities. */
6162 if (! HONOR_NANS (mode
) && ! HONOR_INFINITIES (mode
))
6163 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg
);
6165 /* sqrt(x) < y is x != +Inf when y is very large and we
6166 don't care about NaNs. */
6167 if (! HONOR_NANS (mode
))
6168 return fold_build2_loc (loc
, NE_EXPR
, type
, arg
,
6169 build_real (TREE_TYPE (arg
), c2
));
6171 /* sqrt(x) < y is x >= 0 when y is very large and we
6172 don't care about Infinities. */
6173 if (! HONOR_INFINITIES (mode
))
6174 return fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6175 build_real (TREE_TYPE (arg
), dconst0
));
6177 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6178 arg
= save_expr (arg
);
6179 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
6180 fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6181 build_real (TREE_TYPE (arg
),
6183 fold_build2_loc (loc
, NE_EXPR
, type
, arg
,
6184 build_real (TREE_TYPE (arg
),
6188 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6189 if (! HONOR_NANS (mode
))
6190 return fold_build2_loc (loc
, code
, type
, arg
,
6191 build_real (TREE_TYPE (arg
), c2
));
6193 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6194 arg
= save_expr (arg
);
6195 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
6196 fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6197 build_real (TREE_TYPE (arg
),
6199 fold_build2_loc (loc
, code
, type
, arg
,
6200 build_real (TREE_TYPE (arg
),
6208 /* Subroutine of fold() that optimizes comparisons against Infinities,
6209 either +Inf or -Inf.
6211 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6212 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6213 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6215 The function returns the constant folded tree if a simplification
6216 can be made, and NULL_TREE otherwise. */
6219 fold_inf_compare (location_t loc
, enum tree_code code
, tree type
,
6220 tree arg0
, tree arg1
)
6222 enum machine_mode mode
;
6223 REAL_VALUE_TYPE max
;
6227 mode
= TYPE_MODE (TREE_TYPE (arg0
));
6229 /* For negative infinity swap the sense of the comparison. */
6230 neg
= REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
));
6232 code
= swap_tree_comparison (code
);
6237 /* x > +Inf is always false, if with ignore sNANs. */
6238 if (HONOR_SNANS (mode
))
6240 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
6243 /* x <= +Inf is always true, if we don't case about NaNs. */
6244 if (! HONOR_NANS (mode
))
6245 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
6247 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6248 arg0
= save_expr (arg0
);
6249 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
, arg0
);
6253 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6254 real_maxval (&max
, neg
, mode
);
6255 return fold_build2_loc (loc
, neg
? LT_EXPR
: GT_EXPR
, type
,
6256 arg0
, build_real (TREE_TYPE (arg0
), max
));
6259 /* x < +Inf is always equal to x <= DBL_MAX. */
6260 real_maxval (&max
, neg
, mode
);
6261 return fold_build2_loc (loc
, neg
? GE_EXPR
: LE_EXPR
, type
,
6262 arg0
, build_real (TREE_TYPE (arg0
), max
));
6265 /* x != +Inf is always equal to !(x > DBL_MAX). */
6266 real_maxval (&max
, neg
, mode
);
6267 if (! HONOR_NANS (mode
))
6268 return fold_build2_loc (loc
, neg
? GE_EXPR
: LE_EXPR
, type
,
6269 arg0
, build_real (TREE_TYPE (arg0
), max
));
6271 temp
= fold_build2_loc (loc
, neg
? LT_EXPR
: GT_EXPR
, type
,
6272 arg0
, build_real (TREE_TYPE (arg0
), max
));
6273 return fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, temp
);
6282 /* Subroutine of fold() that optimizes comparisons of a division by
6283 a nonzero integer constant against an integer constant, i.e.
6286 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6287 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6288 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6290 The function returns the constant folded tree if a simplification
6291 can be made, and NULL_TREE otherwise. */
6294 fold_div_compare (location_t loc
,
6295 enum tree_code code
, tree type
, tree arg0
, tree arg1
)
6297 tree prod
, tmp
, hi
, lo
;
6298 tree arg00
= TREE_OPERAND (arg0
, 0);
6299 tree arg01
= TREE_OPERAND (arg0
, 1);
6301 bool unsigned_p
= TYPE_UNSIGNED (TREE_TYPE (arg0
));
6305 /* We have to do this the hard way to detect unsigned overflow.
6306 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6307 overflow
= mul_double_with_sign (TREE_INT_CST_LOW (arg01
),
6308 TREE_INT_CST_HIGH (arg01
),
6309 TREE_INT_CST_LOW (arg1
),
6310 TREE_INT_CST_HIGH (arg1
),
6311 &val
.low
, &val
.high
, unsigned_p
);
6312 prod
= force_fit_type_double (TREE_TYPE (arg00
), val
, -1, overflow
);
6313 neg_overflow
= false;
6317 tmp
= int_const_binop (MINUS_EXPR
, arg01
,
6318 build_int_cst (TREE_TYPE (arg01
), 1));
6321 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6322 overflow
= add_double_with_sign (TREE_INT_CST_LOW (prod
),
6323 TREE_INT_CST_HIGH (prod
),
6324 TREE_INT_CST_LOW (tmp
),
6325 TREE_INT_CST_HIGH (tmp
),
6326 &val
.low
, &val
.high
, unsigned_p
);
6327 hi
= force_fit_type_double (TREE_TYPE (arg00
), val
,
6328 -1, overflow
| TREE_OVERFLOW (prod
));
6330 else if (tree_int_cst_sgn (arg01
) >= 0)
6332 tmp
= int_const_binop (MINUS_EXPR
, arg01
,
6333 build_int_cst (TREE_TYPE (arg01
), 1));
6334 switch (tree_int_cst_sgn (arg1
))
6337 neg_overflow
= true;
6338 lo
= int_const_binop (MINUS_EXPR
, prod
, tmp
);
6343 lo
= fold_negate_const (tmp
, TREE_TYPE (arg0
));
6348 hi
= int_const_binop (PLUS_EXPR
, prod
, tmp
);
6358 /* A negative divisor reverses the relational operators. */
6359 code
= swap_tree_comparison (code
);
6361 tmp
= int_const_binop (PLUS_EXPR
, arg01
,
6362 build_int_cst (TREE_TYPE (arg01
), 1));
6363 switch (tree_int_cst_sgn (arg1
))
6366 hi
= int_const_binop (MINUS_EXPR
, prod
, tmp
);
6371 hi
= fold_negate_const (tmp
, TREE_TYPE (arg0
));
6376 neg_overflow
= true;
6377 lo
= int_const_binop (PLUS_EXPR
, prod
, tmp
);
6389 if (TREE_OVERFLOW (lo
) && TREE_OVERFLOW (hi
))
6390 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg00
);
6391 if (TREE_OVERFLOW (hi
))
6392 return fold_build2_loc (loc
, GE_EXPR
, type
, arg00
, lo
);
6393 if (TREE_OVERFLOW (lo
))
6394 return fold_build2_loc (loc
, LE_EXPR
, type
, arg00
, hi
);
6395 return build_range_check (loc
, type
, arg00
, 1, lo
, hi
);
6398 if (TREE_OVERFLOW (lo
) && TREE_OVERFLOW (hi
))
6399 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg00
);
6400 if (TREE_OVERFLOW (hi
))
6401 return fold_build2_loc (loc
, LT_EXPR
, type
, arg00
, lo
);
6402 if (TREE_OVERFLOW (lo
))
6403 return fold_build2_loc (loc
, GT_EXPR
, type
, arg00
, hi
);
6404 return build_range_check (loc
, type
, arg00
, 0, lo
, hi
);
6407 if (TREE_OVERFLOW (lo
))
6409 tmp
= neg_overflow
? integer_zero_node
: integer_one_node
;
6410 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6412 return fold_build2_loc (loc
, LT_EXPR
, type
, arg00
, lo
);
6415 if (TREE_OVERFLOW (hi
))
6417 tmp
= neg_overflow
? integer_zero_node
: integer_one_node
;
6418 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6420 return fold_build2_loc (loc
, LE_EXPR
, type
, arg00
, hi
);
6423 if (TREE_OVERFLOW (hi
))
6425 tmp
= neg_overflow
? integer_one_node
: integer_zero_node
;
6426 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6428 return fold_build2_loc (loc
, GT_EXPR
, type
, arg00
, hi
);
6431 if (TREE_OVERFLOW (lo
))
6433 tmp
= neg_overflow
? integer_one_node
: integer_zero_node
;
6434 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6436 return fold_build2_loc (loc
, GE_EXPR
, type
, arg00
, lo
);
6446 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6447 equality/inequality test, then return a simplified form of the test
6448 using a sign testing. Otherwise return NULL. TYPE is the desired
6452 fold_single_bit_test_into_sign_test (location_t loc
,
6453 enum tree_code code
, tree arg0
, tree arg1
,
6456 /* If this is testing a single bit, we can optimize the test. */
6457 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
6458 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
6459 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
6461 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6462 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6463 tree arg00
= sign_bit_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg0
, 1));
6465 if (arg00
!= NULL_TREE
6466 /* This is only a win if casting to a signed type is cheap,
6467 i.e. when arg00's type is not a partial mode. */
6468 && TYPE_PRECISION (TREE_TYPE (arg00
))
6469 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00
))))
6471 tree stype
= signed_type_for (TREE_TYPE (arg00
));
6472 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
,
6474 fold_convert_loc (loc
, stype
, arg00
),
6475 build_int_cst (stype
, 0));
6482 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6483 equality/inequality test, then return a simplified form of
6484 the test using shifts and logical operations. Otherwise return
6485 NULL. TYPE is the desired result type. */
6488 fold_single_bit_test (location_t loc
, enum tree_code code
,
6489 tree arg0
, tree arg1
, tree result_type
)
6491 /* If this is testing a single bit, we can optimize the test. */
6492 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
6493 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
6494 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
6496 tree inner
= TREE_OPERAND (arg0
, 0);
6497 tree type
= TREE_TYPE (arg0
);
6498 int bitnum
= tree_log2 (TREE_OPERAND (arg0
, 1));
6499 enum machine_mode operand_mode
= TYPE_MODE (type
);
6501 tree signed_type
, unsigned_type
, intermediate_type
;
6504 /* First, see if we can fold the single bit test into a sign-bit
6506 tem
= fold_single_bit_test_into_sign_test (loc
, code
, arg0
, arg1
,
6511 /* Otherwise we have (A & C) != 0 where C is a single bit,
6512 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6513 Similarly for (A & C) == 0. */
6515 /* If INNER is a right shift of a constant and it plus BITNUM does
6516 not overflow, adjust BITNUM and INNER. */
6517 if (TREE_CODE (inner
) == RSHIFT_EXPR
6518 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
6519 && TREE_INT_CST_HIGH (TREE_OPERAND (inner
, 1)) == 0
6520 && bitnum
< TYPE_PRECISION (type
)
6521 && 0 > compare_tree_int (TREE_OPERAND (inner
, 1),
6522 bitnum
- TYPE_PRECISION (type
)))
6524 bitnum
+= TREE_INT_CST_LOW (TREE_OPERAND (inner
, 1));
6525 inner
= TREE_OPERAND (inner
, 0);
6528 /* If we are going to be able to omit the AND below, we must do our
6529 operations as unsigned. If we must use the AND, we have a choice.
6530 Normally unsigned is faster, but for some machines signed is. */
6531 #ifdef LOAD_EXTEND_OP
6532 ops_unsigned
= (LOAD_EXTEND_OP (operand_mode
) == SIGN_EXTEND
6533 && !flag_syntax_only
) ? 0 : 1;
6538 signed_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 0);
6539 unsigned_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 1);
6540 intermediate_type
= ops_unsigned
? unsigned_type
: signed_type
;
6541 inner
= fold_convert_loc (loc
, intermediate_type
, inner
);
6544 inner
= build2 (RSHIFT_EXPR
, intermediate_type
,
6545 inner
, size_int (bitnum
));
6547 one
= build_int_cst (intermediate_type
, 1);
6549 if (code
== EQ_EXPR
)
6550 inner
= fold_build2_loc (loc
, BIT_XOR_EXPR
, intermediate_type
, inner
, one
);
6552 /* Put the AND last so it can combine with more things. */
6553 inner
= build2 (BIT_AND_EXPR
, intermediate_type
, inner
, one
);
6555 /* Make sure to return the proper type. */
6556 inner
= fold_convert_loc (loc
, result_type
, inner
);
6563 /* Check whether we are allowed to reorder operands arg0 and arg1,
6564 such that the evaluation of arg1 occurs before arg0. */
6567 reorder_operands_p (const_tree arg0
, const_tree arg1
)
6569 if (! flag_evaluation_order
)
6571 if (TREE_CONSTANT (arg0
) || TREE_CONSTANT (arg1
))
6573 return ! TREE_SIDE_EFFECTS (arg0
)
6574 && ! TREE_SIDE_EFFECTS (arg1
);
6577 /* Test whether it is preferable two swap two operands, ARG0 and
6578 ARG1, for example because ARG0 is an integer constant and ARG1
6579 isn't. If REORDER is true, only recommend swapping if we can
6580 evaluate the operands in reverse order. */
6583 tree_swap_operands_p (const_tree arg0
, const_tree arg1
, bool reorder
)
6585 STRIP_SIGN_NOPS (arg0
);
6586 STRIP_SIGN_NOPS (arg1
);
6588 if (TREE_CODE (arg1
) == INTEGER_CST
)
6590 if (TREE_CODE (arg0
) == INTEGER_CST
)
6593 if (TREE_CODE (arg1
) == REAL_CST
)
6595 if (TREE_CODE (arg0
) == REAL_CST
)
6598 if (TREE_CODE (arg1
) == FIXED_CST
)
6600 if (TREE_CODE (arg0
) == FIXED_CST
)
6603 if (TREE_CODE (arg1
) == COMPLEX_CST
)
6605 if (TREE_CODE (arg0
) == COMPLEX_CST
)
6608 if (TREE_CONSTANT (arg1
))
6610 if (TREE_CONSTANT (arg0
))
6613 if (optimize_function_for_size_p (cfun
))
6616 if (reorder
&& flag_evaluation_order
6617 && (TREE_SIDE_EFFECTS (arg0
) || TREE_SIDE_EFFECTS (arg1
)))
6620 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6621 for commutative and comparison operators. Ensuring a canonical
6622 form allows the optimizers to find additional redundancies without
6623 having to explicitly check for both orderings. */
6624 if (TREE_CODE (arg0
) == SSA_NAME
6625 && TREE_CODE (arg1
) == SSA_NAME
6626 && SSA_NAME_VERSION (arg0
) > SSA_NAME_VERSION (arg1
))
6629 /* Put SSA_NAMEs last. */
6630 if (TREE_CODE (arg1
) == SSA_NAME
)
6632 if (TREE_CODE (arg0
) == SSA_NAME
)
6635 /* Put variables last. */
6644 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6645 ARG0 is extended to a wider type. */
6648 fold_widened_comparison (location_t loc
, enum tree_code code
,
6649 tree type
, tree arg0
, tree arg1
)
6651 tree arg0_unw
= get_unwidened (arg0
, NULL_TREE
);
6653 tree shorter_type
, outer_type
;
6657 if (arg0_unw
== arg0
)
6659 shorter_type
= TREE_TYPE (arg0_unw
);
6661 #ifdef HAVE_canonicalize_funcptr_for_compare
6662 /* Disable this optimization if we're casting a function pointer
6663 type on targets that require function pointer canonicalization. */
6664 if (HAVE_canonicalize_funcptr_for_compare
6665 && TREE_CODE (shorter_type
) == POINTER_TYPE
6666 && TREE_CODE (TREE_TYPE (shorter_type
)) == FUNCTION_TYPE
)
6670 if (TYPE_PRECISION (TREE_TYPE (arg0
)) <= TYPE_PRECISION (shorter_type
))
6673 arg1_unw
= get_unwidened (arg1
, NULL_TREE
);
6675 /* If possible, express the comparison in the shorter mode. */
6676 if ((code
== EQ_EXPR
|| code
== NE_EXPR
6677 || TYPE_UNSIGNED (TREE_TYPE (arg0
)) == TYPE_UNSIGNED (shorter_type
))
6678 && (TREE_TYPE (arg1_unw
) == shorter_type
6679 || ((TYPE_PRECISION (shorter_type
)
6680 >= TYPE_PRECISION (TREE_TYPE (arg1_unw
)))
6681 && (TYPE_UNSIGNED (shorter_type
)
6682 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw
))))
6683 || (TREE_CODE (arg1_unw
) == INTEGER_CST
6684 && (TREE_CODE (shorter_type
) == INTEGER_TYPE
6685 || TREE_CODE (shorter_type
) == BOOLEAN_TYPE
)
6686 && int_fits_type_p (arg1_unw
, shorter_type
))))
6687 return fold_build2_loc (loc
, code
, type
, arg0_unw
,
6688 fold_convert_loc (loc
, shorter_type
, arg1_unw
));
6690 if (TREE_CODE (arg1_unw
) != INTEGER_CST
6691 || TREE_CODE (shorter_type
) != INTEGER_TYPE
6692 || !int_fits_type_p (arg1_unw
, shorter_type
))
6695 /* If we are comparing with the integer that does not fit into the range
6696 of the shorter type, the result is known. */
6697 outer_type
= TREE_TYPE (arg1_unw
);
6698 min
= lower_bound_in_type (outer_type
, shorter_type
);
6699 max
= upper_bound_in_type (outer_type
, shorter_type
);
6701 above
= integer_nonzerop (fold_relational_const (LT_EXPR
, type
,
6703 below
= integer_nonzerop (fold_relational_const (LT_EXPR
, type
,
6710 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
6715 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
6721 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
6723 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
6728 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
6730 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
6739 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6740 ARG0 just the signedness is changed. */
6743 fold_sign_changed_comparison (location_t loc
, enum tree_code code
, tree type
,
6744 tree arg0
, tree arg1
)
6747 tree inner_type
, outer_type
;
6749 if (!CONVERT_EXPR_P (arg0
))
6752 outer_type
= TREE_TYPE (arg0
);
6753 arg0_inner
= TREE_OPERAND (arg0
, 0);
6754 inner_type
= TREE_TYPE (arg0_inner
);
6756 #ifdef HAVE_canonicalize_funcptr_for_compare
6757 /* Disable this optimization if we're casting a function pointer
6758 type on targets that require function pointer canonicalization. */
6759 if (HAVE_canonicalize_funcptr_for_compare
6760 && TREE_CODE (inner_type
) == POINTER_TYPE
6761 && TREE_CODE (TREE_TYPE (inner_type
)) == FUNCTION_TYPE
)
6765 if (TYPE_PRECISION (inner_type
) != TYPE_PRECISION (outer_type
))
6768 if (TREE_CODE (arg1
) != INTEGER_CST
6769 && !(CONVERT_EXPR_P (arg1
)
6770 && TREE_TYPE (TREE_OPERAND (arg1
, 0)) == inner_type
))
6773 if ((TYPE_UNSIGNED (inner_type
) != TYPE_UNSIGNED (outer_type
)
6774 || POINTER_TYPE_P (inner_type
) != POINTER_TYPE_P (outer_type
))
6779 if (TREE_CODE (arg1
) == INTEGER_CST
)
6780 arg1
= force_fit_type_double (inner_type
, tree_to_double_int (arg1
),
6781 0, TREE_OVERFLOW (arg1
));
6783 arg1
= fold_convert_loc (loc
, inner_type
, arg1
);
6785 return fold_build2_loc (loc
, code
, type
, arg0_inner
, arg1
);
6788 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6789 step of the array. Reconstructs s and delta in the case of s *
6790 delta being an integer constant (and thus already folded). ADDR is
6791 the address. MULT is the multiplicative expression. If the
6792 function succeeds, the new address expression is returned.
6793 Otherwise NULL_TREE is returned. LOC is the location of the
6794 resulting expression. */
6797 try_move_mult_to_index (location_t loc
, tree addr
, tree op1
)
6799 tree s
, delta
, step
;
6800 tree ref
= TREE_OPERAND (addr
, 0), pref
;
6805 /* Strip the nops that might be added when converting op1 to sizetype. */
6808 /* Canonicalize op1 into a possibly non-constant delta
6809 and an INTEGER_CST s. */
6810 if (TREE_CODE (op1
) == MULT_EXPR
)
6812 tree arg0
= TREE_OPERAND (op1
, 0), arg1
= TREE_OPERAND (op1
, 1);
6817 if (TREE_CODE (arg0
) == INTEGER_CST
)
6822 else if (TREE_CODE (arg1
) == INTEGER_CST
)
6830 else if (TREE_CODE (op1
) == INTEGER_CST
)
6837 /* Simulate we are delta * 1. */
6839 s
= integer_one_node
;
6842 /* Handle &x.array the same as we would handle &x.array[0]. */
6843 if (TREE_CODE (ref
) == COMPONENT_REF
6844 && TREE_CODE (TREE_TYPE (ref
)) == ARRAY_TYPE
)
6848 /* Remember if this was a multi-dimensional array. */
6849 if (TREE_CODE (TREE_OPERAND (ref
, 0)) == ARRAY_REF
)
6852 domain
= TYPE_DOMAIN (TREE_TYPE (ref
));
6855 itype
= TREE_TYPE (domain
);
6857 step
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref
)));
6858 if (TREE_CODE (step
) != INTEGER_CST
)
6863 if (! tree_int_cst_equal (step
, s
))
6868 /* Try if delta is a multiple of step. */
6869 tree tmp
= div_if_zero_remainder (EXACT_DIV_EXPR
, op1
, step
);
6875 /* Only fold here if we can verify we do not overflow one
6876 dimension of a multi-dimensional array. */
6881 if (!TYPE_MIN_VALUE (domain
)
6882 || !TYPE_MAX_VALUE (domain
)
6883 || TREE_CODE (TYPE_MAX_VALUE (domain
)) != INTEGER_CST
)
6886 tmp
= fold_binary_loc (loc
, PLUS_EXPR
, itype
,
6887 fold_convert_loc (loc
, itype
,
6888 TYPE_MIN_VALUE (domain
)),
6889 fold_convert_loc (loc
, itype
, delta
));
6890 if (TREE_CODE (tmp
) != INTEGER_CST
6891 || tree_int_cst_lt (TYPE_MAX_VALUE (domain
), tmp
))
6895 /* We found a suitable component reference. */
6897 pref
= TREE_OPERAND (addr
, 0);
6898 ret
= copy_node (pref
);
6899 SET_EXPR_LOCATION (ret
, loc
);
6901 ret
= build4_loc (loc
, ARRAY_REF
, TREE_TYPE (TREE_TYPE (ref
)), ret
,
6903 (loc
, PLUS_EXPR
, itype
,
6904 fold_convert_loc (loc
, itype
,
6906 (TYPE_DOMAIN (TREE_TYPE (ref
)))),
6907 fold_convert_loc (loc
, itype
, delta
)),
6908 NULL_TREE
, NULL_TREE
);
6909 return build_fold_addr_expr_loc (loc
, ret
);
6914 for (;; ref
= TREE_OPERAND (ref
, 0))
6916 if (TREE_CODE (ref
) == ARRAY_REF
)
6920 /* Remember if this was a multi-dimensional array. */
6921 if (TREE_CODE (TREE_OPERAND (ref
, 0)) == ARRAY_REF
)
6924 domain
= TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref
, 0)));
6927 itype
= TREE_TYPE (domain
);
6929 step
= array_ref_element_size (ref
);
6930 if (TREE_CODE (step
) != INTEGER_CST
)
6935 if (! tree_int_cst_equal (step
, s
))
6940 /* Try if delta is a multiple of step. */
6941 tree tmp
= div_if_zero_remainder (EXACT_DIV_EXPR
, op1
, step
);
6947 /* Only fold here if we can verify we do not overflow one
6948 dimension of a multi-dimensional array. */
6953 if (TREE_CODE (TREE_OPERAND (ref
, 1)) != INTEGER_CST
6954 || !TYPE_MAX_VALUE (domain
)
6955 || TREE_CODE (TYPE_MAX_VALUE (domain
)) != INTEGER_CST
)
6958 tmp
= fold_binary_loc (loc
, PLUS_EXPR
, itype
,
6959 fold_convert_loc (loc
, itype
,
6960 TREE_OPERAND (ref
, 1)),
6961 fold_convert_loc (loc
, itype
, delta
));
6963 || TREE_CODE (tmp
) != INTEGER_CST
6964 || tree_int_cst_lt (TYPE_MAX_VALUE (domain
), tmp
))
6973 if (!handled_component_p (ref
))
6977 /* We found the suitable array reference. So copy everything up to it,
6978 and replace the index. */
6980 pref
= TREE_OPERAND (addr
, 0);
6981 ret
= copy_node (pref
);
6982 SET_EXPR_LOCATION (ret
, loc
);
6987 pref
= TREE_OPERAND (pref
, 0);
6988 TREE_OPERAND (pos
, 0) = copy_node (pref
);
6989 pos
= TREE_OPERAND (pos
, 0);
6992 TREE_OPERAND (pos
, 1)
6993 = fold_build2_loc (loc
, PLUS_EXPR
, itype
,
6994 fold_convert_loc (loc
, itype
, TREE_OPERAND (pos
, 1)),
6995 fold_convert_loc (loc
, itype
, delta
));
6996 return fold_build1_loc (loc
, ADDR_EXPR
, TREE_TYPE (addr
), ret
);
7000 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7001 means A >= Y && A != MAX, but in this case we know that
7002 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7005 fold_to_nonsharp_ineq_using_bound (location_t loc
, tree ineq
, tree bound
)
7007 tree a
, typea
, type
= TREE_TYPE (ineq
), a1
, diff
, y
;
7009 if (TREE_CODE (bound
) == LT_EXPR
)
7010 a
= TREE_OPERAND (bound
, 0);
7011 else if (TREE_CODE (bound
) == GT_EXPR
)
7012 a
= TREE_OPERAND (bound
, 1);
7016 typea
= TREE_TYPE (a
);
7017 if (!INTEGRAL_TYPE_P (typea
)
7018 && !POINTER_TYPE_P (typea
))
7021 if (TREE_CODE (ineq
) == LT_EXPR
)
7023 a1
= TREE_OPERAND (ineq
, 1);
7024 y
= TREE_OPERAND (ineq
, 0);
7026 else if (TREE_CODE (ineq
) == GT_EXPR
)
7028 a1
= TREE_OPERAND (ineq
, 0);
7029 y
= TREE_OPERAND (ineq
, 1);
7034 if (TREE_TYPE (a1
) != typea
)
7037 if (POINTER_TYPE_P (typea
))
7039 /* Convert the pointer types into integer before taking the difference. */
7040 tree ta
= fold_convert_loc (loc
, ssizetype
, a
);
7041 tree ta1
= fold_convert_loc (loc
, ssizetype
, a1
);
7042 diff
= fold_binary_loc (loc
, MINUS_EXPR
, ssizetype
, ta1
, ta
);
7045 diff
= fold_binary_loc (loc
, MINUS_EXPR
, typea
, a1
, a
);
7047 if (!diff
|| !integer_onep (diff
))
7050 return fold_build2_loc (loc
, GE_EXPR
, type
, a
, y
);
7053 /* Fold a sum or difference of at least one multiplication.
7054 Returns the folded tree or NULL if no simplification could be made. */
7057 fold_plusminus_mult_expr (location_t loc
, enum tree_code code
, tree type
,
7058 tree arg0
, tree arg1
)
7060 tree arg00
, arg01
, arg10
, arg11
;
7061 tree alt0
= NULL_TREE
, alt1
= NULL_TREE
, same
;
7063 /* (A * C) +- (B * C) -> (A+-B) * C.
7064 (A * C) +- A -> A * (C+-1).
7065 We are most concerned about the case where C is a constant,
7066 but other combinations show up during loop reduction. Since
7067 it is not difficult, try all four possibilities. */
7069 if (TREE_CODE (arg0
) == MULT_EXPR
)
7071 arg00
= TREE_OPERAND (arg0
, 0);
7072 arg01
= TREE_OPERAND (arg0
, 1);
7074 else if (TREE_CODE (arg0
) == INTEGER_CST
)
7076 arg00
= build_one_cst (type
);
7081 /* We cannot generate constant 1 for fract. */
7082 if (ALL_FRACT_MODE_P (TYPE_MODE (type
)))
7085 arg01
= build_one_cst (type
);
7087 if (TREE_CODE (arg1
) == MULT_EXPR
)
7089 arg10
= TREE_OPERAND (arg1
, 0);
7090 arg11
= TREE_OPERAND (arg1
, 1);
7092 else if (TREE_CODE (arg1
) == INTEGER_CST
)
7094 arg10
= build_one_cst (type
);
7095 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7096 the purpose of this canonicalization. */
7097 if (TREE_INT_CST_HIGH (arg1
) == -1
7098 && negate_expr_p (arg1
)
7099 && code
== PLUS_EXPR
)
7101 arg11
= negate_expr (arg1
);
7109 /* We cannot generate constant 1 for fract. */
7110 if (ALL_FRACT_MODE_P (TYPE_MODE (type
)))
7113 arg11
= build_one_cst (type
);
7117 if (operand_equal_p (arg01
, arg11
, 0))
7118 same
= arg01
, alt0
= arg00
, alt1
= arg10
;
7119 else if (operand_equal_p (arg00
, arg10
, 0))
7120 same
= arg00
, alt0
= arg01
, alt1
= arg11
;
7121 else if (operand_equal_p (arg00
, arg11
, 0))
7122 same
= arg00
, alt0
= arg01
, alt1
= arg10
;
7123 else if (operand_equal_p (arg01
, arg10
, 0))
7124 same
= arg01
, alt0
= arg00
, alt1
= arg11
;
7126 /* No identical multiplicands; see if we can find a common
7127 power-of-two factor in non-power-of-two multiplies. This
7128 can help in multi-dimensional array access. */
7129 else if (host_integerp (arg01
, 0)
7130 && host_integerp (arg11
, 0))
7132 HOST_WIDE_INT int01
, int11
, tmp
;
7135 int01
= TREE_INT_CST_LOW (arg01
);
7136 int11
= TREE_INT_CST_LOW (arg11
);
7138 /* Move min of absolute values to int11. */
7139 if (absu_hwi (int01
) < absu_hwi (int11
))
7141 tmp
= int01
, int01
= int11
, int11
= tmp
;
7142 alt0
= arg00
, arg00
= arg10
, arg10
= alt0
;
7149 if (exact_log2 (absu_hwi (int11
)) > 0 && int01
% int11
== 0
7150 /* The remainder should not be a constant, otherwise we
7151 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7152 increased the number of multiplications necessary. */
7153 && TREE_CODE (arg10
) != INTEGER_CST
)
7155 alt0
= fold_build2_loc (loc
, MULT_EXPR
, TREE_TYPE (arg00
), arg00
,
7156 build_int_cst (TREE_TYPE (arg00
),
7161 maybe_same
= alt0
, alt0
= alt1
, alt1
= maybe_same
;
7166 return fold_build2_loc (loc
, MULT_EXPR
, type
,
7167 fold_build2_loc (loc
, code
, type
,
7168 fold_convert_loc (loc
, type
, alt0
),
7169 fold_convert_loc (loc
, type
, alt1
)),
7170 fold_convert_loc (loc
, type
, same
));
7175 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7176 specified by EXPR into the buffer PTR of length LEN bytes.
7177 Return the number of bytes placed in the buffer, or zero
7181 native_encode_int (const_tree expr
, unsigned char *ptr
, int len
)
7183 tree type
= TREE_TYPE (expr
);
7184 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7185 int byte
, offset
, word
, words
;
7186 unsigned char value
;
7188 if (total_bytes
> len
)
7190 words
= total_bytes
/ UNITS_PER_WORD
;
7192 for (byte
= 0; byte
< total_bytes
; byte
++)
7194 int bitpos
= byte
* BITS_PER_UNIT
;
7195 if (bitpos
< HOST_BITS_PER_WIDE_INT
)
7196 value
= (unsigned char) (TREE_INT_CST_LOW (expr
) >> bitpos
);
7198 value
= (unsigned char) (TREE_INT_CST_HIGH (expr
)
7199 >> (bitpos
- HOST_BITS_PER_WIDE_INT
));
7201 if (total_bytes
> UNITS_PER_WORD
)
7203 word
= byte
/ UNITS_PER_WORD
;
7204 if (WORDS_BIG_ENDIAN
)
7205 word
= (words
- 1) - word
;
7206 offset
= word
* UNITS_PER_WORD
;
7207 if (BYTES_BIG_ENDIAN
)
7208 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7210 offset
+= byte
% UNITS_PER_WORD
;
7213 offset
= BYTES_BIG_ENDIAN
? (total_bytes
- 1) - byte
: byte
;
7214 ptr
[offset
] = value
;
7220 /* Subroutine of native_encode_expr. Encode the REAL_CST
7221 specified by EXPR into the buffer PTR of length LEN bytes.
7222 Return the number of bytes placed in the buffer, or zero
7226 native_encode_real (const_tree expr
, unsigned char *ptr
, int len
)
7228 tree type
= TREE_TYPE (expr
);
7229 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7230 int byte
, offset
, word
, words
, bitpos
;
7231 unsigned char value
;
7233 /* There are always 32 bits in each long, no matter the size of
7234 the hosts long. We handle floating point representations with
7238 if (total_bytes
> len
)
7240 words
= (32 / BITS_PER_UNIT
) / UNITS_PER_WORD
;
7242 real_to_target (tmp
, TREE_REAL_CST_PTR (expr
), TYPE_MODE (type
));
7244 for (bitpos
= 0; bitpos
< total_bytes
* BITS_PER_UNIT
;
7245 bitpos
+= BITS_PER_UNIT
)
7247 byte
= (bitpos
/ BITS_PER_UNIT
) & 3;
7248 value
= (unsigned char) (tmp
[bitpos
/ 32] >> (bitpos
& 31));
7250 if (UNITS_PER_WORD
< 4)
7252 word
= byte
/ UNITS_PER_WORD
;
7253 if (WORDS_BIG_ENDIAN
)
7254 word
= (words
- 1) - word
;
7255 offset
= word
* UNITS_PER_WORD
;
7256 if (BYTES_BIG_ENDIAN
)
7257 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7259 offset
+= byte
% UNITS_PER_WORD
;
7262 offset
= BYTES_BIG_ENDIAN
? 3 - byte
: byte
;
7263 ptr
[offset
+ ((bitpos
/ BITS_PER_UNIT
) & ~3)] = value
;
7268 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7269 specified by EXPR into the buffer PTR of length LEN bytes.
7270 Return the number of bytes placed in the buffer, or zero
7274 native_encode_complex (const_tree expr
, unsigned char *ptr
, int len
)
7279 part
= TREE_REALPART (expr
);
7280 rsize
= native_encode_expr (part
, ptr
, len
);
7283 part
= TREE_IMAGPART (expr
);
7284 isize
= native_encode_expr (part
, ptr
+rsize
, len
-rsize
);
7287 return rsize
+ isize
;
7291 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7292 specified by EXPR into the buffer PTR of length LEN bytes.
7293 Return the number of bytes placed in the buffer, or zero
7297 native_encode_vector (const_tree expr
, unsigned char *ptr
, int len
)
7304 count
= VECTOR_CST_NELTS (expr
);
7305 itype
= TREE_TYPE (TREE_TYPE (expr
));
7306 size
= GET_MODE_SIZE (TYPE_MODE (itype
));
7307 for (i
= 0; i
< count
; i
++)
7309 elem
= VECTOR_CST_ELT (expr
, i
);
7310 if (native_encode_expr (elem
, ptr
+offset
, len
-offset
) != size
)
7318 /* Subroutine of native_encode_expr. Encode the STRING_CST
7319 specified by EXPR into the buffer PTR of length LEN bytes.
7320 Return the number of bytes placed in the buffer, or zero
7324 native_encode_string (const_tree expr
, unsigned char *ptr
, int len
)
7326 tree type
= TREE_TYPE (expr
);
7327 HOST_WIDE_INT total_bytes
;
7329 if (TREE_CODE (type
) != ARRAY_TYPE
7330 || TREE_CODE (TREE_TYPE (type
)) != INTEGER_TYPE
7331 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type
))) != BITS_PER_UNIT
7332 || !host_integerp (TYPE_SIZE_UNIT (type
), 0))
7334 total_bytes
= tree_low_cst (TYPE_SIZE_UNIT (type
), 0);
7335 if (total_bytes
> len
)
7337 if (TREE_STRING_LENGTH (expr
) < total_bytes
)
7339 memcpy (ptr
, TREE_STRING_POINTER (expr
), TREE_STRING_LENGTH (expr
));
7340 memset (ptr
+ TREE_STRING_LENGTH (expr
), 0,
7341 total_bytes
- TREE_STRING_LENGTH (expr
));
7344 memcpy (ptr
, TREE_STRING_POINTER (expr
), total_bytes
);
7349 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7350 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7351 buffer PTR of length LEN bytes. Return the number of bytes
7352 placed in the buffer, or zero upon failure. */
7355 native_encode_expr (const_tree expr
, unsigned char *ptr
, int len
)
7357 switch (TREE_CODE (expr
))
7360 return native_encode_int (expr
, ptr
, len
);
7363 return native_encode_real (expr
, ptr
, len
);
7366 return native_encode_complex (expr
, ptr
, len
);
7369 return native_encode_vector (expr
, ptr
, len
);
7372 return native_encode_string (expr
, ptr
, len
);
7380 /* Subroutine of native_interpret_expr. Interpret the contents of
7381 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7382 If the buffer cannot be interpreted, return NULL_TREE. */
7385 native_interpret_int (tree type
, const unsigned char *ptr
, int len
)
7387 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7388 int byte
, offset
, word
, words
;
7389 unsigned char value
;
7392 if (total_bytes
> len
)
7394 if (total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
7397 result
= double_int_zero
;
7398 words
= total_bytes
/ UNITS_PER_WORD
;
7400 for (byte
= 0; byte
< total_bytes
; byte
++)
7402 int bitpos
= byte
* BITS_PER_UNIT
;
7403 if (total_bytes
> UNITS_PER_WORD
)
7405 word
= byte
/ UNITS_PER_WORD
;
7406 if (WORDS_BIG_ENDIAN
)
7407 word
= (words
- 1) - word
;
7408 offset
= word
* UNITS_PER_WORD
;
7409 if (BYTES_BIG_ENDIAN
)
7410 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7412 offset
+= byte
% UNITS_PER_WORD
;
7415 offset
= BYTES_BIG_ENDIAN
? (total_bytes
- 1) - byte
: byte
;
7416 value
= ptr
[offset
];
7418 if (bitpos
< HOST_BITS_PER_WIDE_INT
)
7419 result
.low
|= (unsigned HOST_WIDE_INT
) value
<< bitpos
;
7421 result
.high
|= (unsigned HOST_WIDE_INT
) value
7422 << (bitpos
- HOST_BITS_PER_WIDE_INT
);
7425 return double_int_to_tree (type
, result
);
7429 /* Subroutine of native_interpret_expr. Interpret the contents of
7430 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7431 If the buffer cannot be interpreted, return NULL_TREE. */
7434 native_interpret_real (tree type
, const unsigned char *ptr
, int len
)
7436 enum machine_mode mode
= TYPE_MODE (type
);
7437 int total_bytes
= GET_MODE_SIZE (mode
);
7438 int byte
, offset
, word
, words
, bitpos
;
7439 unsigned char value
;
7440 /* There are always 32 bits in each long, no matter the size of
7441 the hosts long. We handle floating point representations with
7446 total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7447 if (total_bytes
> len
|| total_bytes
> 24)
7449 words
= (32 / BITS_PER_UNIT
) / UNITS_PER_WORD
;
7451 memset (tmp
, 0, sizeof (tmp
));
7452 for (bitpos
= 0; bitpos
< total_bytes
* BITS_PER_UNIT
;
7453 bitpos
+= BITS_PER_UNIT
)
7455 byte
= (bitpos
/ BITS_PER_UNIT
) & 3;
7456 if (UNITS_PER_WORD
< 4)
7458 word
= byte
/ UNITS_PER_WORD
;
7459 if (WORDS_BIG_ENDIAN
)
7460 word
= (words
- 1) - word
;
7461 offset
= word
* UNITS_PER_WORD
;
7462 if (BYTES_BIG_ENDIAN
)
7463 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7465 offset
+= byte
% UNITS_PER_WORD
;
7468 offset
= BYTES_BIG_ENDIAN
? 3 - byte
: byte
;
7469 value
= ptr
[offset
+ ((bitpos
/ BITS_PER_UNIT
) & ~3)];
7471 tmp
[bitpos
/ 32] |= (unsigned long)value
<< (bitpos
& 31);
7474 real_from_target (&r
, tmp
, mode
);
7475 return build_real (type
, r
);
7479 /* Subroutine of native_interpret_expr. Interpret the contents of
7480 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7481 If the buffer cannot be interpreted, return NULL_TREE. */
7484 native_interpret_complex (tree type
, const unsigned char *ptr
, int len
)
7486 tree etype
, rpart
, ipart
;
7489 etype
= TREE_TYPE (type
);
7490 size
= GET_MODE_SIZE (TYPE_MODE (etype
));
7493 rpart
= native_interpret_expr (etype
, ptr
, size
);
7496 ipart
= native_interpret_expr (etype
, ptr
+size
, size
);
7499 return build_complex (type
, rpart
, ipart
);
7503 /* Subroutine of native_interpret_expr. Interpret the contents of
7504 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7505 If the buffer cannot be interpreted, return NULL_TREE. */
7508 native_interpret_vector (tree type
, const unsigned char *ptr
, int len
)
7514 etype
= TREE_TYPE (type
);
7515 size
= GET_MODE_SIZE (TYPE_MODE (etype
));
7516 count
= TYPE_VECTOR_SUBPARTS (type
);
7517 if (size
* count
> len
)
7520 elements
= XALLOCAVEC (tree
, count
);
7521 for (i
= count
- 1; i
>= 0; i
--)
7523 elem
= native_interpret_expr (etype
, ptr
+(i
*size
), size
);
7528 return build_vector (type
, elements
);
7532 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7533 the buffer PTR of length LEN as a constant of type TYPE. For
7534 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7535 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7536 return NULL_TREE. */
7539 native_interpret_expr (tree type
, const unsigned char *ptr
, int len
)
7541 switch (TREE_CODE (type
))
7547 case REFERENCE_TYPE
:
7548 return native_interpret_int (type
, ptr
, len
);
7551 return native_interpret_real (type
, ptr
, len
);
7554 return native_interpret_complex (type
, ptr
, len
);
7557 return native_interpret_vector (type
, ptr
, len
);
7564 /* Returns true if we can interpret the contents of a native encoding
7568 can_native_interpret_type_p (tree type
)
7570 switch (TREE_CODE (type
))
7576 case REFERENCE_TYPE
:
7586 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7587 TYPE at compile-time. If we're unable to perform the conversion
7588 return NULL_TREE. */
7591 fold_view_convert_expr (tree type
, tree expr
)
7593 /* We support up to 512-bit values (for V8DFmode). */
7594 unsigned char buffer
[64];
7597 /* Check that the host and target are sane. */
7598 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8)
7601 len
= native_encode_expr (expr
, buffer
, sizeof (buffer
));
7605 return native_interpret_expr (type
, buffer
, len
);
7608 /* Build an expression for the address of T. Folds away INDIRECT_REF
7609 to avoid confusing the gimplify process. */
7612 build_fold_addr_expr_with_type_loc (location_t loc
, tree t
, tree ptrtype
)
7614 /* The size of the object is not relevant when talking about its address. */
7615 if (TREE_CODE (t
) == WITH_SIZE_EXPR
)
7616 t
= TREE_OPERAND (t
, 0);
7618 if (TREE_CODE (t
) == INDIRECT_REF
)
7620 t
= TREE_OPERAND (t
, 0);
7622 if (TREE_TYPE (t
) != ptrtype
)
7623 t
= build1_loc (loc
, NOP_EXPR
, ptrtype
, t
);
7625 else if (TREE_CODE (t
) == MEM_REF
7626 && integer_zerop (TREE_OPERAND (t
, 1)))
7627 return TREE_OPERAND (t
, 0);
7628 else if (TREE_CODE (t
) == MEM_REF
7629 && TREE_CODE (TREE_OPERAND (t
, 0)) == INTEGER_CST
)
7630 return fold_binary (POINTER_PLUS_EXPR
, ptrtype
,
7631 TREE_OPERAND (t
, 0),
7632 convert_to_ptrofftype (TREE_OPERAND (t
, 1)));
7633 else if (TREE_CODE (t
) == VIEW_CONVERT_EXPR
)
7635 t
= build_fold_addr_expr_loc (loc
, TREE_OPERAND (t
, 0));
7637 if (TREE_TYPE (t
) != ptrtype
)
7638 t
= fold_convert_loc (loc
, ptrtype
, t
);
7641 t
= build1_loc (loc
, ADDR_EXPR
, ptrtype
, t
);
7646 /* Build an expression for the address of T. */
7649 build_fold_addr_expr_loc (location_t loc
, tree t
)
7651 tree ptrtype
= build_pointer_type (TREE_TYPE (t
));
7653 return build_fold_addr_expr_with_type_loc (loc
, t
, ptrtype
);
7656 static bool vec_cst_ctor_to_array (tree
, tree
*);
7658 /* Fold a unary expression of code CODE and type TYPE with operand
7659 OP0. Return the folded expression if folding is successful.
7660 Otherwise, return NULL_TREE. */
7663 fold_unary_loc (location_t loc
, enum tree_code code
, tree type
, tree op0
)
7667 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
7669 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
7670 && TREE_CODE_LENGTH (code
) == 1);
7675 if (CONVERT_EXPR_CODE_P (code
)
7676 || code
== FLOAT_EXPR
|| code
== ABS_EXPR
|| code
== NEGATE_EXPR
)
7678 /* Don't use STRIP_NOPS, because signedness of argument type
7680 STRIP_SIGN_NOPS (arg0
);
7684 /* Strip any conversions that don't change the mode. This
7685 is safe for every expression, except for a comparison
7686 expression because its signedness is derived from its
7689 Note that this is done as an internal manipulation within
7690 the constant folder, in order to find the simplest
7691 representation of the arguments so that their form can be
7692 studied. In any cases, the appropriate type conversions
7693 should be put back in the tree that will get out of the
7699 if (TREE_CODE_CLASS (code
) == tcc_unary
)
7701 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
7702 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7703 fold_build1_loc (loc
, code
, type
,
7704 fold_convert_loc (loc
, TREE_TYPE (op0
),
7705 TREE_OPERAND (arg0
, 1))));
7706 else if (TREE_CODE (arg0
) == COND_EXPR
)
7708 tree arg01
= TREE_OPERAND (arg0
, 1);
7709 tree arg02
= TREE_OPERAND (arg0
, 2);
7710 if (! VOID_TYPE_P (TREE_TYPE (arg01
)))
7711 arg01
= fold_build1_loc (loc
, code
, type
,
7712 fold_convert_loc (loc
,
7713 TREE_TYPE (op0
), arg01
));
7714 if (! VOID_TYPE_P (TREE_TYPE (arg02
)))
7715 arg02
= fold_build1_loc (loc
, code
, type
,
7716 fold_convert_loc (loc
,
7717 TREE_TYPE (op0
), arg02
));
7718 tem
= fold_build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7721 /* If this was a conversion, and all we did was to move into
7722 inside the COND_EXPR, bring it back out. But leave it if
7723 it is a conversion from integer to integer and the
7724 result precision is no wider than a word since such a
7725 conversion is cheap and may be optimized away by combine,
7726 while it couldn't if it were outside the COND_EXPR. Then return
7727 so we don't get into an infinite recursion loop taking the
7728 conversion out and then back in. */
7730 if ((CONVERT_EXPR_CODE_P (code
)
7731 || code
== NON_LVALUE_EXPR
)
7732 && TREE_CODE (tem
) == COND_EXPR
7733 && TREE_CODE (TREE_OPERAND (tem
, 1)) == code
7734 && TREE_CODE (TREE_OPERAND (tem
, 2)) == code
7735 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 1))
7736 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 2))
7737 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))
7738 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)))
7739 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7741 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))))
7742 && TYPE_PRECISION (TREE_TYPE (tem
)) <= BITS_PER_WORD
)
7743 || flag_syntax_only
))
7744 tem
= build1_loc (loc
, code
, type
,
7746 TREE_TYPE (TREE_OPERAND
7747 (TREE_OPERAND (tem
, 1), 0)),
7748 TREE_OPERAND (tem
, 0),
7749 TREE_OPERAND (TREE_OPERAND (tem
, 1), 0),
7750 TREE_OPERAND (TREE_OPERAND (tem
, 2),
7759 /* Re-association barriers around constants and other re-association
7760 barriers can be removed. */
7761 if (CONSTANT_CLASS_P (op0
)
7762 || TREE_CODE (op0
) == PAREN_EXPR
)
7763 return fold_convert_loc (loc
, type
, op0
);
7768 case FIX_TRUNC_EXPR
:
7769 if (TREE_TYPE (op0
) == type
)
7772 if (COMPARISON_CLASS_P (op0
))
7774 /* If we have (type) (a CMP b) and type is an integral type, return
7775 new expression involving the new type. Canonicalize
7776 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7778 Do not fold the result as that would not simplify further, also
7779 folding again results in recursions. */
7780 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
7781 return build2_loc (loc
, TREE_CODE (op0
), type
,
7782 TREE_OPERAND (op0
, 0),
7783 TREE_OPERAND (op0
, 1));
7784 else if (!INTEGRAL_TYPE_P (type
))
7785 return build3_loc (loc
, COND_EXPR
, type
, op0
,
7786 constant_boolean_node (true, type
),
7787 constant_boolean_node (false, type
));
7790 /* Handle cases of two conversions in a row. */
7791 if (CONVERT_EXPR_P (op0
))
7793 tree inside_type
= TREE_TYPE (TREE_OPERAND (op0
, 0));
7794 tree inter_type
= TREE_TYPE (op0
);
7795 int inside_int
= INTEGRAL_TYPE_P (inside_type
);
7796 int inside_ptr
= POINTER_TYPE_P (inside_type
);
7797 int inside_float
= FLOAT_TYPE_P (inside_type
);
7798 int inside_vec
= TREE_CODE (inside_type
) == VECTOR_TYPE
;
7799 unsigned int inside_prec
= TYPE_PRECISION (inside_type
);
7800 int inside_unsignedp
= TYPE_UNSIGNED (inside_type
);
7801 int inter_int
= INTEGRAL_TYPE_P (inter_type
);
7802 int inter_ptr
= POINTER_TYPE_P (inter_type
);
7803 int inter_float
= FLOAT_TYPE_P (inter_type
);
7804 int inter_vec
= TREE_CODE (inter_type
) == VECTOR_TYPE
;
7805 unsigned int inter_prec
= TYPE_PRECISION (inter_type
);
7806 int inter_unsignedp
= TYPE_UNSIGNED (inter_type
);
7807 int final_int
= INTEGRAL_TYPE_P (type
);
7808 int final_ptr
= POINTER_TYPE_P (type
);
7809 int final_float
= FLOAT_TYPE_P (type
);
7810 int final_vec
= TREE_CODE (type
) == VECTOR_TYPE
;
7811 unsigned int final_prec
= TYPE_PRECISION (type
);
7812 int final_unsignedp
= TYPE_UNSIGNED (type
);
7814 /* In addition to the cases of two conversions in a row
7815 handled below, if we are converting something to its own
7816 type via an object of identical or wider precision, neither
7817 conversion is needed. */
7818 if (TYPE_MAIN_VARIANT (inside_type
) == TYPE_MAIN_VARIANT (type
)
7819 && (((inter_int
|| inter_ptr
) && final_int
)
7820 || (inter_float
&& final_float
))
7821 && inter_prec
>= final_prec
)
7822 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7824 /* Likewise, if the intermediate and initial types are either both
7825 float or both integer, we don't need the middle conversion if the
7826 former is wider than the latter and doesn't change the signedness
7827 (for integers). Avoid this if the final type is a pointer since
7828 then we sometimes need the middle conversion. Likewise if the
7829 final type has a precision not equal to the size of its mode. */
7830 if (((inter_int
&& inside_int
)
7831 || (inter_float
&& inside_float
)
7832 || (inter_vec
&& inside_vec
))
7833 && inter_prec
>= inside_prec
7834 && (inter_float
|| inter_vec
7835 || inter_unsignedp
== inside_unsignedp
)
7836 && ! (final_prec
!= GET_MODE_PRECISION (TYPE_MODE (type
))
7837 && TYPE_MODE (type
) == TYPE_MODE (inter_type
))
7839 && (! final_vec
|| inter_prec
== inside_prec
))
7840 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7842 /* If we have a sign-extension of a zero-extended value, we can
7843 replace that by a single zero-extension. Likewise if the
7844 final conversion does not change precision we can drop the
7845 intermediate conversion. */
7846 if (inside_int
&& inter_int
&& final_int
7847 && ((inside_prec
< inter_prec
&& inter_prec
< final_prec
7848 && inside_unsignedp
&& !inter_unsignedp
)
7849 || final_prec
== inter_prec
))
7850 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7852 /* Two conversions in a row are not needed unless:
7853 - some conversion is floating-point (overstrict for now), or
7854 - some conversion is a vector (overstrict for now), or
7855 - the intermediate type is narrower than both initial and
7857 - the intermediate type and innermost type differ in signedness,
7858 and the outermost type is wider than the intermediate, or
7859 - the initial type is a pointer type and the precisions of the
7860 intermediate and final types differ, or
7861 - the final type is a pointer type and the precisions of the
7862 initial and intermediate types differ. */
7863 if (! inside_float
&& ! inter_float
&& ! final_float
7864 && ! inside_vec
&& ! inter_vec
&& ! final_vec
7865 && (inter_prec
>= inside_prec
|| inter_prec
>= final_prec
)
7866 && ! (inside_int
&& inter_int
7867 && inter_unsignedp
!= inside_unsignedp
7868 && inter_prec
< final_prec
)
7869 && ((inter_unsignedp
&& inter_prec
> inside_prec
)
7870 == (final_unsignedp
&& final_prec
> inter_prec
))
7871 && ! (inside_ptr
&& inter_prec
!= final_prec
)
7872 && ! (final_ptr
&& inside_prec
!= inter_prec
)
7873 && ! (final_prec
!= GET_MODE_PRECISION (TYPE_MODE (type
))
7874 && TYPE_MODE (type
) == TYPE_MODE (inter_type
)))
7875 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7878 /* Handle (T *)&A.B.C for A being of type T and B and C
7879 living at offset zero. This occurs frequently in
7880 C++ upcasting and then accessing the base. */
7881 if (TREE_CODE (op0
) == ADDR_EXPR
7882 && POINTER_TYPE_P (type
)
7883 && handled_component_p (TREE_OPERAND (op0
, 0)))
7885 HOST_WIDE_INT bitsize
, bitpos
;
7887 enum machine_mode mode
;
7888 int unsignedp
, volatilep
;
7889 tree base
= TREE_OPERAND (op0
, 0);
7890 base
= get_inner_reference (base
, &bitsize
, &bitpos
, &offset
,
7891 &mode
, &unsignedp
, &volatilep
, false);
7892 /* If the reference was to a (constant) zero offset, we can use
7893 the address of the base if it has the same base type
7894 as the result type and the pointer type is unqualified. */
7895 if (! offset
&& bitpos
== 0
7896 && (TYPE_MAIN_VARIANT (TREE_TYPE (type
))
7897 == TYPE_MAIN_VARIANT (TREE_TYPE (base
)))
7898 && TYPE_QUALS (type
) == TYPE_UNQUALIFIED
)
7899 return fold_convert_loc (loc
, type
,
7900 build_fold_addr_expr_loc (loc
, base
));
7903 if (TREE_CODE (op0
) == MODIFY_EXPR
7904 && TREE_CONSTANT (TREE_OPERAND (op0
, 1))
7905 /* Detect assigning a bitfield. */
7906 && !(TREE_CODE (TREE_OPERAND (op0
, 0)) == COMPONENT_REF
7908 (TREE_OPERAND (TREE_OPERAND (op0
, 0), 1))))
7910 /* Don't leave an assignment inside a conversion
7911 unless assigning a bitfield. */
7912 tem
= fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 1));
7913 /* First do the assignment, then return converted constant. */
7914 tem
= build2_loc (loc
, COMPOUND_EXPR
, TREE_TYPE (tem
), op0
, tem
);
7915 TREE_NO_WARNING (tem
) = 1;
7916 TREE_USED (tem
) = 1;
7920 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7921 constants (if x has signed type, the sign bit cannot be set
7922 in c). This folds extension into the BIT_AND_EXPR.
7923 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7924 very likely don't have maximal range for their precision and this
7925 transformation effectively doesn't preserve non-maximal ranges. */
7926 if (TREE_CODE (type
) == INTEGER_TYPE
7927 && TREE_CODE (op0
) == BIT_AND_EXPR
7928 && TREE_CODE (TREE_OPERAND (op0
, 1)) == INTEGER_CST
)
7930 tree and_expr
= op0
;
7931 tree and0
= TREE_OPERAND (and_expr
, 0);
7932 tree and1
= TREE_OPERAND (and_expr
, 1);
7935 if (TYPE_UNSIGNED (TREE_TYPE (and_expr
))
7936 || (TYPE_PRECISION (type
)
7937 <= TYPE_PRECISION (TREE_TYPE (and_expr
))))
7939 else if (TYPE_PRECISION (TREE_TYPE (and1
))
7940 <= HOST_BITS_PER_WIDE_INT
7941 && host_integerp (and1
, 1))
7943 unsigned HOST_WIDE_INT cst
;
7945 cst
= tree_low_cst (and1
, 1);
7946 cst
&= (HOST_WIDE_INT
) -1
7947 << (TYPE_PRECISION (TREE_TYPE (and1
)) - 1);
7948 change
= (cst
== 0);
7949 #ifdef LOAD_EXTEND_OP
7951 && !flag_syntax_only
7952 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0
)))
7955 tree uns
= unsigned_type_for (TREE_TYPE (and0
));
7956 and0
= fold_convert_loc (loc
, uns
, and0
);
7957 and1
= fold_convert_loc (loc
, uns
, and1
);
7963 tem
= force_fit_type_double (type
, tree_to_double_int (and1
),
7964 0, TREE_OVERFLOW (and1
));
7965 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
7966 fold_convert_loc (loc
, type
, and0
), tem
);
7970 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7971 when one of the new casts will fold away. Conservatively we assume
7972 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7973 if (POINTER_TYPE_P (type
)
7974 && TREE_CODE (arg0
) == POINTER_PLUS_EXPR
7975 && (!TYPE_RESTRICT (type
) || TYPE_RESTRICT (TREE_TYPE (arg0
)))
7976 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
7977 || TREE_CODE (TREE_OPERAND (arg0
, 0)) == NOP_EXPR
7978 || TREE_CODE (TREE_OPERAND (arg0
, 1)) == NOP_EXPR
))
7980 tree arg00
= TREE_OPERAND (arg0
, 0);
7981 tree arg01
= TREE_OPERAND (arg0
, 1);
7983 return fold_build_pointer_plus_loc
7984 (loc
, fold_convert_loc (loc
, type
, arg00
), arg01
);
7987 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7988 of the same precision, and X is an integer type not narrower than
7989 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7990 if (INTEGRAL_TYPE_P (type
)
7991 && TREE_CODE (op0
) == BIT_NOT_EXPR
7992 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
7993 && CONVERT_EXPR_P (TREE_OPERAND (op0
, 0))
7994 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (op0
)))
7996 tem
= TREE_OPERAND (TREE_OPERAND (op0
, 0), 0);
7997 if (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7998 && TYPE_PRECISION (type
) <= TYPE_PRECISION (TREE_TYPE (tem
)))
7999 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
8000 fold_convert_loc (loc
, type
, tem
));
8003 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8004 type of X and Y (integer types only). */
8005 if (INTEGRAL_TYPE_P (type
)
8006 && TREE_CODE (op0
) == MULT_EXPR
8007 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
8008 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
8010 /* Be careful not to introduce new overflows. */
8012 if (TYPE_OVERFLOW_WRAPS (type
))
8015 mult_type
= unsigned_type_for (type
);
8017 if (TYPE_PRECISION (mult_type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
8019 tem
= fold_build2_loc (loc
, MULT_EXPR
, mult_type
,
8020 fold_convert_loc (loc
, mult_type
,
8021 TREE_OPERAND (op0
, 0)),
8022 fold_convert_loc (loc
, mult_type
,
8023 TREE_OPERAND (op0
, 1)));
8024 return fold_convert_loc (loc
, type
, tem
);
8028 tem
= fold_convert_const (code
, type
, op0
);
8029 return tem
? tem
: NULL_TREE
;
8031 case ADDR_SPACE_CONVERT_EXPR
:
8032 if (integer_zerop (arg0
))
8033 return fold_convert_const (code
, type
, arg0
);
8036 case FIXED_CONVERT_EXPR
:
8037 tem
= fold_convert_const (code
, type
, arg0
);
8038 return tem
? tem
: NULL_TREE
;
8040 case VIEW_CONVERT_EXPR
:
8041 if (TREE_TYPE (op0
) == type
)
8043 if (TREE_CODE (op0
) == VIEW_CONVERT_EXPR
)
8044 return fold_build1_loc (loc
, VIEW_CONVERT_EXPR
,
8045 type
, TREE_OPERAND (op0
, 0));
8046 if (TREE_CODE (op0
) == MEM_REF
)
8047 return fold_build2_loc (loc
, MEM_REF
, type
,
8048 TREE_OPERAND (op0
, 0), TREE_OPERAND (op0
, 1));
8050 /* For integral conversions with the same precision or pointer
8051 conversions use a NOP_EXPR instead. */
8052 if ((INTEGRAL_TYPE_P (type
)
8053 || POINTER_TYPE_P (type
))
8054 && (INTEGRAL_TYPE_P (TREE_TYPE (op0
))
8055 || POINTER_TYPE_P (TREE_TYPE (op0
)))
8056 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (op0
)))
8057 return fold_convert_loc (loc
, type
, op0
);
8059 /* Strip inner integral conversions that do not change the precision. */
8060 if (CONVERT_EXPR_P (op0
)
8061 && (INTEGRAL_TYPE_P (TREE_TYPE (op0
))
8062 || POINTER_TYPE_P (TREE_TYPE (op0
)))
8063 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0
, 0)))
8064 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0
, 0))))
8065 && (TYPE_PRECISION (TREE_TYPE (op0
))
8066 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0
, 0)))))
8067 return fold_build1_loc (loc
, VIEW_CONVERT_EXPR
,
8068 type
, TREE_OPERAND (op0
, 0));
8070 return fold_view_convert_expr (type
, op0
);
8073 tem
= fold_negate_expr (loc
, arg0
);
8075 return fold_convert_loc (loc
, type
, tem
);
8079 if (TREE_CODE (arg0
) == INTEGER_CST
|| TREE_CODE (arg0
) == REAL_CST
)
8080 return fold_abs_const (arg0
, type
);
8081 else if (TREE_CODE (arg0
) == NEGATE_EXPR
)
8082 return fold_build1_loc (loc
, ABS_EXPR
, type
, TREE_OPERAND (arg0
, 0));
8083 /* Convert fabs((double)float) into (double)fabsf(float). */
8084 else if (TREE_CODE (arg0
) == NOP_EXPR
8085 && TREE_CODE (type
) == REAL_TYPE
)
8087 tree targ0
= strip_float_extensions (arg0
);
8089 return fold_convert_loc (loc
, type
,
8090 fold_build1_loc (loc
, ABS_EXPR
,
8094 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8095 else if (TREE_CODE (arg0
) == ABS_EXPR
)
8097 else if (tree_expr_nonnegative_p (arg0
))
8100 /* Strip sign ops from argument. */
8101 if (TREE_CODE (type
) == REAL_TYPE
)
8103 tem
= fold_strip_sign_ops (arg0
);
8105 return fold_build1_loc (loc
, ABS_EXPR
, type
,
8106 fold_convert_loc (loc
, type
, tem
));
8111 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
8112 return fold_convert_loc (loc
, type
, arg0
);
8113 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
8115 tree itype
= TREE_TYPE (type
);
8116 tree rpart
= fold_convert_loc (loc
, itype
, TREE_OPERAND (arg0
, 0));
8117 tree ipart
= fold_convert_loc (loc
, itype
, TREE_OPERAND (arg0
, 1));
8118 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
,
8119 negate_expr (ipart
));
8121 if (TREE_CODE (arg0
) == COMPLEX_CST
)
8123 tree itype
= TREE_TYPE (type
);
8124 tree rpart
= fold_convert_loc (loc
, itype
, TREE_REALPART (arg0
));
8125 tree ipart
= fold_convert_loc (loc
, itype
, TREE_IMAGPART (arg0
));
8126 return build_complex (type
, rpart
, negate_expr (ipart
));
8128 if (TREE_CODE (arg0
) == CONJ_EXPR
)
8129 return fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
8133 if (TREE_CODE (arg0
) == INTEGER_CST
)
8134 return fold_not_const (arg0
, type
);
8135 else if (TREE_CODE (arg0
) == BIT_NOT_EXPR
)
8136 return fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
8137 /* Convert ~ (-A) to A - 1. */
8138 else if (INTEGRAL_TYPE_P (type
) && TREE_CODE (arg0
) == NEGATE_EXPR
)
8139 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
8140 fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0)),
8141 build_int_cst (type
, 1));
8142 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8143 else if (INTEGRAL_TYPE_P (type
)
8144 && ((TREE_CODE (arg0
) == MINUS_EXPR
8145 && integer_onep (TREE_OPERAND (arg0
, 1)))
8146 || (TREE_CODE (arg0
) == PLUS_EXPR
8147 && integer_all_onesp (TREE_OPERAND (arg0
, 1)))))
8148 return fold_build1_loc (loc
, NEGATE_EXPR
, type
,
8149 fold_convert_loc (loc
, type
,
8150 TREE_OPERAND (arg0
, 0)));
8151 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8152 else if (TREE_CODE (arg0
) == BIT_XOR_EXPR
8153 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
8154 fold_convert_loc (loc
, type
,
8155 TREE_OPERAND (arg0
, 0)))))
8156 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
, tem
,
8157 fold_convert_loc (loc
, type
,
8158 TREE_OPERAND (arg0
, 1)));
8159 else if (TREE_CODE (arg0
) == BIT_XOR_EXPR
8160 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
8161 fold_convert_loc (loc
, type
,
8162 TREE_OPERAND (arg0
, 1)))))
8163 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
,
8164 fold_convert_loc (loc
, type
,
8165 TREE_OPERAND (arg0
, 0)), tem
);
8166 /* Perform BIT_NOT_EXPR on each element individually. */
8167 else if (TREE_CODE (arg0
) == VECTOR_CST
)
8171 unsigned count
= VECTOR_CST_NELTS (arg0
), i
;
8173 elements
= XALLOCAVEC (tree
, count
);
8174 for (i
= 0; i
< count
; i
++)
8176 elem
= VECTOR_CST_ELT (arg0
, i
);
8177 elem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (type
), elem
);
8178 if (elem
== NULL_TREE
)
8183 return build_vector (type
, elements
);
8188 case TRUTH_NOT_EXPR
:
8189 /* The argument to invert_truthvalue must have Boolean type. */
8190 if (TREE_CODE (TREE_TYPE (arg0
)) != BOOLEAN_TYPE
)
8191 arg0
= fold_convert_loc (loc
, boolean_type_node
, arg0
);
8193 /* Note that the operand of this must be an int
8194 and its values must be 0 or 1.
8195 ("true" is a fixed value perhaps depending on the language,
8196 but we don't handle values other than 1 correctly yet.) */
8197 tem
= fold_truth_not_expr (loc
, arg0
);
8200 return fold_convert_loc (loc
, type
, tem
);
8203 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
8204 return fold_convert_loc (loc
, type
, arg0
);
8205 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
8206 return omit_one_operand_loc (loc
, type
, TREE_OPERAND (arg0
, 0),
8207 TREE_OPERAND (arg0
, 1));
8208 if (TREE_CODE (arg0
) == COMPLEX_CST
)
8209 return fold_convert_loc (loc
, type
, TREE_REALPART (arg0
));
8210 if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8212 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8213 tem
= fold_build2_loc (loc
, TREE_CODE (arg0
), itype
,
8214 fold_build1_loc (loc
, REALPART_EXPR
, itype
,
8215 TREE_OPERAND (arg0
, 0)),
8216 fold_build1_loc (loc
, REALPART_EXPR
, itype
,
8217 TREE_OPERAND (arg0
, 1)));
8218 return fold_convert_loc (loc
, type
, tem
);
8220 if (TREE_CODE (arg0
) == CONJ_EXPR
)
8222 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8223 tem
= fold_build1_loc (loc
, REALPART_EXPR
, itype
,
8224 TREE_OPERAND (arg0
, 0));
8225 return fold_convert_loc (loc
, type
, tem
);
8227 if (TREE_CODE (arg0
) == CALL_EXPR
)
8229 tree fn
= get_callee_fndecl (arg0
);
8230 if (fn
&& DECL_BUILT_IN_CLASS (fn
) == BUILT_IN_NORMAL
)
8231 switch (DECL_FUNCTION_CODE (fn
))
8233 CASE_FLT_FN (BUILT_IN_CEXPI
):
8234 fn
= mathfn_built_in (type
, BUILT_IN_COS
);
8236 return build_call_expr_loc (loc
, fn
, 1, CALL_EXPR_ARG (arg0
, 0));
8246 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
8247 return build_zero_cst (type
);
8248 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
8249 return omit_one_operand_loc (loc
, type
, TREE_OPERAND (arg0
, 1),
8250 TREE_OPERAND (arg0
, 0));
8251 if (TREE_CODE (arg0
) == COMPLEX_CST
)
8252 return fold_convert_loc (loc
, type
, TREE_IMAGPART (arg0
));
8253 if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8255 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8256 tem
= fold_build2_loc (loc
, TREE_CODE (arg0
), itype
,
8257 fold_build1_loc (loc
, IMAGPART_EXPR
, itype
,
8258 TREE_OPERAND (arg0
, 0)),
8259 fold_build1_loc (loc
, IMAGPART_EXPR
, itype
,
8260 TREE_OPERAND (arg0
, 1)));
8261 return fold_convert_loc (loc
, type
, tem
);
8263 if (TREE_CODE (arg0
) == CONJ_EXPR
)
8265 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8266 tem
= fold_build1_loc (loc
, IMAGPART_EXPR
, itype
, TREE_OPERAND (arg0
, 0));
8267 return fold_convert_loc (loc
, type
, negate_expr (tem
));
8269 if (TREE_CODE (arg0
) == CALL_EXPR
)
8271 tree fn
= get_callee_fndecl (arg0
);
8272 if (fn
&& DECL_BUILT_IN_CLASS (fn
) == BUILT_IN_NORMAL
)
8273 switch (DECL_FUNCTION_CODE (fn
))
8275 CASE_FLT_FN (BUILT_IN_CEXPI
):
8276 fn
= mathfn_built_in (type
, BUILT_IN_SIN
);
8278 return build_call_expr_loc (loc
, fn
, 1, CALL_EXPR_ARG (arg0
, 0));
8288 /* Fold *&X to X if X is an lvalue. */
8289 if (TREE_CODE (op0
) == ADDR_EXPR
)
8291 tree op00
= TREE_OPERAND (op0
, 0);
8292 if ((TREE_CODE (op00
) == VAR_DECL
8293 || TREE_CODE (op00
) == PARM_DECL
8294 || TREE_CODE (op00
) == RESULT_DECL
)
8295 && !TREE_READONLY (op00
))
8300 case VEC_UNPACK_LO_EXPR
:
8301 case VEC_UNPACK_HI_EXPR
:
8302 case VEC_UNPACK_FLOAT_LO_EXPR
:
8303 case VEC_UNPACK_FLOAT_HI_EXPR
:
8305 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
8307 enum tree_code subcode
;
8309 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)) == nelts
* 2);
8310 if (TREE_CODE (arg0
) != VECTOR_CST
)
8313 elts
= XALLOCAVEC (tree
, nelts
* 2);
8314 if (!vec_cst_ctor_to_array (arg0
, elts
))
8317 if ((!BYTES_BIG_ENDIAN
) ^ (code
== VEC_UNPACK_LO_EXPR
8318 || code
== VEC_UNPACK_FLOAT_LO_EXPR
))
8321 if (code
== VEC_UNPACK_LO_EXPR
|| code
== VEC_UNPACK_HI_EXPR
)
8324 subcode
= FLOAT_EXPR
;
8326 for (i
= 0; i
< nelts
; i
++)
8328 elts
[i
] = fold_convert_const (subcode
, TREE_TYPE (type
), elts
[i
]);
8329 if (elts
[i
] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[i
]))
8333 return build_vector (type
, elts
);
8338 } /* switch (code) */
8342 /* If the operation was a conversion do _not_ mark a resulting constant
8343 with TREE_OVERFLOW if the original constant was not. These conversions
8344 have implementation defined behavior and retaining the TREE_OVERFLOW
8345 flag here would confuse later passes such as VRP. */
8347 fold_unary_ignore_overflow_loc (location_t loc
, enum tree_code code
,
8348 tree type
, tree op0
)
8350 tree res
= fold_unary_loc (loc
, code
, type
, op0
);
8352 && TREE_CODE (res
) == INTEGER_CST
8353 && TREE_CODE (op0
) == INTEGER_CST
8354 && CONVERT_EXPR_CODE_P (code
))
8355 TREE_OVERFLOW (res
) = TREE_OVERFLOW (op0
);
8360 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8361 operands OP0 and OP1. LOC is the location of the resulting expression.
8362 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8363 Return the folded expression if folding is successful. Otherwise,
8364 return NULL_TREE. */
8366 fold_truth_andor (location_t loc
, enum tree_code code
, tree type
,
8367 tree arg0
, tree arg1
, tree op0
, tree op1
)
8371 /* We only do these simplifications if we are optimizing. */
8375 /* Check for things like (A || B) && (A || C). We can convert this
8376 to A || (B && C). Note that either operator can be any of the four
8377 truth and/or operations and the transformation will still be
8378 valid. Also note that we only care about order for the
8379 ANDIF and ORIF operators. If B contains side effects, this
8380 might change the truth-value of A. */
8381 if (TREE_CODE (arg0
) == TREE_CODE (arg1
)
8382 && (TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
8383 || TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
8384 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
8385 || TREE_CODE (arg0
) == TRUTH_OR_EXPR
)
8386 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0
, 1)))
8388 tree a00
= TREE_OPERAND (arg0
, 0);
8389 tree a01
= TREE_OPERAND (arg0
, 1);
8390 tree a10
= TREE_OPERAND (arg1
, 0);
8391 tree a11
= TREE_OPERAND (arg1
, 1);
8392 int commutative
= ((TREE_CODE (arg0
) == TRUTH_OR_EXPR
8393 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
)
8394 && (code
== TRUTH_AND_EXPR
8395 || code
== TRUTH_OR_EXPR
));
8397 if (operand_equal_p (a00
, a10
, 0))
8398 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a00
,
8399 fold_build2_loc (loc
, code
, type
, a01
, a11
));
8400 else if (commutative
&& operand_equal_p (a00
, a11
, 0))
8401 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a00
,
8402 fold_build2_loc (loc
, code
, type
, a01
, a10
));
8403 else if (commutative
&& operand_equal_p (a01
, a10
, 0))
8404 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a01
,
8405 fold_build2_loc (loc
, code
, type
, a00
, a11
));
8407 /* This case if tricky because we must either have commutative
8408 operators or else A10 must not have side-effects. */
8410 else if ((commutative
|| ! TREE_SIDE_EFFECTS (a10
))
8411 && operand_equal_p (a01
, a11
, 0))
8412 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
,
8413 fold_build2_loc (loc
, code
, type
, a00
, a10
),
8417 /* See if we can build a range comparison. */
8418 if (0 != (tem
= fold_range_test (loc
, code
, type
, op0
, op1
)))
8421 if ((code
== TRUTH_ANDIF_EXPR
&& TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
)
8422 || (code
== TRUTH_ORIF_EXPR
&& TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
))
8424 tem
= merge_truthop_with_opposite_arm (loc
, arg0
, arg1
, true);
8426 return fold_build2_loc (loc
, code
, type
, tem
, arg1
);
8429 if ((code
== TRUTH_ANDIF_EXPR
&& TREE_CODE (arg1
) == TRUTH_ORIF_EXPR
)
8430 || (code
== TRUTH_ORIF_EXPR
&& TREE_CODE (arg1
) == TRUTH_ANDIF_EXPR
))
8432 tem
= merge_truthop_with_opposite_arm (loc
, arg1
, arg0
, false);
8434 return fold_build2_loc (loc
, code
, type
, arg0
, tem
);
8437 /* Check for the possibility of merging component references. If our
8438 lhs is another similar operation, try to merge its rhs with our
8439 rhs. Then try to merge our lhs and rhs. */
8440 if (TREE_CODE (arg0
) == code
8441 && 0 != (tem
= fold_truth_andor_1 (loc
, code
, type
,
8442 TREE_OPERAND (arg0
, 1), arg1
)))
8443 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
8445 if ((tem
= fold_truth_andor_1 (loc
, code
, type
, arg0
, arg1
)) != 0)
8448 if ((BRANCH_COST (optimize_function_for_speed_p (cfun
),
8450 && LOGICAL_OP_NON_SHORT_CIRCUIT
8451 && (code
== TRUTH_AND_EXPR
8452 || code
== TRUTH_ANDIF_EXPR
8453 || code
== TRUTH_OR_EXPR
8454 || code
== TRUTH_ORIF_EXPR
))
8456 enum tree_code ncode
, icode
;
8458 ncode
= (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_AND_EXPR
)
8459 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
;
8460 icode
= ncode
== TRUTH_AND_EXPR
? TRUTH_ANDIF_EXPR
: TRUTH_ORIF_EXPR
;
8462 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8463 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8464 We don't want to pack more than two leafs to a non-IF AND/OR
8466 If tree-code of left-hand operand isn't an AND/OR-IF code and not
8467 equal to IF-CODE, then we don't want to add right-hand operand.
8468 If the inner right-hand side of left-hand operand has
8469 side-effects, or isn't simple, then we can't add to it,
8470 as otherwise we might destroy if-sequence. */
8471 if (TREE_CODE (arg0
) == icode
8472 && simple_operand_p_2 (arg1
)
8473 /* Needed for sequence points to handle trappings, and
8475 && simple_operand_p_2 (TREE_OPERAND (arg0
, 1)))
8477 tem
= fold_build2_loc (loc
, ncode
, type
, TREE_OPERAND (arg0
, 1),
8479 return fold_build2_loc (loc
, icode
, type
, TREE_OPERAND (arg0
, 0),
8482 /* Same as abouve but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8483 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
8484 else if (TREE_CODE (arg1
) == icode
8485 && simple_operand_p_2 (arg0
)
8486 /* Needed for sequence points to handle trappings, and
8488 && simple_operand_p_2 (TREE_OPERAND (arg1
, 0)))
8490 tem
= fold_build2_loc (loc
, ncode
, type
,
8491 arg0
, TREE_OPERAND (arg1
, 0));
8492 return fold_build2_loc (loc
, icode
, type
, tem
,
8493 TREE_OPERAND (arg1
, 1));
8495 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8497 For sequence point consistancy, we need to check for trapping,
8498 and side-effects. */
8499 else if (code
== icode
&& simple_operand_p_2 (arg0
)
8500 && simple_operand_p_2 (arg1
))
8501 return fold_build2_loc (loc
, ncode
, type
, arg0
, arg1
);
8507 /* Fold a binary expression of code CODE and type TYPE with operands
8508 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8509 Return the folded expression if folding is successful. Otherwise,
8510 return NULL_TREE. */
8513 fold_minmax (location_t loc
, enum tree_code code
, tree type
, tree op0
, tree op1
)
8515 enum tree_code compl_code
;
8517 if (code
== MIN_EXPR
)
8518 compl_code
= MAX_EXPR
;
8519 else if (code
== MAX_EXPR
)
8520 compl_code
= MIN_EXPR
;
8524 /* MIN (MAX (a, b), b) == b. */
8525 if (TREE_CODE (op0
) == compl_code
8526 && operand_equal_p (TREE_OPERAND (op0
, 1), op1
, 0))
8527 return omit_one_operand_loc (loc
, type
, op1
, TREE_OPERAND (op0
, 0));
8529 /* MIN (MAX (b, a), b) == b. */
8530 if (TREE_CODE (op0
) == compl_code
8531 && operand_equal_p (TREE_OPERAND (op0
, 0), op1
, 0)
8532 && reorder_operands_p (TREE_OPERAND (op0
, 1), op1
))
8533 return omit_one_operand_loc (loc
, type
, op1
, TREE_OPERAND (op0
, 1));
8535 /* MIN (a, MAX (a, b)) == a. */
8536 if (TREE_CODE (op1
) == compl_code
8537 && operand_equal_p (op0
, TREE_OPERAND (op1
, 0), 0)
8538 && reorder_operands_p (op0
, TREE_OPERAND (op1
, 1)))
8539 return omit_one_operand_loc (loc
, type
, op0
, TREE_OPERAND (op1
, 1));
8541 /* MIN (a, MAX (b, a)) == a. */
8542 if (TREE_CODE (op1
) == compl_code
8543 && operand_equal_p (op0
, TREE_OPERAND (op1
, 1), 0)
8544 && reorder_operands_p (op0
, TREE_OPERAND (op1
, 0)))
8545 return omit_one_operand_loc (loc
, type
, op0
, TREE_OPERAND (op1
, 0));
8550 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8551 by changing CODE to reduce the magnitude of constants involved in
8552 ARG0 of the comparison.
8553 Returns a canonicalized comparison tree if a simplification was
8554 possible, otherwise returns NULL_TREE.
8555 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8556 valid if signed overflow is undefined. */
8559 maybe_canonicalize_comparison_1 (location_t loc
, enum tree_code code
, tree type
,
8560 tree arg0
, tree arg1
,
8561 bool *strict_overflow_p
)
8563 enum tree_code code0
= TREE_CODE (arg0
);
8564 tree t
, cst0
= NULL_TREE
;
8568 /* Match A +- CST code arg1 and CST code arg1. We can change the
8569 first form only if overflow is undefined. */
8570 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))
8571 /* In principle pointers also have undefined overflow behavior,
8572 but that causes problems elsewhere. */
8573 && !POINTER_TYPE_P (TREE_TYPE (arg0
))
8574 && (code0
== MINUS_EXPR
8575 || code0
== PLUS_EXPR
)
8576 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
8577 || code0
== INTEGER_CST
))
8580 /* Identify the constant in arg0 and its sign. */
8581 if (code0
== INTEGER_CST
)
8584 cst0
= TREE_OPERAND (arg0
, 1);
8585 sgn0
= tree_int_cst_sgn (cst0
);
8587 /* Overflowed constants and zero will cause problems. */
8588 if (integer_zerop (cst0
)
8589 || TREE_OVERFLOW (cst0
))
8592 /* See if we can reduce the magnitude of the constant in
8593 arg0 by changing the comparison code. */
8594 if (code0
== INTEGER_CST
)
8596 /* CST <= arg1 -> CST-1 < arg1. */
8597 if (code
== LE_EXPR
&& sgn0
== 1)
8599 /* -CST < arg1 -> -CST-1 <= arg1. */
8600 else if (code
== LT_EXPR
&& sgn0
== -1)
8602 /* CST > arg1 -> CST-1 >= arg1. */
8603 else if (code
== GT_EXPR
&& sgn0
== 1)
8605 /* -CST >= arg1 -> -CST-1 > arg1. */
8606 else if (code
== GE_EXPR
&& sgn0
== -1)
8610 /* arg1 code' CST' might be more canonical. */
8615 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8617 && code0
== ((sgn0
== -1) ? PLUS_EXPR
: MINUS_EXPR
))
8619 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8620 else if (code
== GT_EXPR
8621 && code0
== ((sgn0
== -1) ? MINUS_EXPR
: PLUS_EXPR
))
8623 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8624 else if (code
== LE_EXPR
8625 && code0
== ((sgn0
== -1) ? MINUS_EXPR
: PLUS_EXPR
))
8627 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8628 else if (code
== GE_EXPR
8629 && code0
== ((sgn0
== -1) ? PLUS_EXPR
: MINUS_EXPR
))
8633 *strict_overflow_p
= true;
8636 /* Now build the constant reduced in magnitude. But not if that
8637 would produce one outside of its types range. */
8638 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0
))
8640 && TYPE_MIN_VALUE (TREE_TYPE (cst0
))
8641 && tree_int_cst_equal (cst0
, TYPE_MIN_VALUE (TREE_TYPE (cst0
))))
8643 && TYPE_MAX_VALUE (TREE_TYPE (cst0
))
8644 && tree_int_cst_equal (cst0
, TYPE_MAX_VALUE (TREE_TYPE (cst0
))))))
8645 /* We cannot swap the comparison here as that would cause us to
8646 endlessly recurse. */
8649 t
= int_const_binop (sgn0
== -1 ? PLUS_EXPR
: MINUS_EXPR
,
8650 cst0
, build_int_cst (TREE_TYPE (cst0
), 1));
8651 if (code0
!= INTEGER_CST
)
8652 t
= fold_build2_loc (loc
, code0
, TREE_TYPE (arg0
), TREE_OPERAND (arg0
, 0), t
);
8653 t
= fold_convert (TREE_TYPE (arg1
), t
);
8655 /* If swapping might yield to a more canonical form, do so. */
8657 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
, arg1
, t
);
8659 return fold_build2_loc (loc
, code
, type
, t
, arg1
);
8662 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8663 overflow further. Try to decrease the magnitude of constants involved
8664 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8665 and put sole constants at the second argument position.
8666 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8669 maybe_canonicalize_comparison (location_t loc
, enum tree_code code
, tree type
,
8670 tree arg0
, tree arg1
)
8673 bool strict_overflow_p
;
8674 const char * const warnmsg
= G_("assuming signed overflow does not occur "
8675 "when reducing constant in comparison");
8677 /* Try canonicalization by simplifying arg0. */
8678 strict_overflow_p
= false;
8679 t
= maybe_canonicalize_comparison_1 (loc
, code
, type
, arg0
, arg1
,
8680 &strict_overflow_p
);
8683 if (strict_overflow_p
)
8684 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MAGNITUDE
);
8688 /* Try canonicalization by simplifying arg1 using the swapped
8690 code
= swap_tree_comparison (code
);
8691 strict_overflow_p
= false;
8692 t
= maybe_canonicalize_comparison_1 (loc
, code
, type
, arg1
, arg0
,
8693 &strict_overflow_p
);
8694 if (t
&& strict_overflow_p
)
8695 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MAGNITUDE
);
8699 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8700 space. This is used to avoid issuing overflow warnings for
8701 expressions like &p->x which can not wrap. */
8704 pointer_may_wrap_p (tree base
, tree offset
, HOST_WIDE_INT bitpos
)
8706 unsigned HOST_WIDE_INT offset_low
, total_low
;
8707 HOST_WIDE_INT size
, offset_high
, total_high
;
8709 if (!POINTER_TYPE_P (TREE_TYPE (base
)))
8715 if (offset
== NULL_TREE
)
8720 else if (TREE_CODE (offset
) != INTEGER_CST
|| TREE_OVERFLOW (offset
))
8724 offset_low
= TREE_INT_CST_LOW (offset
);
8725 offset_high
= TREE_INT_CST_HIGH (offset
);
8728 if (add_double_with_sign (offset_low
, offset_high
,
8729 bitpos
/ BITS_PER_UNIT
, 0,
8730 &total_low
, &total_high
,
8734 if (total_high
!= 0)
8737 size
= int_size_in_bytes (TREE_TYPE (TREE_TYPE (base
)));
8741 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8743 if (TREE_CODE (base
) == ADDR_EXPR
)
8745 HOST_WIDE_INT base_size
;
8747 base_size
= int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base
, 0)));
8748 if (base_size
> 0 && size
< base_size
)
8752 return total_low
> (unsigned HOST_WIDE_INT
) size
;
8755 /* Subroutine of fold_binary. This routine performs all of the
8756 transformations that are common to the equality/inequality
8757 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8758 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8759 fold_binary should call fold_binary. Fold a comparison with
8760 tree code CODE and type TYPE with operands OP0 and OP1. Return
8761 the folded comparison or NULL_TREE. */
8764 fold_comparison (location_t loc
, enum tree_code code
, tree type
,
8767 tree arg0
, arg1
, tem
;
8772 STRIP_SIGN_NOPS (arg0
);
8773 STRIP_SIGN_NOPS (arg1
);
8775 tem
= fold_relational_const (code
, type
, arg0
, arg1
);
8776 if (tem
!= NULL_TREE
)
8779 /* If one arg is a real or integer constant, put it last. */
8780 if (tree_swap_operands_p (arg0
, arg1
, true))
8781 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
, op1
, op0
);
8783 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8784 if ((TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8785 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8786 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1))
8787 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
8788 && (TREE_CODE (arg1
) == INTEGER_CST
8789 && !TREE_OVERFLOW (arg1
)))
8791 tree const1
= TREE_OPERAND (arg0
, 1);
8793 tree variable
= TREE_OPERAND (arg0
, 0);
8796 lhs_add
= TREE_CODE (arg0
) != PLUS_EXPR
;
8798 lhs
= fold_build2_loc (loc
, lhs_add
? PLUS_EXPR
: MINUS_EXPR
,
8799 TREE_TYPE (arg1
), const2
, const1
);
8801 /* If the constant operation overflowed this can be
8802 simplified as a comparison against INT_MAX/INT_MIN. */
8803 if (TREE_CODE (lhs
) == INTEGER_CST
8804 && TREE_OVERFLOW (lhs
))
8806 int const1_sgn
= tree_int_cst_sgn (const1
);
8807 enum tree_code code2
= code
;
8809 /* Get the sign of the constant on the lhs if the
8810 operation were VARIABLE + CONST1. */
8811 if (TREE_CODE (arg0
) == MINUS_EXPR
)
8812 const1_sgn
= -const1_sgn
;
8814 /* The sign of the constant determines if we overflowed
8815 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8816 Canonicalize to the INT_MIN overflow by swapping the comparison
8818 if (const1_sgn
== -1)
8819 code2
= swap_tree_comparison (code
);
8821 /* We now can look at the canonicalized case
8822 VARIABLE + 1 CODE2 INT_MIN
8823 and decide on the result. */
8824 if (code2
== LT_EXPR
8826 || code2
== EQ_EXPR
)
8827 return omit_one_operand_loc (loc
, type
, boolean_false_node
, variable
);
8828 else if (code2
== NE_EXPR
8830 || code2
== GT_EXPR
)
8831 return omit_one_operand_loc (loc
, type
, boolean_true_node
, variable
);
8834 if (TREE_CODE (lhs
) == TREE_CODE (arg1
)
8835 && (TREE_CODE (lhs
) != INTEGER_CST
8836 || !TREE_OVERFLOW (lhs
)))
8838 if (code
!= EQ_EXPR
&& code
!= NE_EXPR
)
8839 fold_overflow_warning ("assuming signed overflow does not occur "
8840 "when changing X +- C1 cmp C2 to "
8842 WARN_STRICT_OVERFLOW_COMPARISON
);
8843 return fold_build2_loc (loc
, code
, type
, variable
, lhs
);
8847 /* For comparisons of pointers we can decompose it to a compile time
8848 comparison of the base objects and the offsets into the object.
8849 This requires at least one operand being an ADDR_EXPR or a
8850 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8851 if (POINTER_TYPE_P (TREE_TYPE (arg0
))
8852 && (TREE_CODE (arg0
) == ADDR_EXPR
8853 || TREE_CODE (arg1
) == ADDR_EXPR
8854 || TREE_CODE (arg0
) == POINTER_PLUS_EXPR
8855 || TREE_CODE (arg1
) == POINTER_PLUS_EXPR
))
8857 tree base0
, base1
, offset0
= NULL_TREE
, offset1
= NULL_TREE
;
8858 HOST_WIDE_INT bitsize
, bitpos0
= 0, bitpos1
= 0;
8859 enum machine_mode mode
;
8860 int volatilep
, unsignedp
;
8861 bool indirect_base0
= false, indirect_base1
= false;
8863 /* Get base and offset for the access. Strip ADDR_EXPR for
8864 get_inner_reference, but put it back by stripping INDIRECT_REF
8865 off the base object if possible. indirect_baseN will be true
8866 if baseN is not an address but refers to the object itself. */
8868 if (TREE_CODE (arg0
) == ADDR_EXPR
)
8870 base0
= get_inner_reference (TREE_OPERAND (arg0
, 0),
8871 &bitsize
, &bitpos0
, &offset0
, &mode
,
8872 &unsignedp
, &volatilep
, false);
8873 if (TREE_CODE (base0
) == INDIRECT_REF
)
8874 base0
= TREE_OPERAND (base0
, 0);
8876 indirect_base0
= true;
8878 else if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
)
8880 base0
= TREE_OPERAND (arg0
, 0);
8881 STRIP_SIGN_NOPS (base0
);
8882 if (TREE_CODE (base0
) == ADDR_EXPR
)
8884 base0
= TREE_OPERAND (base0
, 0);
8885 indirect_base0
= true;
8887 offset0
= TREE_OPERAND (arg0
, 1);
8888 if (host_integerp (offset0
, 0))
8890 HOST_WIDE_INT off
= size_low_cst (offset0
);
8891 if ((HOST_WIDE_INT
) (((unsigned HOST_WIDE_INT
) off
)
8893 / BITS_PER_UNIT
== (HOST_WIDE_INT
) off
)
8895 bitpos0
= off
* BITS_PER_UNIT
;
8896 offset0
= NULL_TREE
;
8902 if (TREE_CODE (arg1
) == ADDR_EXPR
)
8904 base1
= get_inner_reference (TREE_OPERAND (arg1
, 0),
8905 &bitsize
, &bitpos1
, &offset1
, &mode
,
8906 &unsignedp
, &volatilep
, false);
8907 if (TREE_CODE (base1
) == INDIRECT_REF
)
8908 base1
= TREE_OPERAND (base1
, 0);
8910 indirect_base1
= true;
8912 else if (TREE_CODE (arg1
) == POINTER_PLUS_EXPR
)
8914 base1
= TREE_OPERAND (arg1
, 0);
8915 STRIP_SIGN_NOPS (base1
);
8916 if (TREE_CODE (base1
) == ADDR_EXPR
)
8918 base1
= TREE_OPERAND (base1
, 0);
8919 indirect_base1
= true;
8921 offset1
= TREE_OPERAND (arg1
, 1);
8922 if (host_integerp (offset1
, 0))
8924 HOST_WIDE_INT off
= size_low_cst (offset1
);
8925 if ((HOST_WIDE_INT
) (((unsigned HOST_WIDE_INT
) off
)
8927 / BITS_PER_UNIT
== (HOST_WIDE_INT
) off
)
8929 bitpos1
= off
* BITS_PER_UNIT
;
8930 offset1
= NULL_TREE
;
8935 /* A local variable can never be pointed to by
8936 the default SSA name of an incoming parameter. */
8937 if ((TREE_CODE (arg0
) == ADDR_EXPR
8939 && TREE_CODE (base0
) == VAR_DECL
8940 && auto_var_in_fn_p (base0
, current_function_decl
)
8942 && TREE_CODE (base1
) == SSA_NAME
8943 && SSA_NAME_IS_DEFAULT_DEF (base1
)
8944 && TREE_CODE (SSA_NAME_VAR (base1
)) == PARM_DECL
)
8945 || (TREE_CODE (arg1
) == ADDR_EXPR
8947 && TREE_CODE (base1
) == VAR_DECL
8948 && auto_var_in_fn_p (base1
, current_function_decl
)
8950 && TREE_CODE (base0
) == SSA_NAME
8951 && SSA_NAME_IS_DEFAULT_DEF (base0
)
8952 && TREE_CODE (SSA_NAME_VAR (base0
)) == PARM_DECL
))
8954 if (code
== NE_EXPR
)
8955 return constant_boolean_node (1, type
);
8956 else if (code
== EQ_EXPR
)
8957 return constant_boolean_node (0, type
);
8959 /* If we have equivalent bases we might be able to simplify. */
8960 else if (indirect_base0
== indirect_base1
8961 && operand_equal_p (base0
, base1
, 0))
8963 /* We can fold this expression to a constant if the non-constant
8964 offset parts are equal. */
8965 if ((offset0
== offset1
8966 || (offset0
&& offset1
8967 && operand_equal_p (offset0
, offset1
, 0)))
8970 || (indirect_base0
&& DECL_P (base0
))
8971 || POINTER_TYPE_OVERFLOW_UNDEFINED
))
8976 && bitpos0
!= bitpos1
8977 && (pointer_may_wrap_p (base0
, offset0
, bitpos0
)
8978 || pointer_may_wrap_p (base1
, offset1
, bitpos1
)))
8979 fold_overflow_warning (("assuming pointer wraparound does not "
8980 "occur when comparing P +- C1 with "
8982 WARN_STRICT_OVERFLOW_CONDITIONAL
);
8987 return constant_boolean_node (bitpos0
== bitpos1
, type
);
8989 return constant_boolean_node (bitpos0
!= bitpos1
, type
);
8991 return constant_boolean_node (bitpos0
< bitpos1
, type
);
8993 return constant_boolean_node (bitpos0
<= bitpos1
, type
);
8995 return constant_boolean_node (bitpos0
>= bitpos1
, type
);
8997 return constant_boolean_node (bitpos0
> bitpos1
, type
);
9001 /* We can simplify the comparison to a comparison of the variable
9002 offset parts if the constant offset parts are equal.
9003 Be careful to use signed size type here because otherwise we
9004 mess with array offsets in the wrong way. This is possible
9005 because pointer arithmetic is restricted to retain within an
9006 object and overflow on pointer differences is undefined as of
9007 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9008 else if (bitpos0
== bitpos1
9009 && ((code
== EQ_EXPR
|| code
== NE_EXPR
)
9010 || (indirect_base0
&& DECL_P (base0
))
9011 || POINTER_TYPE_OVERFLOW_UNDEFINED
))
9013 /* By converting to signed size type we cover middle-end pointer
9014 arithmetic which operates on unsigned pointer types of size
9015 type size and ARRAY_REF offsets which are properly sign or
9016 zero extended from their type in case it is narrower than
9018 if (offset0
== NULL_TREE
)
9019 offset0
= build_int_cst (ssizetype
, 0);
9021 offset0
= fold_convert_loc (loc
, ssizetype
, offset0
);
9022 if (offset1
== NULL_TREE
)
9023 offset1
= build_int_cst (ssizetype
, 0);
9025 offset1
= fold_convert_loc (loc
, ssizetype
, offset1
);
9029 && (pointer_may_wrap_p (base0
, offset0
, bitpos0
)
9030 || pointer_may_wrap_p (base1
, offset1
, bitpos1
)))
9031 fold_overflow_warning (("assuming pointer wraparound does not "
9032 "occur when comparing P +- C1 with "
9034 WARN_STRICT_OVERFLOW_COMPARISON
);
9036 return fold_build2_loc (loc
, code
, type
, offset0
, offset1
);
9039 /* For non-equal bases we can simplify if they are addresses
9040 of local binding decls or constants. */
9041 else if (indirect_base0
&& indirect_base1
9042 /* We know that !operand_equal_p (base0, base1, 0)
9043 because the if condition was false. But make
9044 sure two decls are not the same. */
9046 && TREE_CODE (arg0
) == ADDR_EXPR
9047 && TREE_CODE (arg1
) == ADDR_EXPR
9048 && (((TREE_CODE (base0
) == VAR_DECL
9049 || TREE_CODE (base0
) == PARM_DECL
)
9050 && (targetm
.binds_local_p (base0
)
9051 || CONSTANT_CLASS_P (base1
)))
9052 || CONSTANT_CLASS_P (base0
))
9053 && (((TREE_CODE (base1
) == VAR_DECL
9054 || TREE_CODE (base1
) == PARM_DECL
)
9055 && (targetm
.binds_local_p (base1
)
9056 || CONSTANT_CLASS_P (base0
)))
9057 || CONSTANT_CLASS_P (base1
)))
9059 if (code
== EQ_EXPR
)
9060 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
9062 else if (code
== NE_EXPR
)
9063 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
9066 /* For equal offsets we can simplify to a comparison of the
9068 else if (bitpos0
== bitpos1
9070 ? base0
!= TREE_OPERAND (arg0
, 0) : base0
!= arg0
)
9072 ? base1
!= TREE_OPERAND (arg1
, 0) : base1
!= arg1
)
9073 && ((offset0
== offset1
)
9074 || (offset0
&& offset1
9075 && operand_equal_p (offset0
, offset1
, 0))))
9078 base0
= build_fold_addr_expr_loc (loc
, base0
);
9080 base1
= build_fold_addr_expr_loc (loc
, base1
);
9081 return fold_build2_loc (loc
, code
, type
, base0
, base1
);
9085 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9086 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9087 the resulting offset is smaller in absolute value than the
9089 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))
9090 && (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
9091 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
9092 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1)))
9093 && (TREE_CODE (arg1
) == PLUS_EXPR
|| TREE_CODE (arg1
) == MINUS_EXPR
)
9094 && (TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
9095 && !TREE_OVERFLOW (TREE_OPERAND (arg1
, 1))))
9097 tree const1
= TREE_OPERAND (arg0
, 1);
9098 tree const2
= TREE_OPERAND (arg1
, 1);
9099 tree variable1
= TREE_OPERAND (arg0
, 0);
9100 tree variable2
= TREE_OPERAND (arg1
, 0);
9102 const char * const warnmsg
= G_("assuming signed overflow does not "
9103 "occur when combining constants around "
9106 /* Put the constant on the side where it doesn't overflow and is
9107 of lower absolute value than before. */
9108 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
9109 ? MINUS_EXPR
: PLUS_EXPR
,
9111 if (!TREE_OVERFLOW (cst
)
9112 && tree_int_cst_compare (const2
, cst
) == tree_int_cst_sgn (const2
))
9114 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
9115 return fold_build2_loc (loc
, code
, type
,
9117 fold_build2_loc (loc
,
9118 TREE_CODE (arg1
), TREE_TYPE (arg1
),
9122 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
9123 ? MINUS_EXPR
: PLUS_EXPR
,
9125 if (!TREE_OVERFLOW (cst
)
9126 && tree_int_cst_compare (const1
, cst
) == tree_int_cst_sgn (const1
))
9128 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
9129 return fold_build2_loc (loc
, code
, type
,
9130 fold_build2_loc (loc
, TREE_CODE (arg0
), TREE_TYPE (arg0
),
9136 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9137 signed arithmetic case. That form is created by the compiler
9138 often enough for folding it to be of value. One example is in
9139 computing loop trip counts after Operator Strength Reduction. */
9140 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))
9141 && TREE_CODE (arg0
) == MULT_EXPR
9142 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
9143 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1)))
9144 && integer_zerop (arg1
))
9146 tree const1
= TREE_OPERAND (arg0
, 1);
9147 tree const2
= arg1
; /* zero */
9148 tree variable1
= TREE_OPERAND (arg0
, 0);
9149 enum tree_code cmp_code
= code
;
9151 /* Handle unfolded multiplication by zero. */
9152 if (integer_zerop (const1
))
9153 return fold_build2_loc (loc
, cmp_code
, type
, const1
, const2
);
9155 fold_overflow_warning (("assuming signed overflow does not occur when "
9156 "eliminating multiplication in comparison "
9158 WARN_STRICT_OVERFLOW_COMPARISON
);
9160 /* If const1 is negative we swap the sense of the comparison. */
9161 if (tree_int_cst_sgn (const1
) < 0)
9162 cmp_code
= swap_tree_comparison (cmp_code
);
9164 return fold_build2_loc (loc
, cmp_code
, type
, variable1
, const2
);
9167 tem
= maybe_canonicalize_comparison (loc
, code
, type
, arg0
, arg1
);
9171 if (FLOAT_TYPE_P (TREE_TYPE (arg0
)))
9173 tree targ0
= strip_float_extensions (arg0
);
9174 tree targ1
= strip_float_extensions (arg1
);
9175 tree newtype
= TREE_TYPE (targ0
);
9177 if (TYPE_PRECISION (TREE_TYPE (targ1
)) > TYPE_PRECISION (newtype
))
9178 newtype
= TREE_TYPE (targ1
);
9180 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9181 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (TREE_TYPE (arg0
)))
9182 return fold_build2_loc (loc
, code
, type
,
9183 fold_convert_loc (loc
, newtype
, targ0
),
9184 fold_convert_loc (loc
, newtype
, targ1
));
9186 /* (-a) CMP (-b) -> b CMP a */
9187 if (TREE_CODE (arg0
) == NEGATE_EXPR
9188 && TREE_CODE (arg1
) == NEGATE_EXPR
)
9189 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg1
, 0),
9190 TREE_OPERAND (arg0
, 0));
9192 if (TREE_CODE (arg1
) == REAL_CST
)
9194 REAL_VALUE_TYPE cst
;
9195 cst
= TREE_REAL_CST (arg1
);
9197 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9198 if (TREE_CODE (arg0
) == NEGATE_EXPR
)
9199 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
,
9200 TREE_OPERAND (arg0
, 0),
9201 build_real (TREE_TYPE (arg1
),
9202 real_value_negate (&cst
)));
9204 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9205 /* a CMP (-0) -> a CMP 0 */
9206 if (REAL_VALUE_MINUS_ZERO (cst
))
9207 return fold_build2_loc (loc
, code
, type
, arg0
,
9208 build_real (TREE_TYPE (arg1
), dconst0
));
9210 /* x != NaN is always true, other ops are always false. */
9211 if (REAL_VALUE_ISNAN (cst
)
9212 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1
))))
9214 tem
= (code
== NE_EXPR
) ? integer_one_node
: integer_zero_node
;
9215 return omit_one_operand_loc (loc
, type
, tem
, arg0
);
9218 /* Fold comparisons against infinity. */
9219 if (REAL_VALUE_ISINF (cst
)
9220 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1
))))
9222 tem
= fold_inf_compare (loc
, code
, type
, arg0
, arg1
);
9223 if (tem
!= NULL_TREE
)
9228 /* If this is a comparison of a real constant with a PLUS_EXPR
9229 or a MINUS_EXPR of a real constant, we can convert it into a
9230 comparison with a revised real constant as long as no overflow
9231 occurs when unsafe_math_optimizations are enabled. */
9232 if (flag_unsafe_math_optimizations
9233 && TREE_CODE (arg1
) == REAL_CST
9234 && (TREE_CODE (arg0
) == PLUS_EXPR
9235 || TREE_CODE (arg0
) == MINUS_EXPR
)
9236 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
9237 && 0 != (tem
= const_binop (TREE_CODE (arg0
) == PLUS_EXPR
9238 ? MINUS_EXPR
: PLUS_EXPR
,
9239 arg1
, TREE_OPERAND (arg0
, 1)))
9240 && !TREE_OVERFLOW (tem
))
9241 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
9243 /* Likewise, we can simplify a comparison of a real constant with
9244 a MINUS_EXPR whose first operand is also a real constant, i.e.
9245 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9246 floating-point types only if -fassociative-math is set. */
9247 if (flag_associative_math
9248 && TREE_CODE (arg1
) == REAL_CST
9249 && TREE_CODE (arg0
) == MINUS_EXPR
9250 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == REAL_CST
9251 && 0 != (tem
= const_binop (MINUS_EXPR
, TREE_OPERAND (arg0
, 0),
9253 && !TREE_OVERFLOW (tem
))
9254 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
,
9255 TREE_OPERAND (arg0
, 1), tem
);
9257 /* Fold comparisons against built-in math functions. */
9258 if (TREE_CODE (arg1
) == REAL_CST
9259 && flag_unsafe_math_optimizations
9260 && ! flag_errno_math
)
9262 enum built_in_function fcode
= builtin_mathfn_code (arg0
);
9264 if (fcode
!= END_BUILTINS
)
9266 tem
= fold_mathfn_compare (loc
, fcode
, code
, type
, arg0
, arg1
);
9267 if (tem
!= NULL_TREE
)
9273 if (TREE_CODE (TREE_TYPE (arg0
)) == INTEGER_TYPE
9274 && CONVERT_EXPR_P (arg0
))
9276 /* If we are widening one operand of an integer comparison,
9277 see if the other operand is similarly being widened. Perhaps we
9278 can do the comparison in the narrower type. */
9279 tem
= fold_widened_comparison (loc
, code
, type
, arg0
, arg1
);
9283 /* Or if we are changing signedness. */
9284 tem
= fold_sign_changed_comparison (loc
, code
, type
, arg0
, arg1
);
9289 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9290 constant, we can simplify it. */
9291 if (TREE_CODE (arg1
) == INTEGER_CST
9292 && (TREE_CODE (arg0
) == MIN_EXPR
9293 || TREE_CODE (arg0
) == MAX_EXPR
)
9294 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
9296 tem
= optimize_minmax_comparison (loc
, code
, type
, op0
, op1
);
9301 /* Simplify comparison of something with itself. (For IEEE
9302 floating-point, we can only do some of these simplifications.) */
9303 if (operand_equal_p (arg0
, arg1
, 0))
9308 if (! FLOAT_TYPE_P (TREE_TYPE (arg0
))
9309 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
9310 return constant_boolean_node (1, type
);
9315 if (! FLOAT_TYPE_P (TREE_TYPE (arg0
))
9316 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
9317 return constant_boolean_node (1, type
);
9318 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
, arg1
);
9321 /* For NE, we can only do this simplification if integer
9322 or we don't honor IEEE floating point NaNs. */
9323 if (FLOAT_TYPE_P (TREE_TYPE (arg0
))
9324 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
9326 /* ... fall through ... */
9329 return constant_boolean_node (0, type
);
9335 /* If we are comparing an expression that just has comparisons
9336 of two integer values, arithmetic expressions of those comparisons,
9337 and constants, we can simplify it. There are only three cases
9338 to check: the two values can either be equal, the first can be
9339 greater, or the second can be greater. Fold the expression for
9340 those three values. Since each value must be 0 or 1, we have
9341 eight possibilities, each of which corresponds to the constant 0
9342 or 1 or one of the six possible comparisons.
9344 This handles common cases like (a > b) == 0 but also handles
9345 expressions like ((x > y) - (y > x)) > 0, which supposedly
9346 occur in macroized code. */
9348 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) != INTEGER_CST
)
9350 tree cval1
= 0, cval2
= 0;
9353 if (twoval_comparison_p (arg0
, &cval1
, &cval2
, &save_p
)
9354 /* Don't handle degenerate cases here; they should already
9355 have been handled anyway. */
9356 && cval1
!= 0 && cval2
!= 0
9357 && ! (TREE_CONSTANT (cval1
) && TREE_CONSTANT (cval2
))
9358 && TREE_TYPE (cval1
) == TREE_TYPE (cval2
)
9359 && INTEGRAL_TYPE_P (TREE_TYPE (cval1
))
9360 && TYPE_MAX_VALUE (TREE_TYPE (cval1
))
9361 && TYPE_MAX_VALUE (TREE_TYPE (cval2
))
9362 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1
)),
9363 TYPE_MAX_VALUE (TREE_TYPE (cval2
)), 0))
9365 tree maxval
= TYPE_MAX_VALUE (TREE_TYPE (cval1
));
9366 tree minval
= TYPE_MIN_VALUE (TREE_TYPE (cval1
));
9368 /* We can't just pass T to eval_subst in case cval1 or cval2
9369 was the same as ARG1. */
9372 = fold_build2_loc (loc
, code
, type
,
9373 eval_subst (loc
, arg0
, cval1
, maxval
,
9377 = fold_build2_loc (loc
, code
, type
,
9378 eval_subst (loc
, arg0
, cval1
, maxval
,
9382 = fold_build2_loc (loc
, code
, type
,
9383 eval_subst (loc
, arg0
, cval1
, minval
,
9387 /* All three of these results should be 0 or 1. Confirm they are.
9388 Then use those values to select the proper code to use. */
9390 if (TREE_CODE (high_result
) == INTEGER_CST
9391 && TREE_CODE (equal_result
) == INTEGER_CST
9392 && TREE_CODE (low_result
) == INTEGER_CST
)
9394 /* Make a 3-bit mask with the high-order bit being the
9395 value for `>', the next for '=', and the low for '<'. */
9396 switch ((integer_onep (high_result
) * 4)
9397 + (integer_onep (equal_result
) * 2)
9398 + integer_onep (low_result
))
9402 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
9423 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
9428 tem
= save_expr (build2 (code
, type
, cval1
, cval2
));
9429 SET_EXPR_LOCATION (tem
, loc
);
9432 return fold_build2_loc (loc
, code
, type
, cval1
, cval2
);
9437 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9438 into a single range test. */
9439 if ((TREE_CODE (arg0
) == TRUNC_DIV_EXPR
9440 || TREE_CODE (arg0
) == EXACT_DIV_EXPR
)
9441 && TREE_CODE (arg1
) == INTEGER_CST
9442 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
9443 && !integer_zerop (TREE_OPERAND (arg0
, 1))
9444 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1))
9445 && !TREE_OVERFLOW (arg1
))
9447 tem
= fold_div_compare (loc
, code
, type
, arg0
, arg1
);
9448 if (tem
!= NULL_TREE
)
9452 /* Fold ~X op ~Y as Y op X. */
9453 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9454 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
9456 tree cmp_type
= TREE_TYPE (TREE_OPERAND (arg0
, 0));
9457 return fold_build2_loc (loc
, code
, type
,
9458 fold_convert_loc (loc
, cmp_type
,
9459 TREE_OPERAND (arg1
, 0)),
9460 TREE_OPERAND (arg0
, 0));
9463 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9464 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9465 && TREE_CODE (arg1
) == INTEGER_CST
)
9467 tree cmp_type
= TREE_TYPE (TREE_OPERAND (arg0
, 0));
9468 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
,
9469 TREE_OPERAND (arg0
, 0),
9470 fold_build1_loc (loc
, BIT_NOT_EXPR
, cmp_type
,
9471 fold_convert_loc (loc
, cmp_type
, arg1
)));
9478 /* Subroutine of fold_binary. Optimize complex multiplications of the
9479 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9480 argument EXPR represents the expression "z" of type TYPE. */
9483 fold_mult_zconjz (location_t loc
, tree type
, tree expr
)
9485 tree itype
= TREE_TYPE (type
);
9486 tree rpart
, ipart
, tem
;
9488 if (TREE_CODE (expr
) == COMPLEX_EXPR
)
9490 rpart
= TREE_OPERAND (expr
, 0);
9491 ipart
= TREE_OPERAND (expr
, 1);
9493 else if (TREE_CODE (expr
) == COMPLEX_CST
)
9495 rpart
= TREE_REALPART (expr
);
9496 ipart
= TREE_IMAGPART (expr
);
9500 expr
= save_expr (expr
);
9501 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, itype
, expr
);
9502 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, itype
, expr
);
9505 rpart
= save_expr (rpart
);
9506 ipart
= save_expr (ipart
);
9507 tem
= fold_build2_loc (loc
, PLUS_EXPR
, itype
,
9508 fold_build2_loc (loc
, MULT_EXPR
, itype
, rpart
, rpart
),
9509 fold_build2_loc (loc
, MULT_EXPR
, itype
, ipart
, ipart
));
9510 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, tem
,
9511 build_zero_cst (itype
));
9515 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9516 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9517 guarantees that P and N have the same least significant log2(M) bits.
9518 N is not otherwise constrained. In particular, N is not normalized to
9519 0 <= N < M as is common. In general, the precise value of P is unknown.
9520 M is chosen as large as possible such that constant N can be determined.
9522 Returns M and sets *RESIDUE to N.
9524 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9525 account. This is not always possible due to PR 35705.
9528 static unsigned HOST_WIDE_INT
9529 get_pointer_modulus_and_residue (tree expr
, unsigned HOST_WIDE_INT
*residue
,
9530 bool allow_func_align
)
9532 enum tree_code code
;
9536 code
= TREE_CODE (expr
);
9537 if (code
== ADDR_EXPR
)
9539 unsigned int bitalign
;
9540 get_object_alignment_1 (TREE_OPERAND (expr
, 0), &bitalign
, residue
);
9541 *residue
/= BITS_PER_UNIT
;
9542 return bitalign
/ BITS_PER_UNIT
;
9544 else if (code
== POINTER_PLUS_EXPR
)
9547 unsigned HOST_WIDE_INT modulus
;
9548 enum tree_code inner_code
;
9550 op0
= TREE_OPERAND (expr
, 0);
9552 modulus
= get_pointer_modulus_and_residue (op0
, residue
,
9555 op1
= TREE_OPERAND (expr
, 1);
9557 inner_code
= TREE_CODE (op1
);
9558 if (inner_code
== INTEGER_CST
)
9560 *residue
+= TREE_INT_CST_LOW (op1
);
9563 else if (inner_code
== MULT_EXPR
)
9565 op1
= TREE_OPERAND (op1
, 1);
9566 if (TREE_CODE (op1
) == INTEGER_CST
)
9568 unsigned HOST_WIDE_INT align
;
9570 /* Compute the greatest power-of-2 divisor of op1. */
9571 align
= TREE_INT_CST_LOW (op1
);
9574 /* If align is non-zero and less than *modulus, replace
9575 *modulus with align., If align is 0, then either op1 is 0
9576 or the greatest power-of-2 divisor of op1 doesn't fit in an
9577 unsigned HOST_WIDE_INT. In either case, no additional
9578 constraint is imposed. */
9580 modulus
= MIN (modulus
, align
);
9587 /* If we get here, we were unable to determine anything useful about the
9592 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9593 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9596 vec_cst_ctor_to_array (tree arg
, tree
*elts
)
9598 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg
)), i
;
9600 if (TREE_CODE (arg
) == VECTOR_CST
)
9602 for (i
= 0; i
< VECTOR_CST_NELTS (arg
); ++i
)
9603 elts
[i
] = VECTOR_CST_ELT (arg
, i
);
9605 else if (TREE_CODE (arg
) == CONSTRUCTOR
)
9607 constructor_elt
*elt
;
9609 FOR_EACH_VEC_ELT (constructor_elt
, CONSTRUCTOR_ELTS (arg
), i
, elt
)
9613 elts
[i
] = elt
->value
;
9617 for (; i
< nelts
; i
++)
9619 = fold_convert (TREE_TYPE (TREE_TYPE (arg
)), integer_zero_node
);
9623 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9624 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9625 NULL_TREE otherwise. */
9628 fold_vec_perm (tree type
, tree arg0
, tree arg1
, const unsigned char *sel
)
9630 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
9632 bool need_ctor
= false;
9634 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)) == nelts
9635 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)) == nelts
);
9636 if (TREE_TYPE (TREE_TYPE (arg0
)) != TREE_TYPE (type
)
9637 || TREE_TYPE (TREE_TYPE (arg1
)) != TREE_TYPE (type
))
9640 elts
= XALLOCAVEC (tree
, nelts
* 3);
9641 if (!vec_cst_ctor_to_array (arg0
, elts
)
9642 || !vec_cst_ctor_to_array (arg1
, elts
+ nelts
))
9645 for (i
= 0; i
< nelts
; i
++)
9647 if (!CONSTANT_CLASS_P (elts
[sel
[i
]]))
9649 elts
[i
+ 2 * nelts
] = unshare_expr (elts
[sel
[i
]]);
9654 VEC(constructor_elt
,gc
) *v
= VEC_alloc (constructor_elt
, gc
, nelts
);
9655 for (i
= 0; i
< nelts
; i
++)
9656 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, elts
[2 * nelts
+ i
]);
9657 return build_constructor (type
, v
);
9660 return build_vector (type
, &elts
[2 * nelts
]);
9663 /* Try to fold a pointer difference of type TYPE two address expressions of
9664 array references AREF0 and AREF1 using location LOC. Return a
9665 simplified expression for the difference or NULL_TREE. */
9668 fold_addr_of_array_ref_difference (location_t loc
, tree type
,
9669 tree aref0
, tree aref1
)
9671 tree base0
= TREE_OPERAND (aref0
, 0);
9672 tree base1
= TREE_OPERAND (aref1
, 0);
9673 tree base_offset
= build_int_cst (type
, 0);
9675 /* If the bases are array references as well, recurse. If the bases
9676 are pointer indirections compute the difference of the pointers.
9677 If the bases are equal, we are set. */
9678 if ((TREE_CODE (base0
) == ARRAY_REF
9679 && TREE_CODE (base1
) == ARRAY_REF
9681 = fold_addr_of_array_ref_difference (loc
, type
, base0
, base1
)))
9682 || (INDIRECT_REF_P (base0
)
9683 && INDIRECT_REF_P (base1
)
9684 && (base_offset
= fold_binary_loc (loc
, MINUS_EXPR
, type
,
9685 TREE_OPERAND (base0
, 0),
9686 TREE_OPERAND (base1
, 0))))
9687 || operand_equal_p (base0
, base1
, 0))
9689 tree op0
= fold_convert_loc (loc
, type
, TREE_OPERAND (aref0
, 1));
9690 tree op1
= fold_convert_loc (loc
, type
, TREE_OPERAND (aref1
, 1));
9691 tree esz
= fold_convert_loc (loc
, type
, array_ref_element_size (aref0
));
9692 tree diff
= build2 (MINUS_EXPR
, type
, op0
, op1
);
9693 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
9695 fold_build2_loc (loc
, MULT_EXPR
, type
,
9701 /* If the real or vector real constant CST of type TYPE has an exact
9702 inverse, return it, else return NULL. */
9705 exact_inverse (tree type
, tree cst
)
9708 tree unit_type
, *elts
;
9709 enum machine_mode mode
;
9710 unsigned vec_nelts
, i
;
9712 switch (TREE_CODE (cst
))
9715 r
= TREE_REAL_CST (cst
);
9717 if (exact_real_inverse (TYPE_MODE (type
), &r
))
9718 return build_real (type
, r
);
9723 vec_nelts
= VECTOR_CST_NELTS (cst
);
9724 elts
= XALLOCAVEC (tree
, vec_nelts
);
9725 unit_type
= TREE_TYPE (type
);
9726 mode
= TYPE_MODE (unit_type
);
9728 for (i
= 0; i
< vec_nelts
; i
++)
9730 r
= TREE_REAL_CST (VECTOR_CST_ELT (cst
, i
));
9731 if (!exact_real_inverse (mode
, &r
))
9733 elts
[i
] = build_real (unit_type
, r
);
9736 return build_vector (type
, elts
);
9743 /* Fold a binary expression of code CODE and type TYPE with operands
9744 OP0 and OP1. LOC is the location of the resulting expression.
9745 Return the folded expression if folding is successful. Otherwise,
9746 return NULL_TREE. */
9749 fold_binary_loc (location_t loc
,
9750 enum tree_code code
, tree type
, tree op0
, tree op1
)
9752 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
9753 tree arg0
, arg1
, tem
;
9754 tree t1
= NULL_TREE
;
9755 bool strict_overflow_p
;
9757 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
9758 && TREE_CODE_LENGTH (code
) == 2
9760 && op1
!= NULL_TREE
);
9765 /* Strip any conversions that don't change the mode. This is
9766 safe for every expression, except for a comparison expression
9767 because its signedness is derived from its operands. So, in
9768 the latter case, only strip conversions that don't change the
9769 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9772 Note that this is done as an internal manipulation within the
9773 constant folder, in order to find the simplest representation
9774 of the arguments so that their form can be studied. In any
9775 cases, the appropriate type conversions should be put back in
9776 the tree that will get out of the constant folder. */
9778 if (kind
== tcc_comparison
|| code
== MIN_EXPR
|| code
== MAX_EXPR
)
9780 STRIP_SIGN_NOPS (arg0
);
9781 STRIP_SIGN_NOPS (arg1
);
9789 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9790 constant but we can't do arithmetic on them. */
9791 if ((TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
9792 || (TREE_CODE (arg0
) == REAL_CST
&& TREE_CODE (arg1
) == REAL_CST
)
9793 || (TREE_CODE (arg0
) == FIXED_CST
&& TREE_CODE (arg1
) == FIXED_CST
)
9794 || (TREE_CODE (arg0
) == FIXED_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
9795 || (TREE_CODE (arg0
) == COMPLEX_CST
&& TREE_CODE (arg1
) == COMPLEX_CST
)
9796 || (TREE_CODE (arg0
) == VECTOR_CST
&& TREE_CODE (arg1
) == VECTOR_CST
))
9798 if (kind
== tcc_binary
)
9800 /* Make sure type and arg0 have the same saturating flag. */
9801 gcc_assert (TYPE_SATURATING (type
)
9802 == TYPE_SATURATING (TREE_TYPE (arg0
)));
9803 tem
= const_binop (code
, arg0
, arg1
);
9805 else if (kind
== tcc_comparison
)
9806 tem
= fold_relational_const (code
, type
, arg0
, arg1
);
9810 if (tem
!= NULL_TREE
)
9812 if (TREE_TYPE (tem
) != type
)
9813 tem
= fold_convert_loc (loc
, type
, tem
);
9818 /* If this is a commutative operation, and ARG0 is a constant, move it
9819 to ARG1 to reduce the number of tests below. */
9820 if (commutative_tree_code (code
)
9821 && tree_swap_operands_p (arg0
, arg1
, true))
9822 return fold_build2_loc (loc
, code
, type
, op1
, op0
);
9824 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9826 First check for cases where an arithmetic operation is applied to a
9827 compound, conditional, or comparison operation. Push the arithmetic
9828 operation inside the compound or conditional to see if any folding
9829 can then be done. Convert comparison to conditional for this purpose.
9830 The also optimizes non-constant cases that used to be done in
9833 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9834 one of the operands is a comparison and the other is a comparison, a
9835 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9836 code below would make the expression more complex. Change it to a
9837 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9838 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9840 if ((code
== BIT_AND_EXPR
|| code
== BIT_IOR_EXPR
9841 || code
== EQ_EXPR
|| code
== NE_EXPR
)
9842 && ((truth_value_p (TREE_CODE (arg0
))
9843 && (truth_value_p (TREE_CODE (arg1
))
9844 || (TREE_CODE (arg1
) == BIT_AND_EXPR
9845 && integer_onep (TREE_OPERAND (arg1
, 1)))))
9846 || (truth_value_p (TREE_CODE (arg1
))
9847 && (truth_value_p (TREE_CODE (arg0
))
9848 || (TREE_CODE (arg0
) == BIT_AND_EXPR
9849 && integer_onep (TREE_OPERAND (arg0
, 1)))))))
9851 tem
= fold_build2_loc (loc
, code
== BIT_AND_EXPR
? TRUTH_AND_EXPR
9852 : code
== BIT_IOR_EXPR
? TRUTH_OR_EXPR
9855 fold_convert_loc (loc
, boolean_type_node
, arg0
),
9856 fold_convert_loc (loc
, boolean_type_node
, arg1
));
9858 if (code
== EQ_EXPR
)
9859 tem
= invert_truthvalue_loc (loc
, tem
);
9861 return fold_convert_loc (loc
, type
, tem
);
9864 if (TREE_CODE_CLASS (code
) == tcc_binary
9865 || TREE_CODE_CLASS (code
) == tcc_comparison
)
9867 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
9869 tem
= fold_build2_loc (loc
, code
, type
,
9870 fold_convert_loc (loc
, TREE_TYPE (op0
),
9871 TREE_OPERAND (arg0
, 1)), op1
);
9872 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
9875 if (TREE_CODE (arg1
) == COMPOUND_EXPR
9876 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
9878 tem
= fold_build2_loc (loc
, code
, type
, op0
,
9879 fold_convert_loc (loc
, TREE_TYPE (op1
),
9880 TREE_OPERAND (arg1
, 1)));
9881 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg1
, 0),
9885 if (TREE_CODE (arg0
) == COND_EXPR
|| COMPARISON_CLASS_P (arg0
))
9887 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
9889 /*cond_first_p=*/1);
9890 if (tem
!= NULL_TREE
)
9894 if (TREE_CODE (arg1
) == COND_EXPR
|| COMPARISON_CLASS_P (arg1
))
9896 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
9898 /*cond_first_p=*/0);
9899 if (tem
!= NULL_TREE
)
9907 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9908 if (TREE_CODE (arg0
) == ADDR_EXPR
9909 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == MEM_REF
)
9911 tree iref
= TREE_OPERAND (arg0
, 0);
9912 return fold_build2 (MEM_REF
, type
,
9913 TREE_OPERAND (iref
, 0),
9914 int_const_binop (PLUS_EXPR
, arg1
,
9915 TREE_OPERAND (iref
, 1)));
9918 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9919 if (TREE_CODE (arg0
) == ADDR_EXPR
9920 && handled_component_p (TREE_OPERAND (arg0
, 0)))
9923 HOST_WIDE_INT coffset
;
9924 base
= get_addr_base_and_unit_offset (TREE_OPERAND (arg0
, 0),
9928 return fold_build2 (MEM_REF
, type
,
9929 build_fold_addr_expr (base
),
9930 int_const_binop (PLUS_EXPR
, arg1
,
9931 size_int (coffset
)));
9936 case POINTER_PLUS_EXPR
:
9937 /* 0 +p index -> (type)index */
9938 if (integer_zerop (arg0
))
9939 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
9941 /* PTR +p 0 -> PTR */
9942 if (integer_zerop (arg1
))
9943 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
9945 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9946 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
9947 && INTEGRAL_TYPE_P (TREE_TYPE (arg0
)))
9948 return fold_convert_loc (loc
, type
,
9949 fold_build2_loc (loc
, PLUS_EXPR
, sizetype
,
9950 fold_convert_loc (loc
, sizetype
,
9952 fold_convert_loc (loc
, sizetype
,
9955 /* (PTR +p B) +p A -> PTR +p (B + A) */
9956 if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
)
9959 tree arg01
= fold_convert_loc (loc
, sizetype
, TREE_OPERAND (arg0
, 1));
9960 tree arg00
= TREE_OPERAND (arg0
, 0);
9961 inner
= fold_build2_loc (loc
, PLUS_EXPR
, sizetype
,
9962 arg01
, fold_convert_loc (loc
, sizetype
, arg1
));
9963 return fold_convert_loc (loc
, type
,
9964 fold_build_pointer_plus_loc (loc
,
9968 /* PTR_CST +p CST -> CST1 */
9969 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
9970 return fold_build2_loc (loc
, PLUS_EXPR
, type
, arg0
,
9971 fold_convert_loc (loc
, type
, arg1
));
9973 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
9974 of the array. Loop optimizer sometimes produce this type of
9976 if (TREE_CODE (arg0
) == ADDR_EXPR
)
9978 tem
= try_move_mult_to_index (loc
, arg0
,
9979 fold_convert_loc (loc
,
9982 return fold_convert_loc (loc
, type
, tem
);
9988 /* A + (-B) -> A - B */
9989 if (TREE_CODE (arg1
) == NEGATE_EXPR
)
9990 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
9991 fold_convert_loc (loc
, type
, arg0
),
9992 fold_convert_loc (loc
, type
,
9993 TREE_OPERAND (arg1
, 0)));
9994 /* (-A) + B -> B - A */
9995 if (TREE_CODE (arg0
) == NEGATE_EXPR
9996 && reorder_operands_p (TREE_OPERAND (arg0
, 0), arg1
))
9997 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
9998 fold_convert_loc (loc
, type
, arg1
),
9999 fold_convert_loc (loc
, type
,
10000 TREE_OPERAND (arg0
, 0)));
10002 if (INTEGRAL_TYPE_P (type
))
10004 /* Convert ~A + 1 to -A. */
10005 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
10006 && integer_onep (arg1
))
10007 return fold_build1_loc (loc
, NEGATE_EXPR
, type
,
10008 fold_convert_loc (loc
, type
,
10009 TREE_OPERAND (arg0
, 0)));
10011 /* ~X + X is -1. */
10012 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
10013 && !TYPE_OVERFLOW_TRAPS (type
))
10015 tree tem
= TREE_OPERAND (arg0
, 0);
10018 if (operand_equal_p (tem
, arg1
, 0))
10020 t1
= build_int_cst_type (type
, -1);
10021 return omit_one_operand_loc (loc
, type
, t1
, arg1
);
10025 /* X + ~X is -1. */
10026 if (TREE_CODE (arg1
) == BIT_NOT_EXPR
10027 && !TYPE_OVERFLOW_TRAPS (type
))
10029 tree tem
= TREE_OPERAND (arg1
, 0);
10032 if (operand_equal_p (arg0
, tem
, 0))
10034 t1
= build_int_cst_type (type
, -1);
10035 return omit_one_operand_loc (loc
, type
, t1
, arg0
);
10039 /* X + (X / CST) * -CST is X % CST. */
10040 if (TREE_CODE (arg1
) == MULT_EXPR
10041 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == TRUNC_DIV_EXPR
10042 && operand_equal_p (arg0
,
10043 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0), 0))
10045 tree cst0
= TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1);
10046 tree cst1
= TREE_OPERAND (arg1
, 1);
10047 tree sum
= fold_binary_loc (loc
, PLUS_EXPR
, TREE_TYPE (cst1
),
10049 if (sum
&& integer_zerop (sum
))
10050 return fold_convert_loc (loc
, type
,
10051 fold_build2_loc (loc
, TRUNC_MOD_EXPR
,
10052 TREE_TYPE (arg0
), arg0
,
10057 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10058 one. Make sure the type is not saturating and has the signedness of
10059 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10060 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10061 if ((TREE_CODE (arg0
) == MULT_EXPR
10062 || TREE_CODE (arg1
) == MULT_EXPR
)
10063 && !TYPE_SATURATING (type
)
10064 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg0
))
10065 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg1
))
10066 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
10068 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
10073 if (! FLOAT_TYPE_P (type
))
10075 if (integer_zerop (arg1
))
10076 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10078 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10079 with a constant, and the two constants have no bits in common,
10080 we should treat this as a BIT_IOR_EXPR since this may produce more
10081 simplifications. */
10082 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10083 && TREE_CODE (arg1
) == BIT_AND_EXPR
10084 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
10085 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
10086 && integer_zerop (const_binop (BIT_AND_EXPR
,
10087 TREE_OPERAND (arg0
, 1),
10088 TREE_OPERAND (arg1
, 1))))
10090 code
= BIT_IOR_EXPR
;
10094 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10095 (plus (plus (mult) (mult)) (foo)) so that we can
10096 take advantage of the factoring cases below. */
10097 if (TYPE_OVERFLOW_WRAPS (type
)
10098 && (((TREE_CODE (arg0
) == PLUS_EXPR
10099 || TREE_CODE (arg0
) == MINUS_EXPR
)
10100 && TREE_CODE (arg1
) == MULT_EXPR
)
10101 || ((TREE_CODE (arg1
) == PLUS_EXPR
10102 || TREE_CODE (arg1
) == MINUS_EXPR
)
10103 && TREE_CODE (arg0
) == MULT_EXPR
)))
10105 tree parg0
, parg1
, parg
, marg
;
10106 enum tree_code pcode
;
10108 if (TREE_CODE (arg1
) == MULT_EXPR
)
10109 parg
= arg0
, marg
= arg1
;
10111 parg
= arg1
, marg
= arg0
;
10112 pcode
= TREE_CODE (parg
);
10113 parg0
= TREE_OPERAND (parg
, 0);
10114 parg1
= TREE_OPERAND (parg
, 1);
10115 STRIP_NOPS (parg0
);
10116 STRIP_NOPS (parg1
);
10118 if (TREE_CODE (parg0
) == MULT_EXPR
10119 && TREE_CODE (parg1
) != MULT_EXPR
)
10120 return fold_build2_loc (loc
, pcode
, type
,
10121 fold_build2_loc (loc
, PLUS_EXPR
, type
,
10122 fold_convert_loc (loc
, type
,
10124 fold_convert_loc (loc
, type
,
10126 fold_convert_loc (loc
, type
, parg1
));
10127 if (TREE_CODE (parg0
) != MULT_EXPR
10128 && TREE_CODE (parg1
) == MULT_EXPR
)
10130 fold_build2_loc (loc
, PLUS_EXPR
, type
,
10131 fold_convert_loc (loc
, type
, parg0
),
10132 fold_build2_loc (loc
, pcode
, type
,
10133 fold_convert_loc (loc
, type
, marg
),
10134 fold_convert_loc (loc
, type
,
10140 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10141 if (fold_real_zero_addition_p (TREE_TYPE (arg0
), arg1
, 0))
10142 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10144 /* Likewise if the operands are reversed. */
10145 if (fold_real_zero_addition_p (TREE_TYPE (arg1
), arg0
, 0))
10146 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
10148 /* Convert X + -C into X - C. */
10149 if (TREE_CODE (arg1
) == REAL_CST
10150 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
)))
10152 tem
= fold_negate_const (arg1
, type
);
10153 if (!TREE_OVERFLOW (arg1
) || !flag_trapping_math
)
10154 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
10155 fold_convert_loc (loc
, type
, arg0
),
10156 fold_convert_loc (loc
, type
, tem
));
10159 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10160 to __complex__ ( x, y ). This is not the same for SNaNs or
10161 if signed zeros are involved. */
10162 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
10163 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
10164 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
10166 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
10167 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
10168 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
10169 bool arg0rz
= false, arg0iz
= false;
10170 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
10171 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
10173 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
10174 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
10175 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
10177 tree rp
= arg1r
? arg1r
10178 : build1 (REALPART_EXPR
, rtype
, arg1
);
10179 tree ip
= arg0i
? arg0i
10180 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
10181 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10183 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
10185 tree rp
= arg0r
? arg0r
10186 : build1 (REALPART_EXPR
, rtype
, arg0
);
10187 tree ip
= arg1i
? arg1i
10188 : build1 (IMAGPART_EXPR
, rtype
, arg1
);
10189 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10194 if (flag_unsafe_math_optimizations
10195 && (TREE_CODE (arg0
) == RDIV_EXPR
|| TREE_CODE (arg0
) == MULT_EXPR
)
10196 && (TREE_CODE (arg1
) == RDIV_EXPR
|| TREE_CODE (arg1
) == MULT_EXPR
)
10197 && (tem
= distribute_real_division (loc
, code
, type
, arg0
, arg1
)))
10200 /* Convert x+x into x*2.0. */
10201 if (operand_equal_p (arg0
, arg1
, 0)
10202 && SCALAR_FLOAT_TYPE_P (type
))
10203 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
,
10204 build_real (type
, dconst2
));
10206 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10207 We associate floats only if the user has specified
10208 -fassociative-math. */
10209 if (flag_associative_math
10210 && TREE_CODE (arg1
) == PLUS_EXPR
10211 && TREE_CODE (arg0
) != MULT_EXPR
)
10213 tree tree10
= TREE_OPERAND (arg1
, 0);
10214 tree tree11
= TREE_OPERAND (arg1
, 1);
10215 if (TREE_CODE (tree11
) == MULT_EXPR
10216 && TREE_CODE (tree10
) == MULT_EXPR
)
10219 tree0
= fold_build2_loc (loc
, PLUS_EXPR
, type
, arg0
, tree10
);
10220 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tree0
, tree11
);
10223 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10224 We associate floats only if the user has specified
10225 -fassociative-math. */
10226 if (flag_associative_math
10227 && TREE_CODE (arg0
) == PLUS_EXPR
10228 && TREE_CODE (arg1
) != MULT_EXPR
)
10230 tree tree00
= TREE_OPERAND (arg0
, 0);
10231 tree tree01
= TREE_OPERAND (arg0
, 1);
10232 if (TREE_CODE (tree01
) == MULT_EXPR
10233 && TREE_CODE (tree00
) == MULT_EXPR
)
10236 tree0
= fold_build2_loc (loc
, PLUS_EXPR
, type
, tree01
, arg1
);
10237 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tree00
, tree0
);
10243 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10244 is a rotate of A by C1 bits. */
10245 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10246 is a rotate of A by B bits. */
10248 enum tree_code code0
, code1
;
10250 code0
= TREE_CODE (arg0
);
10251 code1
= TREE_CODE (arg1
);
10252 if (((code0
== RSHIFT_EXPR
&& code1
== LSHIFT_EXPR
)
10253 || (code1
== RSHIFT_EXPR
&& code0
== LSHIFT_EXPR
))
10254 && operand_equal_p (TREE_OPERAND (arg0
, 0),
10255 TREE_OPERAND (arg1
, 0), 0)
10256 && (rtype
= TREE_TYPE (TREE_OPERAND (arg0
, 0)),
10257 TYPE_UNSIGNED (rtype
))
10258 /* Only create rotates in complete modes. Other cases are not
10259 expanded properly. */
10260 && TYPE_PRECISION (rtype
) == GET_MODE_PRECISION (TYPE_MODE (rtype
)))
10262 tree tree01
, tree11
;
10263 enum tree_code code01
, code11
;
10265 tree01
= TREE_OPERAND (arg0
, 1);
10266 tree11
= TREE_OPERAND (arg1
, 1);
10267 STRIP_NOPS (tree01
);
10268 STRIP_NOPS (tree11
);
10269 code01
= TREE_CODE (tree01
);
10270 code11
= TREE_CODE (tree11
);
10271 if (code01
== INTEGER_CST
10272 && code11
== INTEGER_CST
10273 && TREE_INT_CST_HIGH (tree01
) == 0
10274 && TREE_INT_CST_HIGH (tree11
) == 0
10275 && ((TREE_INT_CST_LOW (tree01
) + TREE_INT_CST_LOW (tree11
))
10276 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0
, 0)))))
10278 tem
= build2_loc (loc
, LROTATE_EXPR
,
10279 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
10280 TREE_OPERAND (arg0
, 0),
10281 code0
== LSHIFT_EXPR
? tree01
: tree11
);
10282 return fold_convert_loc (loc
, type
, tem
);
10284 else if (code11
== MINUS_EXPR
)
10286 tree tree110
, tree111
;
10287 tree110
= TREE_OPERAND (tree11
, 0);
10288 tree111
= TREE_OPERAND (tree11
, 1);
10289 STRIP_NOPS (tree110
);
10290 STRIP_NOPS (tree111
);
10291 if (TREE_CODE (tree110
) == INTEGER_CST
10292 && 0 == compare_tree_int (tree110
,
10294 (TREE_TYPE (TREE_OPERAND
10296 && operand_equal_p (tree01
, tree111
, 0))
10298 fold_convert_loc (loc
, type
,
10299 build2 ((code0
== LSHIFT_EXPR
10302 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
10303 TREE_OPERAND (arg0
, 0), tree01
));
10305 else if (code01
== MINUS_EXPR
)
10307 tree tree010
, tree011
;
10308 tree010
= TREE_OPERAND (tree01
, 0);
10309 tree011
= TREE_OPERAND (tree01
, 1);
10310 STRIP_NOPS (tree010
);
10311 STRIP_NOPS (tree011
);
10312 if (TREE_CODE (tree010
) == INTEGER_CST
10313 && 0 == compare_tree_int (tree010
,
10315 (TREE_TYPE (TREE_OPERAND
10317 && operand_equal_p (tree11
, tree011
, 0))
10318 return fold_convert_loc
10320 build2 ((code0
!= LSHIFT_EXPR
10323 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
10324 TREE_OPERAND (arg0
, 0), tree11
));
10330 /* In most languages, can't associate operations on floats through
10331 parentheses. Rather than remember where the parentheses were, we
10332 don't associate floats at all, unless the user has specified
10333 -fassociative-math.
10334 And, we need to make sure type is not saturating. */
10336 if ((! FLOAT_TYPE_P (type
) || flag_associative_math
)
10337 && !TYPE_SATURATING (type
))
10339 tree var0
, con0
, lit0
, minus_lit0
;
10340 tree var1
, con1
, lit1
, minus_lit1
;
10343 /* Split both trees into variables, constants, and literals. Then
10344 associate each group together, the constants with literals,
10345 then the result with variables. This increases the chances of
10346 literals being recombined later and of generating relocatable
10347 expressions for the sum of a constant and literal. */
10348 var0
= split_tree (arg0
, code
, &con0
, &lit0
, &minus_lit0
, 0);
10349 var1
= split_tree (arg1
, code
, &con1
, &lit1
, &minus_lit1
,
10350 code
== MINUS_EXPR
);
10352 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10353 if (code
== MINUS_EXPR
)
10356 /* With undefined overflow we can only associate constants with one
10357 variable, and constants whose association doesn't overflow. */
10358 if ((POINTER_TYPE_P (type
) && POINTER_TYPE_OVERFLOW_UNDEFINED
)
10359 || (INTEGRAL_TYPE_P (type
) && !TYPE_OVERFLOW_WRAPS (type
)))
10366 if (TREE_CODE (tmp0
) == NEGATE_EXPR
)
10367 tmp0
= TREE_OPERAND (tmp0
, 0);
10368 if (CONVERT_EXPR_P (tmp0
)
10369 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0
, 0)))
10370 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0
, 0)))
10371 <= TYPE_PRECISION (type
)))
10372 tmp0
= TREE_OPERAND (tmp0
, 0);
10373 if (TREE_CODE (tmp1
) == NEGATE_EXPR
)
10374 tmp1
= TREE_OPERAND (tmp1
, 0);
10375 if (CONVERT_EXPR_P (tmp1
)
10376 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1
, 0)))
10377 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1
, 0)))
10378 <= TYPE_PRECISION (type
)))
10379 tmp1
= TREE_OPERAND (tmp1
, 0);
10380 /* The only case we can still associate with two variables
10381 is if they are the same, modulo negation and bit-pattern
10382 preserving conversions. */
10383 if (!operand_equal_p (tmp0
, tmp1
, 0))
10387 if (ok
&& lit0
&& lit1
)
10389 tree tmp0
= fold_convert (type
, lit0
);
10390 tree tmp1
= fold_convert (type
, lit1
);
10392 if (!TREE_OVERFLOW (tmp0
) && !TREE_OVERFLOW (tmp1
)
10393 && TREE_OVERFLOW (fold_build2 (code
, type
, tmp0
, tmp1
)))
10398 /* Only do something if we found more than two objects. Otherwise,
10399 nothing has changed and we risk infinite recursion. */
10401 && (2 < ((var0
!= 0) + (var1
!= 0)
10402 + (con0
!= 0) + (con1
!= 0)
10403 + (lit0
!= 0) + (lit1
!= 0)
10404 + (minus_lit0
!= 0) + (minus_lit1
!= 0))))
10406 var0
= associate_trees (loc
, var0
, var1
, code
, type
);
10407 con0
= associate_trees (loc
, con0
, con1
, code
, type
);
10408 lit0
= associate_trees (loc
, lit0
, lit1
, code
, type
);
10409 minus_lit0
= associate_trees (loc
, minus_lit0
, minus_lit1
, code
, type
);
10411 /* Preserve the MINUS_EXPR if the negative part of the literal is
10412 greater than the positive part. Otherwise, the multiplicative
10413 folding code (i.e extract_muldiv) may be fooled in case
10414 unsigned constants are subtracted, like in the following
10415 example: ((X*2 + 4) - 8U)/2. */
10416 if (minus_lit0
&& lit0
)
10418 if (TREE_CODE (lit0
) == INTEGER_CST
10419 && TREE_CODE (minus_lit0
) == INTEGER_CST
10420 && tree_int_cst_lt (lit0
, minus_lit0
))
10422 minus_lit0
= associate_trees (loc
, minus_lit0
, lit0
,
10428 lit0
= associate_trees (loc
, lit0
, minus_lit0
,
10437 fold_convert_loc (loc
, type
,
10438 associate_trees (loc
, var0
, minus_lit0
,
10439 MINUS_EXPR
, type
));
10442 con0
= associate_trees (loc
, con0
, minus_lit0
,
10445 fold_convert_loc (loc
, type
,
10446 associate_trees (loc
, var0
, con0
,
10451 con0
= associate_trees (loc
, con0
, lit0
, code
, type
);
10453 fold_convert_loc (loc
, type
, associate_trees (loc
, var0
, con0
,
10461 /* Pointer simplifications for subtraction, simple reassociations. */
10462 if (POINTER_TYPE_P (TREE_TYPE (arg1
)) && POINTER_TYPE_P (TREE_TYPE (arg0
)))
10464 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10465 if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
10466 && TREE_CODE (arg1
) == POINTER_PLUS_EXPR
)
10468 tree arg00
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10469 tree arg01
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
10470 tree arg10
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
10471 tree arg11
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
10472 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
10473 fold_build2_loc (loc
, MINUS_EXPR
, type
,
10475 fold_build2_loc (loc
, MINUS_EXPR
, type
,
10478 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10479 else if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
)
10481 tree arg00
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10482 tree arg01
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
10483 tree tmp
= fold_binary_loc (loc
, MINUS_EXPR
, type
, arg00
,
10484 fold_convert_loc (loc
, type
, arg1
));
10486 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tmp
, arg01
);
10489 /* A - (-B) -> A + B */
10490 if (TREE_CODE (arg1
) == NEGATE_EXPR
)
10491 return fold_build2_loc (loc
, PLUS_EXPR
, type
, op0
,
10492 fold_convert_loc (loc
, type
,
10493 TREE_OPERAND (arg1
, 0)));
10494 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10495 if (TREE_CODE (arg0
) == NEGATE_EXPR
10496 && (FLOAT_TYPE_P (type
)
10497 || INTEGRAL_TYPE_P (type
))
10498 && negate_expr_p (arg1
)
10499 && reorder_operands_p (arg0
, arg1
))
10500 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
10501 fold_convert_loc (loc
, type
,
10502 negate_expr (arg1
)),
10503 fold_convert_loc (loc
, type
,
10504 TREE_OPERAND (arg0
, 0)));
10505 /* Convert -A - 1 to ~A. */
10506 if (INTEGRAL_TYPE_P (type
)
10507 && TREE_CODE (arg0
) == NEGATE_EXPR
10508 && integer_onep (arg1
)
10509 && !TYPE_OVERFLOW_TRAPS (type
))
10510 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
10511 fold_convert_loc (loc
, type
,
10512 TREE_OPERAND (arg0
, 0)));
10514 /* Convert -1 - A to ~A. */
10515 if (INTEGRAL_TYPE_P (type
)
10516 && integer_all_onesp (arg0
))
10517 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, op1
);
10520 /* X - (X / CST) * CST is X % CST. */
10521 if (INTEGRAL_TYPE_P (type
)
10522 && TREE_CODE (arg1
) == MULT_EXPR
10523 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == TRUNC_DIV_EXPR
10524 && operand_equal_p (arg0
,
10525 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0), 0)
10526 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1),
10527 TREE_OPERAND (arg1
, 1), 0))
10529 fold_convert_loc (loc
, type
,
10530 fold_build2_loc (loc
, TRUNC_MOD_EXPR
, TREE_TYPE (arg0
),
10531 arg0
, TREE_OPERAND (arg1
, 1)));
10533 if (! FLOAT_TYPE_P (type
))
10535 if (integer_zerop (arg0
))
10536 return negate_expr (fold_convert_loc (loc
, type
, arg1
));
10537 if (integer_zerop (arg1
))
10538 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10540 /* Fold A - (A & B) into ~B & A. */
10541 if (!TREE_SIDE_EFFECTS (arg0
)
10542 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
10544 if (operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0))
10546 tree arg10
= fold_convert_loc (loc
, type
,
10547 TREE_OPERAND (arg1
, 0));
10548 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10549 fold_build1_loc (loc
, BIT_NOT_EXPR
,
10551 fold_convert_loc (loc
, type
, arg0
));
10553 if (operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10555 tree arg11
= fold_convert_loc (loc
,
10556 type
, TREE_OPERAND (arg1
, 1));
10557 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10558 fold_build1_loc (loc
, BIT_NOT_EXPR
,
10560 fold_convert_loc (loc
, type
, arg0
));
10564 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10565 any power of 2 minus 1. */
10566 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10567 && TREE_CODE (arg1
) == BIT_AND_EXPR
10568 && operand_equal_p (TREE_OPERAND (arg0
, 0),
10569 TREE_OPERAND (arg1
, 0), 0))
10571 tree mask0
= TREE_OPERAND (arg0
, 1);
10572 tree mask1
= TREE_OPERAND (arg1
, 1);
10573 tree tem
= fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, mask0
);
10575 if (operand_equal_p (tem
, mask1
, 0))
10577 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, type
,
10578 TREE_OPERAND (arg0
, 0), mask1
);
10579 return fold_build2_loc (loc
, MINUS_EXPR
, type
, tem
, mask1
);
10584 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10585 else if (fold_real_zero_addition_p (TREE_TYPE (arg0
), arg1
, 1))
10586 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10588 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10589 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10590 (-ARG1 + ARG0) reduces to -ARG1. */
10591 else if (fold_real_zero_addition_p (TREE_TYPE (arg1
), arg0
, 0))
10592 return negate_expr (fold_convert_loc (loc
, type
, arg1
));
10594 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10595 __complex__ ( x, -y ). This is not the same for SNaNs or if
10596 signed zeros are involved. */
10597 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
10598 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
10599 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
10601 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
10602 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
10603 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
10604 bool arg0rz
= false, arg0iz
= false;
10605 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
10606 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
10608 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
10609 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
10610 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
10612 tree rp
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
10614 : build1 (REALPART_EXPR
, rtype
, arg1
));
10615 tree ip
= arg0i
? arg0i
10616 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
10617 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10619 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
10621 tree rp
= arg0r
? arg0r
10622 : build1 (REALPART_EXPR
, rtype
, arg0
);
10623 tree ip
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
10625 : build1 (IMAGPART_EXPR
, rtype
, arg1
));
10626 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10631 /* Fold &x - &x. This can happen from &x.foo - &x.
10632 This is unsafe for certain floats even in non-IEEE formats.
10633 In IEEE, it is unsafe because it does wrong for NaNs.
10634 Also note that operand_equal_p is always false if an operand
10637 if ((!FLOAT_TYPE_P (type
) || !HONOR_NANS (TYPE_MODE (type
)))
10638 && operand_equal_p (arg0
, arg1
, 0))
10639 return build_zero_cst (type
);
10641 /* A - B -> A + (-B) if B is easily negatable. */
10642 if (negate_expr_p (arg1
)
10643 && ((FLOAT_TYPE_P (type
)
10644 /* Avoid this transformation if B is a positive REAL_CST. */
10645 && (TREE_CODE (arg1
) != REAL_CST
10646 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
))))
10647 || INTEGRAL_TYPE_P (type
)))
10648 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
10649 fold_convert_loc (loc
, type
, arg0
),
10650 fold_convert_loc (loc
, type
,
10651 negate_expr (arg1
)));
10653 /* Try folding difference of addresses. */
10655 HOST_WIDE_INT diff
;
10657 if ((TREE_CODE (arg0
) == ADDR_EXPR
10658 || TREE_CODE (arg1
) == ADDR_EXPR
)
10659 && ptr_difference_const (arg0
, arg1
, &diff
))
10660 return build_int_cst_type (type
, diff
);
10663 /* Fold &a[i] - &a[j] to i-j. */
10664 if (TREE_CODE (arg0
) == ADDR_EXPR
10665 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == ARRAY_REF
10666 && TREE_CODE (arg1
) == ADDR_EXPR
10667 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == ARRAY_REF
)
10669 tree tem
= fold_addr_of_array_ref_difference (loc
, type
,
10670 TREE_OPERAND (arg0
, 0),
10671 TREE_OPERAND (arg1
, 0));
10676 if (FLOAT_TYPE_P (type
)
10677 && flag_unsafe_math_optimizations
10678 && (TREE_CODE (arg0
) == RDIV_EXPR
|| TREE_CODE (arg0
) == MULT_EXPR
)
10679 && (TREE_CODE (arg1
) == RDIV_EXPR
|| TREE_CODE (arg1
) == MULT_EXPR
)
10680 && (tem
= distribute_real_division (loc
, code
, type
, arg0
, arg1
)))
10683 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10684 one. Make sure the type is not saturating and has the signedness of
10685 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10686 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10687 if ((TREE_CODE (arg0
) == MULT_EXPR
10688 || TREE_CODE (arg1
) == MULT_EXPR
)
10689 && !TYPE_SATURATING (type
)
10690 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg0
))
10691 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg1
))
10692 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
10694 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
10702 /* (-A) * (-B) -> A * B */
10703 if (TREE_CODE (arg0
) == NEGATE_EXPR
&& negate_expr_p (arg1
))
10704 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10705 fold_convert_loc (loc
, type
,
10706 TREE_OPERAND (arg0
, 0)),
10707 fold_convert_loc (loc
, type
,
10708 negate_expr (arg1
)));
10709 if (TREE_CODE (arg1
) == NEGATE_EXPR
&& negate_expr_p (arg0
))
10710 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10711 fold_convert_loc (loc
, type
,
10712 negate_expr (arg0
)),
10713 fold_convert_loc (loc
, type
,
10714 TREE_OPERAND (arg1
, 0)));
10716 if (! FLOAT_TYPE_P (type
))
10718 if (integer_zerop (arg1
))
10719 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
10720 if (integer_onep (arg1
))
10721 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10722 /* Transform x * -1 into -x. Make sure to do the negation
10723 on the original operand with conversions not stripped
10724 because we can only strip non-sign-changing conversions. */
10725 if (integer_all_onesp (arg1
))
10726 return fold_convert_loc (loc
, type
, negate_expr (op0
));
10727 /* Transform x * -C into -x * C if x is easily negatable. */
10728 if (TREE_CODE (arg1
) == INTEGER_CST
10729 && tree_int_cst_sgn (arg1
) == -1
10730 && negate_expr_p (arg0
)
10731 && (tem
= negate_expr (arg1
)) != arg1
10732 && !TREE_OVERFLOW (tem
))
10733 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10734 fold_convert_loc (loc
, type
,
10735 negate_expr (arg0
)),
10738 /* (a * (1 << b)) is (a << b) */
10739 if (TREE_CODE (arg1
) == LSHIFT_EXPR
10740 && integer_onep (TREE_OPERAND (arg1
, 0)))
10741 return fold_build2_loc (loc
, LSHIFT_EXPR
, type
, op0
,
10742 TREE_OPERAND (arg1
, 1));
10743 if (TREE_CODE (arg0
) == LSHIFT_EXPR
10744 && integer_onep (TREE_OPERAND (arg0
, 0)))
10745 return fold_build2_loc (loc
, LSHIFT_EXPR
, type
, op1
,
10746 TREE_OPERAND (arg0
, 1));
10748 /* (A + A) * C -> A * 2 * C */
10749 if (TREE_CODE (arg0
) == PLUS_EXPR
10750 && TREE_CODE (arg1
) == INTEGER_CST
10751 && operand_equal_p (TREE_OPERAND (arg0
, 0),
10752 TREE_OPERAND (arg0
, 1), 0))
10753 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10754 omit_one_operand_loc (loc
, type
,
10755 TREE_OPERAND (arg0
, 0),
10756 TREE_OPERAND (arg0
, 1)),
10757 fold_build2_loc (loc
, MULT_EXPR
, type
,
10758 build_int_cst (type
, 2) , arg1
));
10760 strict_overflow_p
= false;
10761 if (TREE_CODE (arg1
) == INTEGER_CST
10762 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
10763 &strict_overflow_p
)))
10765 if (strict_overflow_p
)
10766 fold_overflow_warning (("assuming signed overflow does not "
10767 "occur when simplifying "
10769 WARN_STRICT_OVERFLOW_MISC
);
10770 return fold_convert_loc (loc
, type
, tem
);
10773 /* Optimize z * conj(z) for integer complex numbers. */
10774 if (TREE_CODE (arg0
) == CONJ_EXPR
10775 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10776 return fold_mult_zconjz (loc
, type
, arg1
);
10777 if (TREE_CODE (arg1
) == CONJ_EXPR
10778 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10779 return fold_mult_zconjz (loc
, type
, arg0
);
10783 /* Maybe fold x * 0 to 0. The expressions aren't the same
10784 when x is NaN, since x * 0 is also NaN. Nor are they the
10785 same in modes with signed zeros, since multiplying a
10786 negative value by 0 gives -0, not +0. */
10787 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
)))
10788 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
10789 && real_zerop (arg1
))
10790 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
10791 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10792 Likewise for complex arithmetic with signed zeros. */
10793 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
10794 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
10795 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
10796 && real_onep (arg1
))
10797 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10799 /* Transform x * -1.0 into -x. */
10800 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
10801 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
10802 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
10803 && real_minus_onep (arg1
))
10804 return fold_convert_loc (loc
, type
, negate_expr (arg0
));
10806 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10807 the result for floating point types due to rounding so it is applied
10808 only if -fassociative-math was specify. */
10809 if (flag_associative_math
10810 && TREE_CODE (arg0
) == RDIV_EXPR
10811 && TREE_CODE (arg1
) == REAL_CST
10812 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == REAL_CST
)
10814 tree tem
= const_binop (MULT_EXPR
, TREE_OPERAND (arg0
, 0),
10817 return fold_build2_loc (loc
, RDIV_EXPR
, type
, tem
,
10818 TREE_OPERAND (arg0
, 1));
10821 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10822 if (operand_equal_p (arg0
, arg1
, 0))
10824 tree tem
= fold_strip_sign_ops (arg0
);
10825 if (tem
!= NULL_TREE
)
10827 tem
= fold_convert_loc (loc
, type
, tem
);
10828 return fold_build2_loc (loc
, MULT_EXPR
, type
, tem
, tem
);
10832 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10833 This is not the same for NaNs or if signed zeros are
10835 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
)))
10836 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
10837 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
))
10838 && TREE_CODE (arg1
) == COMPLEX_CST
10839 && real_zerop (TREE_REALPART (arg1
)))
10841 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
10842 if (real_onep (TREE_IMAGPART (arg1
)))
10844 fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
10845 negate_expr (fold_build1_loc (loc
, IMAGPART_EXPR
,
10847 fold_build1_loc (loc
, REALPART_EXPR
, rtype
, arg0
));
10848 else if (real_minus_onep (TREE_IMAGPART (arg1
)))
10850 fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
10851 fold_build1_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
),
10852 negate_expr (fold_build1_loc (loc
, REALPART_EXPR
,
10856 /* Optimize z * conj(z) for floating point complex numbers.
10857 Guarded by flag_unsafe_math_optimizations as non-finite
10858 imaginary components don't produce scalar results. */
10859 if (flag_unsafe_math_optimizations
10860 && TREE_CODE (arg0
) == CONJ_EXPR
10861 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10862 return fold_mult_zconjz (loc
, type
, arg1
);
10863 if (flag_unsafe_math_optimizations
10864 && TREE_CODE (arg1
) == CONJ_EXPR
10865 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10866 return fold_mult_zconjz (loc
, type
, arg0
);
10868 if (flag_unsafe_math_optimizations
)
10870 enum built_in_function fcode0
= builtin_mathfn_code (arg0
);
10871 enum built_in_function fcode1
= builtin_mathfn_code (arg1
);
10873 /* Optimizations of root(...)*root(...). */
10874 if (fcode0
== fcode1
&& BUILTIN_ROOT_P (fcode0
))
10877 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
10878 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
10880 /* Optimize sqrt(x)*sqrt(x) as x. */
10881 if (BUILTIN_SQRT_P (fcode0
)
10882 && operand_equal_p (arg00
, arg10
, 0)
10883 && ! HONOR_SNANS (TYPE_MODE (type
)))
10886 /* Optimize root(x)*root(y) as root(x*y). */
10887 rootfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10888 arg
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg00
, arg10
);
10889 return build_call_expr_loc (loc
, rootfn
, 1, arg
);
10892 /* Optimize expN(x)*expN(y) as expN(x+y). */
10893 if (fcode0
== fcode1
&& BUILTIN_EXPONENT_P (fcode0
))
10895 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10896 tree arg
= fold_build2_loc (loc
, PLUS_EXPR
, type
,
10897 CALL_EXPR_ARG (arg0
, 0),
10898 CALL_EXPR_ARG (arg1
, 0));
10899 return build_call_expr_loc (loc
, expfn
, 1, arg
);
10902 /* Optimizations of pow(...)*pow(...). */
10903 if ((fcode0
== BUILT_IN_POW
&& fcode1
== BUILT_IN_POW
)
10904 || (fcode0
== BUILT_IN_POWF
&& fcode1
== BUILT_IN_POWF
)
10905 || (fcode0
== BUILT_IN_POWL
&& fcode1
== BUILT_IN_POWL
))
10907 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
10908 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
10909 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
10910 tree arg11
= CALL_EXPR_ARG (arg1
, 1);
10912 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10913 if (operand_equal_p (arg01
, arg11
, 0))
10915 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10916 tree arg
= fold_build2_loc (loc
, MULT_EXPR
, type
,
10918 return build_call_expr_loc (loc
, powfn
, 2, arg
, arg01
);
10921 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10922 if (operand_equal_p (arg00
, arg10
, 0))
10924 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10925 tree arg
= fold_build2_loc (loc
, PLUS_EXPR
, type
,
10927 return build_call_expr_loc (loc
, powfn
, 2, arg00
, arg
);
10931 /* Optimize tan(x)*cos(x) as sin(x). */
10932 if (((fcode0
== BUILT_IN_TAN
&& fcode1
== BUILT_IN_COS
)
10933 || (fcode0
== BUILT_IN_TANF
&& fcode1
== BUILT_IN_COSF
)
10934 || (fcode0
== BUILT_IN_TANL
&& fcode1
== BUILT_IN_COSL
)
10935 || (fcode0
== BUILT_IN_COS
&& fcode1
== BUILT_IN_TAN
)
10936 || (fcode0
== BUILT_IN_COSF
&& fcode1
== BUILT_IN_TANF
)
10937 || (fcode0
== BUILT_IN_COSL
&& fcode1
== BUILT_IN_TANL
))
10938 && operand_equal_p (CALL_EXPR_ARG (arg0
, 0),
10939 CALL_EXPR_ARG (arg1
, 0), 0))
10941 tree sinfn
= mathfn_built_in (type
, BUILT_IN_SIN
);
10943 if (sinfn
!= NULL_TREE
)
10944 return build_call_expr_loc (loc
, sinfn
, 1,
10945 CALL_EXPR_ARG (arg0
, 0));
10948 /* Optimize x*pow(x,c) as pow(x,c+1). */
10949 if (fcode1
== BUILT_IN_POW
10950 || fcode1
== BUILT_IN_POWF
10951 || fcode1
== BUILT_IN_POWL
)
10953 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
10954 tree arg11
= CALL_EXPR_ARG (arg1
, 1);
10955 if (TREE_CODE (arg11
) == REAL_CST
10956 && !TREE_OVERFLOW (arg11
)
10957 && operand_equal_p (arg0
, arg10
, 0))
10959 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
10963 c
= TREE_REAL_CST (arg11
);
10964 real_arithmetic (&c
, PLUS_EXPR
, &c
, &dconst1
);
10965 arg
= build_real (type
, c
);
10966 return build_call_expr_loc (loc
, powfn
, 2, arg0
, arg
);
10970 /* Optimize pow(x,c)*x as pow(x,c+1). */
10971 if (fcode0
== BUILT_IN_POW
10972 || fcode0
== BUILT_IN_POWF
10973 || fcode0
== BUILT_IN_POWL
)
10975 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
10976 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
10977 if (TREE_CODE (arg01
) == REAL_CST
10978 && !TREE_OVERFLOW (arg01
)
10979 && operand_equal_p (arg1
, arg00
, 0))
10981 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10985 c
= TREE_REAL_CST (arg01
);
10986 real_arithmetic (&c
, PLUS_EXPR
, &c
, &dconst1
);
10987 arg
= build_real (type
, c
);
10988 return build_call_expr_loc (loc
, powfn
, 2, arg1
, arg
);
10992 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10993 if (!in_gimple_form
10995 && operand_equal_p (arg0
, arg1
, 0))
10997 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
11001 tree arg
= build_real (type
, dconst2
);
11002 return build_call_expr_loc (loc
, powfn
, 2, arg0
, arg
);
11011 if (integer_all_onesp (arg1
))
11012 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
11013 if (integer_zerop (arg1
))
11014 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11015 if (operand_equal_p (arg0
, arg1
, 0))
11016 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11018 /* ~X | X is -1. */
11019 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11020 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11022 t1
= build_zero_cst (type
);
11023 t1
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
, t1
);
11024 return omit_one_operand_loc (loc
, type
, t1
, arg1
);
11027 /* X | ~X is -1. */
11028 if (TREE_CODE (arg1
) == BIT_NOT_EXPR
11029 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11031 t1
= build_zero_cst (type
);
11032 t1
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
, t1
);
11033 return omit_one_operand_loc (loc
, type
, t1
, arg0
);
11036 /* Canonicalize (X & C1) | C2. */
11037 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11038 && TREE_CODE (arg1
) == INTEGER_CST
11039 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11041 double_int c1
, c2
, c3
, msk
;
11042 int width
= TYPE_PRECISION (type
), w
;
11043 c1
= tree_to_double_int (TREE_OPERAND (arg0
, 1));
11044 c2
= tree_to_double_int (arg1
);
11046 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11047 if (double_int_equal_p (double_int_and (c1
, c2
), c1
))
11048 return omit_one_operand_loc (loc
, type
, arg1
,
11049 TREE_OPERAND (arg0
, 0));
11051 msk
= double_int_mask (width
);
11053 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11054 if (double_int_zero_p (double_int_and_not (msk
,
11055 double_int_ior (c1
, c2
))))
11056 return fold_build2_loc (loc
, BIT_IOR_EXPR
, type
,
11057 TREE_OPERAND (arg0
, 0), arg1
);
11059 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11060 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11061 mode which allows further optimizations. */
11062 c1
= double_int_and (c1
, msk
);
11063 c2
= double_int_and (c2
, msk
);
11064 c3
= double_int_and_not (c1
, c2
);
11065 for (w
= BITS_PER_UNIT
;
11066 w
<= width
&& w
<= HOST_BITS_PER_WIDE_INT
;
11069 unsigned HOST_WIDE_INT mask
11070 = (unsigned HOST_WIDE_INT
) -1 >> (HOST_BITS_PER_WIDE_INT
- w
);
11071 if (((c1
.low
| c2
.low
) & mask
) == mask
11072 && (c1
.low
& ~mask
) == 0 && c1
.high
== 0)
11074 c3
= uhwi_to_double_int (mask
);
11078 if (!double_int_equal_p (c3
, c1
))
11079 return fold_build2_loc (loc
, BIT_IOR_EXPR
, type
,
11080 fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11081 TREE_OPERAND (arg0
, 0),
11082 double_int_to_tree (type
,
11087 /* (X & Y) | Y is (X, Y). */
11088 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11089 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11090 return omit_one_operand_loc (loc
, type
, arg1
, TREE_OPERAND (arg0
, 0));
11091 /* (X & Y) | X is (Y, X). */
11092 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11093 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
11094 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
11095 return omit_one_operand_loc (loc
, type
, arg1
, TREE_OPERAND (arg0
, 1));
11096 /* X | (X & Y) is (Y, X). */
11097 if (TREE_CODE (arg1
) == BIT_AND_EXPR
11098 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0)
11099 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 1)))
11100 return omit_one_operand_loc (loc
, type
, arg0
, TREE_OPERAND (arg1
, 1));
11101 /* X | (Y & X) is (Y, X). */
11102 if (TREE_CODE (arg1
) == BIT_AND_EXPR
11103 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
11104 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
11105 return omit_one_operand_loc (loc
, type
, arg0
, TREE_OPERAND (arg1
, 0));
11107 /* (X & ~Y) | (~X & Y) is X ^ Y */
11108 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11109 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
11111 tree a0
, a1
, l0
, l1
, n0
, n1
;
11113 a0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
11114 a1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
11116 l0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11117 l1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11119 n0
= fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, l0
);
11120 n1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, l1
);
11122 if ((operand_equal_p (n0
, a0
, 0)
11123 && operand_equal_p (n1
, a1
, 0))
11124 || (operand_equal_p (n0
, a1
, 0)
11125 && operand_equal_p (n1
, a0
, 0)))
11126 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
, l0
, n1
);
11129 t1
= distribute_bit_expr (loc
, code
, type
, arg0
, arg1
);
11130 if (t1
!= NULL_TREE
)
11133 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11135 This results in more efficient code for machines without a NAND
11136 instruction. Combine will canonicalize to the first form
11137 which will allow use of NAND instructions provided by the
11138 backend if they exist. */
11139 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11140 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
11143 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
11144 build2 (BIT_AND_EXPR
, type
,
11145 fold_convert_loc (loc
, type
,
11146 TREE_OPERAND (arg0
, 0)),
11147 fold_convert_loc (loc
, type
,
11148 TREE_OPERAND (arg1
, 0))));
11151 /* See if this can be simplified into a rotate first. If that
11152 is unsuccessful continue in the association code. */
11156 if (integer_zerop (arg1
))
11157 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11158 if (integer_all_onesp (arg1
))
11159 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, op0
);
11160 if (operand_equal_p (arg0
, arg1
, 0))
11161 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
11163 /* ~X ^ X is -1. */
11164 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11165 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11167 t1
= build_zero_cst (type
);
11168 t1
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
, t1
);
11169 return omit_one_operand_loc (loc
, type
, t1
, arg1
);
11172 /* X ^ ~X is -1. */
11173 if (TREE_CODE (arg1
) == BIT_NOT_EXPR
11174 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11176 t1
= build_zero_cst (type
);
11177 t1
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
, t1
);
11178 return omit_one_operand_loc (loc
, type
, t1
, arg0
);
11181 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11182 with a constant, and the two constants have no bits in common,
11183 we should treat this as a BIT_IOR_EXPR since this may produce more
11184 simplifications. */
11185 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11186 && TREE_CODE (arg1
) == BIT_AND_EXPR
11187 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
11188 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
11189 && integer_zerop (const_binop (BIT_AND_EXPR
,
11190 TREE_OPERAND (arg0
, 1),
11191 TREE_OPERAND (arg1
, 1))))
11193 code
= BIT_IOR_EXPR
;
11197 /* (X | Y) ^ X -> Y & ~ X*/
11198 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
11199 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11201 tree t2
= TREE_OPERAND (arg0
, 1);
11202 t1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg1
),
11204 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11205 fold_convert_loc (loc
, type
, t2
),
11206 fold_convert_loc (loc
, type
, t1
));
11210 /* (Y | X) ^ X -> Y & ~ X*/
11211 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
11212 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11214 tree t2
= TREE_OPERAND (arg0
, 0);
11215 t1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg1
),
11217 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11218 fold_convert_loc (loc
, type
, t2
),
11219 fold_convert_loc (loc
, type
, t1
));
11223 /* X ^ (X | Y) -> Y & ~ X*/
11224 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
11225 && operand_equal_p (TREE_OPERAND (arg1
, 0), arg0
, 0))
11227 tree t2
= TREE_OPERAND (arg1
, 1);
11228 t1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg0
),
11230 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11231 fold_convert_loc (loc
, type
, t2
),
11232 fold_convert_loc (loc
, type
, t1
));
11236 /* X ^ (Y | X) -> Y & ~ X*/
11237 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
11238 && operand_equal_p (TREE_OPERAND (arg1
, 1), arg0
, 0))
11240 tree t2
= TREE_OPERAND (arg1
, 0);
11241 t1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg0
),
11243 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11244 fold_convert_loc (loc
, type
, t2
),
11245 fold_convert_loc (loc
, type
, t1
));
11249 /* Convert ~X ^ ~Y to X ^ Y. */
11250 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11251 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
11252 return fold_build2_loc (loc
, code
, type
,
11253 fold_convert_loc (loc
, type
,
11254 TREE_OPERAND (arg0
, 0)),
11255 fold_convert_loc (loc
, type
,
11256 TREE_OPERAND (arg1
, 0)));
11258 /* Convert ~X ^ C to X ^ ~C. */
11259 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11260 && TREE_CODE (arg1
) == INTEGER_CST
)
11261 return fold_build2_loc (loc
, code
, type
,
11262 fold_convert_loc (loc
, type
,
11263 TREE_OPERAND (arg0
, 0)),
11264 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, arg1
));
11266 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11267 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11268 && integer_onep (TREE_OPERAND (arg0
, 1))
11269 && integer_onep (arg1
))
11270 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
,
11271 build_zero_cst (TREE_TYPE (arg0
)));
11273 /* Fold (X & Y) ^ Y as ~X & Y. */
11274 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11275 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11277 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11278 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11279 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11280 fold_convert_loc (loc
, type
, arg1
));
11282 /* Fold (X & Y) ^ X as ~Y & X. */
11283 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11284 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
11285 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
11287 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11288 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11289 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11290 fold_convert_loc (loc
, type
, arg1
));
11292 /* Fold X ^ (X & Y) as X & ~Y. */
11293 if (TREE_CODE (arg1
) == BIT_AND_EXPR
11294 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11296 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
11297 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11298 fold_convert_loc (loc
, type
, arg0
),
11299 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
));
11301 /* Fold X ^ (Y & X) as ~Y & X. */
11302 if (TREE_CODE (arg1
) == BIT_AND_EXPR
11303 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
11304 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
11306 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
11307 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11308 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11309 fold_convert_loc (loc
, type
, arg0
));
11312 /* See if this can be simplified into a rotate first. If that
11313 is unsuccessful continue in the association code. */
11317 if (integer_all_onesp (arg1
))
11318 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11319 if (integer_zerop (arg1
))
11320 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
11321 if (operand_equal_p (arg0
, arg1
, 0))
11322 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11324 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11325 if ((TREE_CODE (arg0
) == BIT_NOT_EXPR
11326 || TREE_CODE (arg0
) == TRUTH_NOT_EXPR
11327 || (TREE_CODE (arg0
) == EQ_EXPR
11328 && integer_zerop (TREE_OPERAND (arg0
, 1))))
11329 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11330 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg1
);
11332 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11333 if ((TREE_CODE (arg1
) == BIT_NOT_EXPR
11334 || TREE_CODE (arg1
) == TRUTH_NOT_EXPR
11335 || (TREE_CODE (arg1
) == EQ_EXPR
11336 && integer_zerop (TREE_OPERAND (arg1
, 1))))
11337 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11338 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
11340 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11341 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
11342 && TREE_CODE (arg1
) == INTEGER_CST
11343 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11345 tree tmp1
= fold_convert_loc (loc
, type
, arg1
);
11346 tree tmp2
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11347 tree tmp3
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11348 tmp2
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
, tmp2
, tmp1
);
11349 tmp3
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
, tmp3
, tmp1
);
11351 fold_convert_loc (loc
, type
,
11352 fold_build2_loc (loc
, BIT_IOR_EXPR
,
11353 type
, tmp2
, tmp3
));
11356 /* (X | Y) & Y is (X, Y). */
11357 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
11358 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11359 return omit_one_operand_loc (loc
, type
, arg1
, TREE_OPERAND (arg0
, 0));
11360 /* (X | Y) & X is (Y, X). */
11361 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
11362 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
11363 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
11364 return omit_one_operand_loc (loc
, type
, arg1
, TREE_OPERAND (arg0
, 1));
11365 /* X & (X | Y) is (Y, X). */
11366 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
11367 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0)
11368 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 1)))
11369 return omit_one_operand_loc (loc
, type
, arg0
, TREE_OPERAND (arg1
, 1));
11370 /* X & (Y | X) is (Y, X). */
11371 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
11372 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
11373 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
11374 return omit_one_operand_loc (loc
, type
, arg0
, TREE_OPERAND (arg1
, 0));
11376 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11377 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11378 && integer_onep (TREE_OPERAND (arg0
, 1))
11379 && integer_onep (arg1
))
11382 tem
= TREE_OPERAND (arg0
, 0);
11383 tem2
= fold_convert_loc (loc
, TREE_TYPE (tem
), arg1
);
11384 tem2
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
),
11386 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem2
,
11387 build_zero_cst (TREE_TYPE (tem
)));
11389 /* Fold ~X & 1 as (X & 1) == 0. */
11390 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11391 && integer_onep (arg1
))
11394 tem
= TREE_OPERAND (arg0
, 0);
11395 tem2
= fold_convert_loc (loc
, TREE_TYPE (tem
), arg1
);
11396 tem2
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
),
11398 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem2
,
11399 build_zero_cst (TREE_TYPE (tem
)));
11401 /* Fold !X & 1 as X == 0. */
11402 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
11403 && integer_onep (arg1
))
11405 tem
= TREE_OPERAND (arg0
, 0);
11406 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem
,
11407 build_zero_cst (TREE_TYPE (tem
)));
11410 /* Fold (X ^ Y) & Y as ~X & Y. */
11411 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11412 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11414 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11415 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11416 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11417 fold_convert_loc (loc
, type
, arg1
));
11419 /* Fold (X ^ Y) & X as ~Y & X. */
11420 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11421 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
11422 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
11424 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11425 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11426 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11427 fold_convert_loc (loc
, type
, arg1
));
11429 /* Fold X & (X ^ Y) as X & ~Y. */
11430 if (TREE_CODE (arg1
) == BIT_XOR_EXPR
11431 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11433 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
11434 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11435 fold_convert_loc (loc
, type
, arg0
),
11436 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
));
11438 /* Fold X & (Y ^ X) as ~Y & X. */
11439 if (TREE_CODE (arg1
) == BIT_XOR_EXPR
11440 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
11441 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
11443 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
11444 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11445 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11446 fold_convert_loc (loc
, type
, arg0
));
11449 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11450 multiple of 1 << CST. */
11451 if (TREE_CODE (arg1
) == INTEGER_CST
)
11453 double_int cst1
= tree_to_double_int (arg1
);
11454 double_int ncst1
= double_int_ext (double_int_neg (cst1
),
11455 TYPE_PRECISION (TREE_TYPE (arg1
)),
11456 TYPE_UNSIGNED (TREE_TYPE (arg1
)));
11457 if (double_int_equal_p (double_int_and (cst1
, ncst1
), ncst1
)
11458 && multiple_of_p (type
, arg0
,
11459 double_int_to_tree (TREE_TYPE (arg1
), ncst1
)))
11460 return fold_convert_loc (loc
, type
, arg0
);
11463 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11465 if (TREE_CODE (arg1
) == INTEGER_CST
11466 && TREE_CODE (arg0
) == MULT_EXPR
11467 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11470 = double_int_ctz (tree_to_double_int (TREE_OPERAND (arg0
, 1)));
11473 double_int arg1mask
, masked
;
11474 arg1mask
= double_int_not (double_int_mask (arg1tz
));
11475 arg1mask
= double_int_ext (arg1mask
, TYPE_PRECISION (type
),
11476 TYPE_UNSIGNED (type
));
11477 masked
= double_int_and (arg1mask
, tree_to_double_int (arg1
));
11478 if (double_int_zero_p (masked
))
11479 return omit_two_operands_loc (loc
, type
, build_zero_cst (type
),
11481 else if (!double_int_equal_p (masked
, tree_to_double_int (arg1
)))
11482 return fold_build2_loc (loc
, code
, type
, op0
,
11483 double_int_to_tree (type
, masked
));
11487 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11488 ((A & N) + B) & M -> (A + B) & M
11489 Similarly if (N & M) == 0,
11490 ((A | N) + B) & M -> (A + B) & M
11491 and for - instead of + (or unary - instead of +)
11492 and/or ^ instead of |.
11493 If B is constant and (B & M) == 0, fold into A & M. */
11494 if (host_integerp (arg1
, 1))
11496 unsigned HOST_WIDE_INT cst1
= tree_low_cst (arg1
, 1);
11497 if (~cst1
&& (cst1
& (cst1
+ 1)) == 0
11498 && INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
11499 && (TREE_CODE (arg0
) == PLUS_EXPR
11500 || TREE_CODE (arg0
) == MINUS_EXPR
11501 || TREE_CODE (arg0
) == NEGATE_EXPR
)
11502 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
))
11503 || TREE_CODE (TREE_TYPE (arg0
)) == INTEGER_TYPE
))
11507 unsigned HOST_WIDE_INT cst0
;
11509 /* Now we know that arg0 is (C + D) or (C - D) or
11510 -C and arg1 (M) is == (1LL << cst) - 1.
11511 Store C into PMOP[0] and D into PMOP[1]. */
11512 pmop
[0] = TREE_OPERAND (arg0
, 0);
11514 if (TREE_CODE (arg0
) != NEGATE_EXPR
)
11516 pmop
[1] = TREE_OPERAND (arg0
, 1);
11520 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0
)), 1)
11521 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0
)), 1)
11525 for (; which
>= 0; which
--)
11526 switch (TREE_CODE (pmop
[which
]))
11531 if (TREE_CODE (TREE_OPERAND (pmop
[which
], 1))
11534 /* tree_low_cst not used, because we don't care about
11536 cst0
= TREE_INT_CST_LOW (TREE_OPERAND (pmop
[which
], 1));
11538 if (TREE_CODE (pmop
[which
]) == BIT_AND_EXPR
)
11543 else if (cst0
!= 0)
11545 /* If C or D is of the form (A & N) where
11546 (N & M) == M, or of the form (A | N) or
11547 (A ^ N) where (N & M) == 0, replace it with A. */
11548 pmop
[which
] = TREE_OPERAND (pmop
[which
], 0);
11551 /* If C or D is a N where (N & M) == 0, it can be
11552 omitted (assumed 0). */
11553 if ((TREE_CODE (arg0
) == PLUS_EXPR
11554 || (TREE_CODE (arg0
) == MINUS_EXPR
&& which
== 0))
11555 && (TREE_INT_CST_LOW (pmop
[which
]) & cst1
) == 0)
11556 pmop
[which
] = NULL
;
11562 /* Only build anything new if we optimized one or both arguments
11564 if (pmop
[0] != TREE_OPERAND (arg0
, 0)
11565 || (TREE_CODE (arg0
) != NEGATE_EXPR
11566 && pmop
[1] != TREE_OPERAND (arg0
, 1)))
11568 tree utype
= TREE_TYPE (arg0
);
11569 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
11571 /* Perform the operations in a type that has defined
11572 overflow behavior. */
11573 utype
= unsigned_type_for (TREE_TYPE (arg0
));
11574 if (pmop
[0] != NULL
)
11575 pmop
[0] = fold_convert_loc (loc
, utype
, pmop
[0]);
11576 if (pmop
[1] != NULL
)
11577 pmop
[1] = fold_convert_loc (loc
, utype
, pmop
[1]);
11580 if (TREE_CODE (arg0
) == NEGATE_EXPR
)
11581 tem
= fold_build1_loc (loc
, NEGATE_EXPR
, utype
, pmop
[0]);
11582 else if (TREE_CODE (arg0
) == PLUS_EXPR
)
11584 if (pmop
[0] != NULL
&& pmop
[1] != NULL
)
11585 tem
= fold_build2_loc (loc
, PLUS_EXPR
, utype
,
11587 else if (pmop
[0] != NULL
)
11589 else if (pmop
[1] != NULL
)
11592 return build_int_cst (type
, 0);
11594 else if (pmop
[0] == NULL
)
11595 tem
= fold_build1_loc (loc
, NEGATE_EXPR
, utype
, pmop
[1]);
11597 tem
= fold_build2_loc (loc
, MINUS_EXPR
, utype
,
11599 /* TEM is now the new binary +, - or unary - replacement. */
11600 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, utype
, tem
,
11601 fold_convert_loc (loc
, utype
, arg1
));
11602 return fold_convert_loc (loc
, type
, tem
);
11607 t1
= distribute_bit_expr (loc
, code
, type
, arg0
, arg1
);
11608 if (t1
!= NULL_TREE
)
11610 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11611 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) == NOP_EXPR
11612 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0
, 0))))
11615 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0
, 0)));
11617 if (prec
< BITS_PER_WORD
&& prec
< HOST_BITS_PER_WIDE_INT
11618 && (~TREE_INT_CST_LOW (arg1
)
11619 & (((HOST_WIDE_INT
) 1 << prec
) - 1)) == 0)
11621 fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11624 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11626 This results in more efficient code for machines without a NOR
11627 instruction. Combine will canonicalize to the first form
11628 which will allow use of NOR instructions provided by the
11629 backend if they exist. */
11630 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11631 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
11633 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
11634 build2 (BIT_IOR_EXPR
, type
,
11635 fold_convert_loc (loc
, type
,
11636 TREE_OPERAND (arg0
, 0)),
11637 fold_convert_loc (loc
, type
,
11638 TREE_OPERAND (arg1
, 0))));
11641 /* If arg0 is derived from the address of an object or function, we may
11642 be able to fold this expression using the object or function's
11644 if (POINTER_TYPE_P (TREE_TYPE (arg0
)) && host_integerp (arg1
, 1))
11646 unsigned HOST_WIDE_INT modulus
, residue
;
11647 unsigned HOST_WIDE_INT low
= TREE_INT_CST_LOW (arg1
);
11649 modulus
= get_pointer_modulus_and_residue (arg0
, &residue
,
11650 integer_onep (arg1
));
11652 /* This works because modulus is a power of 2. If this weren't the
11653 case, we'd have to replace it by its greatest power-of-2
11654 divisor: modulus & -modulus. */
11656 return build_int_cst (type
, residue
& low
);
11659 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11660 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11661 if the new mask might be further optimized. */
11662 if ((TREE_CODE (arg0
) == LSHIFT_EXPR
11663 || TREE_CODE (arg0
) == RSHIFT_EXPR
)
11664 && host_integerp (TREE_OPERAND (arg0
, 1), 1)
11665 && host_integerp (arg1
, TYPE_UNSIGNED (TREE_TYPE (arg1
)))
11666 && tree_low_cst (TREE_OPERAND (arg0
, 1), 1)
11667 < TYPE_PRECISION (TREE_TYPE (arg0
))
11668 && TYPE_PRECISION (TREE_TYPE (arg0
)) <= HOST_BITS_PER_WIDE_INT
11669 && tree_low_cst (TREE_OPERAND (arg0
, 1), 1) > 0)
11671 unsigned int shiftc
= tree_low_cst (TREE_OPERAND (arg0
, 1), 1);
11672 unsigned HOST_WIDE_INT mask
11673 = tree_low_cst (arg1
, TYPE_UNSIGNED (TREE_TYPE (arg1
)));
11674 unsigned HOST_WIDE_INT newmask
, zerobits
= 0;
11675 tree shift_type
= TREE_TYPE (arg0
);
11677 if (TREE_CODE (arg0
) == LSHIFT_EXPR
)
11678 zerobits
= ((((unsigned HOST_WIDE_INT
) 1) << shiftc
) - 1);
11679 else if (TREE_CODE (arg0
) == RSHIFT_EXPR
11680 && TYPE_PRECISION (TREE_TYPE (arg0
))
11681 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0
))))
11683 unsigned int prec
= TYPE_PRECISION (TREE_TYPE (arg0
));
11684 tree arg00
= TREE_OPERAND (arg0
, 0);
11685 /* See if more bits can be proven as zero because of
11687 if (TREE_CODE (arg00
) == NOP_EXPR
11688 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00
, 0))))
11690 tree inner_type
= TREE_TYPE (TREE_OPERAND (arg00
, 0));
11691 if (TYPE_PRECISION (inner_type
)
11692 == GET_MODE_BITSIZE (TYPE_MODE (inner_type
))
11693 && TYPE_PRECISION (inner_type
) < prec
)
11695 prec
= TYPE_PRECISION (inner_type
);
11696 /* See if we can shorten the right shift. */
11698 shift_type
= inner_type
;
11701 zerobits
= ~(unsigned HOST_WIDE_INT
) 0;
11702 zerobits
>>= HOST_BITS_PER_WIDE_INT
- shiftc
;
11703 zerobits
<<= prec
- shiftc
;
11704 /* For arithmetic shift if sign bit could be set, zerobits
11705 can contain actually sign bits, so no transformation is
11706 possible, unless MASK masks them all away. In that
11707 case the shift needs to be converted into logical shift. */
11708 if (!TYPE_UNSIGNED (TREE_TYPE (arg0
))
11709 && prec
== TYPE_PRECISION (TREE_TYPE (arg0
)))
11711 if ((mask
& zerobits
) == 0)
11712 shift_type
= unsigned_type_for (TREE_TYPE (arg0
));
11718 /* ((X << 16) & 0xff00) is (X, 0). */
11719 if ((mask
& zerobits
) == mask
)
11720 return omit_one_operand_loc (loc
, type
,
11721 build_int_cst (type
, 0), arg0
);
11723 newmask
= mask
| zerobits
;
11724 if (newmask
!= mask
&& (newmask
& (newmask
+ 1)) == 0)
11728 /* Only do the transformation if NEWMASK is some integer
11730 for (prec
= BITS_PER_UNIT
;
11731 prec
< HOST_BITS_PER_WIDE_INT
; prec
<<= 1)
11732 if (newmask
== (((unsigned HOST_WIDE_INT
) 1) << prec
) - 1)
11734 if (prec
< HOST_BITS_PER_WIDE_INT
11735 || newmask
== ~(unsigned HOST_WIDE_INT
) 0)
11739 if (shift_type
!= TREE_TYPE (arg0
))
11741 tem
= fold_build2_loc (loc
, TREE_CODE (arg0
), shift_type
,
11742 fold_convert_loc (loc
, shift_type
,
11743 TREE_OPERAND (arg0
, 0)),
11744 TREE_OPERAND (arg0
, 1));
11745 tem
= fold_convert_loc (loc
, type
, tem
);
11749 newmaskt
= build_int_cst_type (TREE_TYPE (op1
), newmask
);
11750 if (!tree_int_cst_equal (newmaskt
, arg1
))
11751 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
, tem
, newmaskt
);
11759 /* Don't touch a floating-point divide by zero unless the mode
11760 of the constant can represent infinity. */
11761 if (TREE_CODE (arg1
) == REAL_CST
11762 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1
)))
11763 && real_zerop (arg1
))
11766 /* Optimize A / A to 1.0 if we don't care about
11767 NaNs or Infinities. Skip the transformation
11768 for non-real operands. */
11769 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0
))
11770 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
)))
11771 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0
)))
11772 && operand_equal_p (arg0
, arg1
, 0))
11774 tree r
= build_real (TREE_TYPE (arg0
), dconst1
);
11776 return omit_two_operands_loc (loc
, type
, r
, arg0
, arg1
);
11779 /* The complex version of the above A / A optimization. */
11780 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
))
11781 && operand_equal_p (arg0
, arg1
, 0))
11783 tree elem_type
= TREE_TYPE (TREE_TYPE (arg0
));
11784 if (! HONOR_NANS (TYPE_MODE (elem_type
))
11785 && ! HONOR_INFINITIES (TYPE_MODE (elem_type
)))
11787 tree r
= build_real (elem_type
, dconst1
);
11788 /* omit_two_operands will call fold_convert for us. */
11789 return omit_two_operands_loc (loc
, type
, r
, arg0
, arg1
);
11793 /* (-A) / (-B) -> A / B */
11794 if (TREE_CODE (arg0
) == NEGATE_EXPR
&& negate_expr_p (arg1
))
11795 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11796 TREE_OPERAND (arg0
, 0),
11797 negate_expr (arg1
));
11798 if (TREE_CODE (arg1
) == NEGATE_EXPR
&& negate_expr_p (arg0
))
11799 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11800 negate_expr (arg0
),
11801 TREE_OPERAND (arg1
, 0));
11803 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11804 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
11805 && real_onep (arg1
))
11806 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11808 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11809 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
11810 && real_minus_onep (arg1
))
11811 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
,
11812 negate_expr (arg0
)));
11814 /* If ARG1 is a constant, we can convert this to a multiply by the
11815 reciprocal. This does not have the same rounding properties,
11816 so only do this if -freciprocal-math. We can actually
11817 always safely do it if ARG1 is a power of two, but it's hard to
11818 tell if it is or not in a portable manner. */
11820 && (TREE_CODE (arg1
) == REAL_CST
11821 || (TREE_CODE (arg1
) == COMPLEX_CST
11822 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1
)))
11823 || (TREE_CODE (arg1
) == VECTOR_CST
11824 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1
)))))
11826 if (flag_reciprocal_math
11827 && 0 != (tem
= const_binop (code
, build_one_cst (type
), arg1
)))
11828 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, tem
);
11829 /* Find the reciprocal if optimizing and the result is exact.
11830 TODO: Complex reciprocal not implemented. */
11831 if (TREE_CODE (arg1
) != COMPLEX_CST
)
11833 tree inverse
= exact_inverse (TREE_TYPE (arg0
), arg1
);
11836 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, inverse
);
11839 /* Convert A/B/C to A/(B*C). */
11840 if (flag_reciprocal_math
11841 && TREE_CODE (arg0
) == RDIV_EXPR
)
11842 return fold_build2_loc (loc
, RDIV_EXPR
, type
, TREE_OPERAND (arg0
, 0),
11843 fold_build2_loc (loc
, MULT_EXPR
, type
,
11844 TREE_OPERAND (arg0
, 1), arg1
));
11846 /* Convert A/(B/C) to (A/B)*C. */
11847 if (flag_reciprocal_math
11848 && TREE_CODE (arg1
) == RDIV_EXPR
)
11849 return fold_build2_loc (loc
, MULT_EXPR
, type
,
11850 fold_build2_loc (loc
, RDIV_EXPR
, type
, arg0
,
11851 TREE_OPERAND (arg1
, 0)),
11852 TREE_OPERAND (arg1
, 1));
11854 /* Convert C1/(X*C2) into (C1/C2)/X. */
11855 if (flag_reciprocal_math
11856 && TREE_CODE (arg1
) == MULT_EXPR
11857 && TREE_CODE (arg0
) == REAL_CST
11858 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == REAL_CST
)
11860 tree tem
= const_binop (RDIV_EXPR
, arg0
,
11861 TREE_OPERAND (arg1
, 1));
11863 return fold_build2_loc (loc
, RDIV_EXPR
, type
, tem
,
11864 TREE_OPERAND (arg1
, 0));
11867 if (flag_unsafe_math_optimizations
)
11869 enum built_in_function fcode0
= builtin_mathfn_code (arg0
);
11870 enum built_in_function fcode1
= builtin_mathfn_code (arg1
);
11872 /* Optimize sin(x)/cos(x) as tan(x). */
11873 if (((fcode0
== BUILT_IN_SIN
&& fcode1
== BUILT_IN_COS
)
11874 || (fcode0
== BUILT_IN_SINF
&& fcode1
== BUILT_IN_COSF
)
11875 || (fcode0
== BUILT_IN_SINL
&& fcode1
== BUILT_IN_COSL
))
11876 && operand_equal_p (CALL_EXPR_ARG (arg0
, 0),
11877 CALL_EXPR_ARG (arg1
, 0), 0))
11879 tree tanfn
= mathfn_built_in (type
, BUILT_IN_TAN
);
11881 if (tanfn
!= NULL_TREE
)
11882 return build_call_expr_loc (loc
, tanfn
, 1, CALL_EXPR_ARG (arg0
, 0));
11885 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11886 if (((fcode0
== BUILT_IN_COS
&& fcode1
== BUILT_IN_SIN
)
11887 || (fcode0
== BUILT_IN_COSF
&& fcode1
== BUILT_IN_SINF
)
11888 || (fcode0
== BUILT_IN_COSL
&& fcode1
== BUILT_IN_SINL
))
11889 && operand_equal_p (CALL_EXPR_ARG (arg0
, 0),
11890 CALL_EXPR_ARG (arg1
, 0), 0))
11892 tree tanfn
= mathfn_built_in (type
, BUILT_IN_TAN
);
11894 if (tanfn
!= NULL_TREE
)
11896 tree tmp
= build_call_expr_loc (loc
, tanfn
, 1,
11897 CALL_EXPR_ARG (arg0
, 0));
11898 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11899 build_real (type
, dconst1
), tmp
);
11903 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11904 NaNs or Infinities. */
11905 if (((fcode0
== BUILT_IN_SIN
&& fcode1
== BUILT_IN_TAN
)
11906 || (fcode0
== BUILT_IN_SINF
&& fcode1
== BUILT_IN_TANF
)
11907 || (fcode0
== BUILT_IN_SINL
&& fcode1
== BUILT_IN_TANL
)))
11909 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
11910 tree arg01
= CALL_EXPR_ARG (arg1
, 0);
11912 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00
)))
11913 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00
)))
11914 && operand_equal_p (arg00
, arg01
, 0))
11916 tree cosfn
= mathfn_built_in (type
, BUILT_IN_COS
);
11918 if (cosfn
!= NULL_TREE
)
11919 return build_call_expr_loc (loc
, cosfn
, 1, arg00
);
11923 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11924 NaNs or Infinities. */
11925 if (((fcode0
== BUILT_IN_TAN
&& fcode1
== BUILT_IN_SIN
)
11926 || (fcode0
== BUILT_IN_TANF
&& fcode1
== BUILT_IN_SINF
)
11927 || (fcode0
== BUILT_IN_TANL
&& fcode1
== BUILT_IN_SINL
)))
11929 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
11930 tree arg01
= CALL_EXPR_ARG (arg1
, 0);
11932 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00
)))
11933 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00
)))
11934 && operand_equal_p (arg00
, arg01
, 0))
11936 tree cosfn
= mathfn_built_in (type
, BUILT_IN_COS
);
11938 if (cosfn
!= NULL_TREE
)
11940 tree tmp
= build_call_expr_loc (loc
, cosfn
, 1, arg00
);
11941 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11942 build_real (type
, dconst1
),
11948 /* Optimize pow(x,c)/x as pow(x,c-1). */
11949 if (fcode0
== BUILT_IN_POW
11950 || fcode0
== BUILT_IN_POWF
11951 || fcode0
== BUILT_IN_POWL
)
11953 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
11954 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
11955 if (TREE_CODE (arg01
) == REAL_CST
11956 && !TREE_OVERFLOW (arg01
)
11957 && operand_equal_p (arg1
, arg00
, 0))
11959 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
11963 c
= TREE_REAL_CST (arg01
);
11964 real_arithmetic (&c
, MINUS_EXPR
, &c
, &dconst1
);
11965 arg
= build_real (type
, c
);
11966 return build_call_expr_loc (loc
, powfn
, 2, arg1
, arg
);
11970 /* Optimize a/root(b/c) into a*root(c/b). */
11971 if (BUILTIN_ROOT_P (fcode1
))
11973 tree rootarg
= CALL_EXPR_ARG (arg1
, 0);
11975 if (TREE_CODE (rootarg
) == RDIV_EXPR
)
11977 tree rootfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
11978 tree b
= TREE_OPERAND (rootarg
, 0);
11979 tree c
= TREE_OPERAND (rootarg
, 1);
11981 tree tmp
= fold_build2_loc (loc
, RDIV_EXPR
, type
, c
, b
);
11983 tmp
= build_call_expr_loc (loc
, rootfn
, 1, tmp
);
11984 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, tmp
);
11988 /* Optimize x/expN(y) into x*expN(-y). */
11989 if (BUILTIN_EXPONENT_P (fcode1
))
11991 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
11992 tree arg
= negate_expr (CALL_EXPR_ARG (arg1
, 0));
11993 arg1
= build_call_expr_loc (loc
,
11995 fold_convert_loc (loc
, type
, arg
));
11996 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, arg1
);
11999 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12000 if (fcode1
== BUILT_IN_POW
12001 || fcode1
== BUILT_IN_POWF
12002 || fcode1
== BUILT_IN_POWL
)
12004 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
12005 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
12006 tree arg11
= CALL_EXPR_ARG (arg1
, 1);
12007 tree neg11
= fold_convert_loc (loc
, type
,
12008 negate_expr (arg11
));
12009 arg1
= build_call_expr_loc (loc
, powfn
, 2, arg10
, neg11
);
12010 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, arg1
);
12015 case TRUNC_DIV_EXPR
:
12016 /* Optimize (X & (-A)) / A where A is a power of 2,
12018 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12019 && !TYPE_UNSIGNED (type
) && TREE_CODE (arg1
) == INTEGER_CST
12020 && integer_pow2p (arg1
) && tree_int_cst_sgn (arg1
) > 0)
12022 tree sum
= fold_binary_loc (loc
, PLUS_EXPR
, TREE_TYPE (arg1
),
12023 arg1
, TREE_OPERAND (arg0
, 1));
12024 if (sum
&& integer_zerop (sum
)) {
12025 unsigned long pow2
;
12027 if (TREE_INT_CST_LOW (arg1
))
12028 pow2
= exact_log2 (TREE_INT_CST_LOW (arg1
));
12030 pow2
= exact_log2 (TREE_INT_CST_HIGH (arg1
))
12031 + HOST_BITS_PER_WIDE_INT
;
12033 return fold_build2_loc (loc
, RSHIFT_EXPR
, type
,
12034 TREE_OPERAND (arg0
, 0),
12035 build_int_cst (integer_type_node
, pow2
));
12041 case FLOOR_DIV_EXPR
:
12042 /* Simplify A / (B << N) where A and B are positive and B is
12043 a power of 2, to A >> (N + log2(B)). */
12044 strict_overflow_p
= false;
12045 if (TREE_CODE (arg1
) == LSHIFT_EXPR
12046 && (TYPE_UNSIGNED (type
)
12047 || tree_expr_nonnegative_warnv_p (op0
, &strict_overflow_p
)))
12049 tree sval
= TREE_OPERAND (arg1
, 0);
12050 if (integer_pow2p (sval
) && tree_int_cst_sgn (sval
) > 0)
12052 tree sh_cnt
= TREE_OPERAND (arg1
, 1);
12053 unsigned long pow2
;
12055 if (TREE_INT_CST_LOW (sval
))
12056 pow2
= exact_log2 (TREE_INT_CST_LOW (sval
));
12058 pow2
= exact_log2 (TREE_INT_CST_HIGH (sval
))
12059 + HOST_BITS_PER_WIDE_INT
;
12061 if (strict_overflow_p
)
12062 fold_overflow_warning (("assuming signed overflow does not "
12063 "occur when simplifying A / (B << N)"),
12064 WARN_STRICT_OVERFLOW_MISC
);
12066 sh_cnt
= fold_build2_loc (loc
, PLUS_EXPR
, TREE_TYPE (sh_cnt
),
12068 build_int_cst (TREE_TYPE (sh_cnt
),
12070 return fold_build2_loc (loc
, RSHIFT_EXPR
, type
,
12071 fold_convert_loc (loc
, type
, arg0
), sh_cnt
);
12075 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12076 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12077 if (INTEGRAL_TYPE_P (type
)
12078 && TYPE_UNSIGNED (type
)
12079 && code
== FLOOR_DIV_EXPR
)
12080 return fold_build2_loc (loc
, TRUNC_DIV_EXPR
, type
, op0
, op1
);
12084 case ROUND_DIV_EXPR
:
12085 case CEIL_DIV_EXPR
:
12086 case EXACT_DIV_EXPR
:
12087 if (integer_onep (arg1
))
12088 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12089 if (integer_zerop (arg1
))
12091 /* X / -1 is -X. */
12092 if (!TYPE_UNSIGNED (type
)
12093 && TREE_CODE (arg1
) == INTEGER_CST
12094 && TREE_INT_CST_LOW (arg1
) == (unsigned HOST_WIDE_INT
) -1
12095 && TREE_INT_CST_HIGH (arg1
) == -1)
12096 return fold_convert_loc (loc
, type
, negate_expr (arg0
));
12098 /* Convert -A / -B to A / B when the type is signed and overflow is
12100 if ((!INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
12101 && TREE_CODE (arg0
) == NEGATE_EXPR
12102 && negate_expr_p (arg1
))
12104 if (INTEGRAL_TYPE_P (type
))
12105 fold_overflow_warning (("assuming signed overflow does not occur "
12106 "when distributing negation across "
12108 WARN_STRICT_OVERFLOW_MISC
);
12109 return fold_build2_loc (loc
, code
, type
,
12110 fold_convert_loc (loc
, type
,
12111 TREE_OPERAND (arg0
, 0)),
12112 fold_convert_loc (loc
, type
,
12113 negate_expr (arg1
)));
12115 if ((!INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
12116 && TREE_CODE (arg1
) == NEGATE_EXPR
12117 && negate_expr_p (arg0
))
12119 if (INTEGRAL_TYPE_P (type
))
12120 fold_overflow_warning (("assuming signed overflow does not occur "
12121 "when distributing negation across "
12123 WARN_STRICT_OVERFLOW_MISC
);
12124 return fold_build2_loc (loc
, code
, type
,
12125 fold_convert_loc (loc
, type
,
12126 negate_expr (arg0
)),
12127 fold_convert_loc (loc
, type
,
12128 TREE_OPERAND (arg1
, 0)));
12131 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12132 operation, EXACT_DIV_EXPR.
12134 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12135 At one time others generated faster code, it's not clear if they do
12136 after the last round to changes to the DIV code in expmed.c. */
12137 if ((code
== CEIL_DIV_EXPR
|| code
== FLOOR_DIV_EXPR
)
12138 && multiple_of_p (type
, arg0
, arg1
))
12139 return fold_build2_loc (loc
, EXACT_DIV_EXPR
, type
, arg0
, arg1
);
12141 strict_overflow_p
= false;
12142 if (TREE_CODE (arg1
) == INTEGER_CST
12143 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
12144 &strict_overflow_p
)))
12146 if (strict_overflow_p
)
12147 fold_overflow_warning (("assuming signed overflow does not occur "
12148 "when simplifying division"),
12149 WARN_STRICT_OVERFLOW_MISC
);
12150 return fold_convert_loc (loc
, type
, tem
);
12155 case CEIL_MOD_EXPR
:
12156 case FLOOR_MOD_EXPR
:
12157 case ROUND_MOD_EXPR
:
12158 case TRUNC_MOD_EXPR
:
12159 /* X % 1 is always zero, but be sure to preserve any side
12161 if (integer_onep (arg1
))
12162 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
12164 /* X % 0, return X % 0 unchanged so that we can get the
12165 proper warnings and errors. */
12166 if (integer_zerop (arg1
))
12169 /* 0 % X is always zero, but be sure to preserve any side
12170 effects in X. Place this after checking for X == 0. */
12171 if (integer_zerop (arg0
))
12172 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg1
);
12174 /* X % -1 is zero. */
12175 if (!TYPE_UNSIGNED (type
)
12176 && TREE_CODE (arg1
) == INTEGER_CST
12177 && TREE_INT_CST_LOW (arg1
) == (unsigned HOST_WIDE_INT
) -1
12178 && TREE_INT_CST_HIGH (arg1
) == -1)
12179 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
12181 /* X % -C is the same as X % C. */
12182 if (code
== TRUNC_MOD_EXPR
12183 && !TYPE_UNSIGNED (type
)
12184 && TREE_CODE (arg1
) == INTEGER_CST
12185 && !TREE_OVERFLOW (arg1
)
12186 && TREE_INT_CST_HIGH (arg1
) < 0
12187 && !TYPE_OVERFLOW_TRAPS (type
)
12188 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12189 && !sign_bit_p (arg1
, arg1
))
12190 return fold_build2_loc (loc
, code
, type
,
12191 fold_convert_loc (loc
, type
, arg0
),
12192 fold_convert_loc (loc
, type
,
12193 negate_expr (arg1
)));
12195 /* X % -Y is the same as X % Y. */
12196 if (code
== TRUNC_MOD_EXPR
12197 && !TYPE_UNSIGNED (type
)
12198 && TREE_CODE (arg1
) == NEGATE_EXPR
12199 && !TYPE_OVERFLOW_TRAPS (type
))
12200 return fold_build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, arg0
),
12201 fold_convert_loc (loc
, type
,
12202 TREE_OPERAND (arg1
, 0)));
12204 strict_overflow_p
= false;
12205 if (TREE_CODE (arg1
) == INTEGER_CST
12206 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
12207 &strict_overflow_p
)))
12209 if (strict_overflow_p
)
12210 fold_overflow_warning (("assuming signed overflow does not occur "
12211 "when simplifying modulus"),
12212 WARN_STRICT_OVERFLOW_MISC
);
12213 return fold_convert_loc (loc
, type
, tem
);
12216 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12217 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12218 if ((code
== TRUNC_MOD_EXPR
|| code
== FLOOR_MOD_EXPR
)
12219 && (TYPE_UNSIGNED (type
)
12220 || tree_expr_nonnegative_warnv_p (op0
, &strict_overflow_p
)))
12223 /* Also optimize A % (C << N) where C is a power of 2,
12224 to A & ((C << N) - 1). */
12225 if (TREE_CODE (arg1
) == LSHIFT_EXPR
)
12226 c
= TREE_OPERAND (arg1
, 0);
12228 if (integer_pow2p (c
) && tree_int_cst_sgn (c
) > 0)
12231 = fold_build2_loc (loc
, MINUS_EXPR
, TREE_TYPE (arg1
), arg1
,
12232 build_int_cst (TREE_TYPE (arg1
), 1));
12233 if (strict_overflow_p
)
12234 fold_overflow_warning (("assuming signed overflow does not "
12235 "occur when simplifying "
12236 "X % (power of two)"),
12237 WARN_STRICT_OVERFLOW_MISC
);
12238 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
12239 fold_convert_loc (loc
, type
, arg0
),
12240 fold_convert_loc (loc
, type
, mask
));
12248 if (integer_all_onesp (arg0
))
12249 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12253 /* Optimize -1 >> x for arithmetic right shifts. */
12254 if (integer_all_onesp (arg0
) && !TYPE_UNSIGNED (type
)
12255 && tree_expr_nonnegative_p (arg1
))
12256 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12257 /* ... fall through ... */
12261 if (integer_zerop (arg1
))
12262 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12263 if (integer_zerop (arg0
))
12264 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12266 /* Since negative shift count is not well-defined,
12267 don't try to compute it in the compiler. */
12268 if (TREE_CODE (arg1
) == INTEGER_CST
&& tree_int_cst_sgn (arg1
) < 0)
12271 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12272 if (TREE_CODE (op0
) == code
&& host_integerp (arg1
, false)
12273 && TREE_INT_CST_LOW (arg1
) < TYPE_PRECISION (type
)
12274 && host_integerp (TREE_OPERAND (arg0
, 1), false)
12275 && TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1)) < TYPE_PRECISION (type
))
12277 HOST_WIDE_INT low
= (TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1))
12278 + TREE_INT_CST_LOW (arg1
));
12280 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12281 being well defined. */
12282 if (low
>= TYPE_PRECISION (type
))
12284 if (code
== LROTATE_EXPR
|| code
== RROTATE_EXPR
)
12285 low
= low
% TYPE_PRECISION (type
);
12286 else if (TYPE_UNSIGNED (type
) || code
== LSHIFT_EXPR
)
12287 return omit_one_operand_loc (loc
, type
, build_int_cst (type
, 0),
12288 TREE_OPERAND (arg0
, 0));
12290 low
= TYPE_PRECISION (type
) - 1;
12293 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
12294 build_int_cst (type
, low
));
12297 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12298 into x & ((unsigned)-1 >> c) for unsigned types. */
12299 if (((code
== LSHIFT_EXPR
&& TREE_CODE (arg0
) == RSHIFT_EXPR
)
12300 || (TYPE_UNSIGNED (type
)
12301 && code
== RSHIFT_EXPR
&& TREE_CODE (arg0
) == LSHIFT_EXPR
))
12302 && host_integerp (arg1
, false)
12303 && TREE_INT_CST_LOW (arg1
) < TYPE_PRECISION (type
)
12304 && host_integerp (TREE_OPERAND (arg0
, 1), false)
12305 && TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1)) < TYPE_PRECISION (type
))
12307 HOST_WIDE_INT low0
= TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1));
12308 HOST_WIDE_INT low1
= TREE_INT_CST_LOW (arg1
);
12314 arg00
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
12316 lshift
= build_int_cst (type
, -1);
12317 lshift
= int_const_binop (code
, lshift
, arg1
);
12319 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
, arg00
, lshift
);
12323 /* Rewrite an LROTATE_EXPR by a constant into an
12324 RROTATE_EXPR by a new constant. */
12325 if (code
== LROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
)
12327 tree tem
= build_int_cst (TREE_TYPE (arg1
),
12328 TYPE_PRECISION (type
));
12329 tem
= const_binop (MINUS_EXPR
, tem
, arg1
);
12330 return fold_build2_loc (loc
, RROTATE_EXPR
, type
, op0
, tem
);
12333 /* If we have a rotate of a bit operation with the rotate count and
12334 the second operand of the bit operation both constant,
12335 permute the two operations. */
12336 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
12337 && (TREE_CODE (arg0
) == BIT_AND_EXPR
12338 || TREE_CODE (arg0
) == BIT_IOR_EXPR
12339 || TREE_CODE (arg0
) == BIT_XOR_EXPR
)
12340 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12341 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
,
12342 fold_build2_loc (loc
, code
, type
,
12343 TREE_OPERAND (arg0
, 0), arg1
),
12344 fold_build2_loc (loc
, code
, type
,
12345 TREE_OPERAND (arg0
, 1), arg1
));
12347 /* Two consecutive rotates adding up to the precision of the
12348 type can be ignored. */
12349 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
12350 && TREE_CODE (arg0
) == RROTATE_EXPR
12351 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
12352 && TREE_INT_CST_HIGH (arg1
) == 0
12353 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0
, 1)) == 0
12354 && ((TREE_INT_CST_LOW (arg1
)
12355 + TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1)))
12356 == (unsigned int) TYPE_PRECISION (type
)))
12357 return TREE_OPERAND (arg0
, 0);
12359 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12360 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12361 if the latter can be further optimized. */
12362 if ((code
== LSHIFT_EXPR
|| code
== RSHIFT_EXPR
)
12363 && TREE_CODE (arg0
) == BIT_AND_EXPR
12364 && TREE_CODE (arg1
) == INTEGER_CST
12365 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12367 tree mask
= fold_build2_loc (loc
, code
, type
,
12368 fold_convert_loc (loc
, type
,
12369 TREE_OPERAND (arg0
, 1)),
12371 tree shift
= fold_build2_loc (loc
, code
, type
,
12372 fold_convert_loc (loc
, type
,
12373 TREE_OPERAND (arg0
, 0)),
12375 tem
= fold_binary_loc (loc
, BIT_AND_EXPR
, type
, shift
, mask
);
12383 if (operand_equal_p (arg0
, arg1
, 0))
12384 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12385 if (INTEGRAL_TYPE_P (type
)
12386 && operand_equal_p (arg1
, TYPE_MIN_VALUE (type
), OEP_ONLY_CONST
))
12387 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
12388 tem
= fold_minmax (loc
, MIN_EXPR
, type
, arg0
, arg1
);
12394 if (operand_equal_p (arg0
, arg1
, 0))
12395 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12396 if (INTEGRAL_TYPE_P (type
)
12397 && TYPE_MAX_VALUE (type
)
12398 && operand_equal_p (arg1
, TYPE_MAX_VALUE (type
), OEP_ONLY_CONST
))
12399 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
12400 tem
= fold_minmax (loc
, MAX_EXPR
, type
, arg0
, arg1
);
12405 case TRUTH_ANDIF_EXPR
:
12406 /* Note that the operands of this must be ints
12407 and their values must be 0 or 1.
12408 ("true" is a fixed value perhaps depending on the language.) */
12409 /* If first arg is constant zero, return it. */
12410 if (integer_zerop (arg0
))
12411 return fold_convert_loc (loc
, type
, arg0
);
12412 case TRUTH_AND_EXPR
:
12413 /* If either arg is constant true, drop it. */
12414 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
12415 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
12416 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
)
12417 /* Preserve sequence points. */
12418 && (code
!= TRUTH_ANDIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
12419 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12420 /* If second arg is constant zero, result is zero, but first arg
12421 must be evaluated. */
12422 if (integer_zerop (arg1
))
12423 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
12424 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12425 case will be handled here. */
12426 if (integer_zerop (arg0
))
12427 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12429 /* !X && X is always false. */
12430 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
12431 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
12432 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg1
);
12433 /* X && !X is always false. */
12434 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
12435 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
12436 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
12438 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12439 means A >= Y && A != MAX, but in this case we know that
12442 if (!TREE_SIDE_EFFECTS (arg0
)
12443 && !TREE_SIDE_EFFECTS (arg1
))
12445 tem
= fold_to_nonsharp_ineq_using_bound (loc
, arg0
, arg1
);
12446 if (tem
&& !operand_equal_p (tem
, arg0
, 0))
12447 return fold_build2_loc (loc
, code
, type
, tem
, arg1
);
12449 tem
= fold_to_nonsharp_ineq_using_bound (loc
, arg1
, arg0
);
12450 if (tem
&& !operand_equal_p (tem
, arg1
, 0))
12451 return fold_build2_loc (loc
, code
, type
, arg0
, tem
);
12454 if ((tem
= fold_truth_andor (loc
, code
, type
, arg0
, arg1
, op0
, op1
))
12460 case TRUTH_ORIF_EXPR
:
12461 /* Note that the operands of this must be ints
12462 and their values must be 0 or true.
12463 ("true" is a fixed value perhaps depending on the language.) */
12464 /* If first arg is constant true, return it. */
12465 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
12466 return fold_convert_loc (loc
, type
, arg0
);
12467 case TRUTH_OR_EXPR
:
12468 /* If either arg is constant zero, drop it. */
12469 if (TREE_CODE (arg0
) == INTEGER_CST
&& integer_zerop (arg0
))
12470 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
12471 if (TREE_CODE (arg1
) == INTEGER_CST
&& integer_zerop (arg1
)
12472 /* Preserve sequence points. */
12473 && (code
!= TRUTH_ORIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
12474 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12475 /* If second arg is constant true, result is true, but we must
12476 evaluate first arg. */
12477 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
))
12478 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
12479 /* Likewise for first arg, but note this only occurs here for
12481 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
12482 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12484 /* !X || X is always true. */
12485 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
12486 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
12487 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg1
);
12488 /* X || !X is always true. */
12489 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
12490 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
12491 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
12493 /* (X && !Y) || (!X && Y) is X ^ Y */
12494 if (TREE_CODE (arg0
) == TRUTH_AND_EXPR
12495 && TREE_CODE (arg1
) == TRUTH_AND_EXPR
)
12497 tree a0
, a1
, l0
, l1
, n0
, n1
;
12499 a0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
12500 a1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
12502 l0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
12503 l1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
12505 n0
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, l0
);
12506 n1
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, l1
);
12508 if ((operand_equal_p (n0
, a0
, 0)
12509 && operand_equal_p (n1
, a1
, 0))
12510 || (operand_equal_p (n0
, a1
, 0)
12511 && operand_equal_p (n1
, a0
, 0)))
12512 return fold_build2_loc (loc
, TRUTH_XOR_EXPR
, type
, l0
, n1
);
12515 if ((tem
= fold_truth_andor (loc
, code
, type
, arg0
, arg1
, op0
, op1
))
12521 case TRUTH_XOR_EXPR
:
12522 /* If the second arg is constant zero, drop it. */
12523 if (integer_zerop (arg1
))
12524 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12525 /* If the second arg is constant true, this is a logical inversion. */
12526 if (integer_onep (arg1
))
12528 /* Only call invert_truthvalue if operand is a truth value. */
12529 if (TREE_CODE (TREE_TYPE (arg0
)) != BOOLEAN_TYPE
)
12530 tem
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, TREE_TYPE (arg0
), arg0
);
12532 tem
= invert_truthvalue_loc (loc
, arg0
);
12533 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
12535 /* Identical arguments cancel to zero. */
12536 if (operand_equal_p (arg0
, arg1
, 0))
12537 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
12539 /* !X ^ X is always true. */
12540 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
12541 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
12542 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg1
);
12544 /* X ^ !X is always true. */
12545 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
12546 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
12547 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
12556 tem
= fold_comparison (loc
, code
, type
, op0
, op1
);
12557 if (tem
!= NULL_TREE
)
12560 /* bool_var != 0 becomes bool_var. */
12561 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_zerop (arg1
)
12562 && code
== NE_EXPR
)
12563 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12565 /* bool_var == 1 becomes bool_var. */
12566 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_onep (arg1
)
12567 && code
== EQ_EXPR
)
12568 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12570 /* bool_var != 1 becomes !bool_var. */
12571 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_onep (arg1
)
12572 && code
== NE_EXPR
)
12573 return fold_convert_loc (loc
, type
,
12574 fold_build1_loc (loc
, TRUTH_NOT_EXPR
,
12575 TREE_TYPE (arg0
), arg0
));
12577 /* bool_var == 0 becomes !bool_var. */
12578 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_zerop (arg1
)
12579 && code
== EQ_EXPR
)
12580 return fold_convert_loc (loc
, type
,
12581 fold_build1_loc (loc
, TRUTH_NOT_EXPR
,
12582 TREE_TYPE (arg0
), arg0
));
12584 /* !exp != 0 becomes !exp */
12585 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
&& integer_zerop (arg1
)
12586 && code
== NE_EXPR
)
12587 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12589 /* If this is an equality comparison of the address of two non-weak,
12590 unaliased symbols neither of which are extern (since we do not
12591 have access to attributes for externs), then we know the result. */
12592 if (TREE_CODE (arg0
) == ADDR_EXPR
12593 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0
, 0))
12594 && ! DECL_WEAK (TREE_OPERAND (arg0
, 0))
12595 && ! lookup_attribute ("alias",
12596 DECL_ATTRIBUTES (TREE_OPERAND (arg0
, 0)))
12597 && ! DECL_EXTERNAL (TREE_OPERAND (arg0
, 0))
12598 && TREE_CODE (arg1
) == ADDR_EXPR
12599 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1
, 0))
12600 && ! DECL_WEAK (TREE_OPERAND (arg1
, 0))
12601 && ! lookup_attribute ("alias",
12602 DECL_ATTRIBUTES (TREE_OPERAND (arg1
, 0)))
12603 && ! DECL_EXTERNAL (TREE_OPERAND (arg1
, 0)))
12605 /* We know that we're looking at the address of two
12606 non-weak, unaliased, static _DECL nodes.
12608 It is both wasteful and incorrect to call operand_equal_p
12609 to compare the two ADDR_EXPR nodes. It is wasteful in that
12610 all we need to do is test pointer equality for the arguments
12611 to the two ADDR_EXPR nodes. It is incorrect to use
12612 operand_equal_p as that function is NOT equivalent to a
12613 C equality test. It can in fact return false for two
12614 objects which would test as equal using the C equality
12616 bool equal
= TREE_OPERAND (arg0
, 0) == TREE_OPERAND (arg1
, 0);
12617 return constant_boolean_node (equal
12618 ? code
== EQ_EXPR
: code
!= EQ_EXPR
,
12622 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12623 a MINUS_EXPR of a constant, we can convert it into a comparison with
12624 a revised constant as long as no overflow occurs. */
12625 if (TREE_CODE (arg1
) == INTEGER_CST
12626 && (TREE_CODE (arg0
) == PLUS_EXPR
12627 || TREE_CODE (arg0
) == MINUS_EXPR
)
12628 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
12629 && 0 != (tem
= const_binop (TREE_CODE (arg0
) == PLUS_EXPR
12630 ? MINUS_EXPR
: PLUS_EXPR
,
12631 fold_convert_loc (loc
, TREE_TYPE (arg0
),
12633 TREE_OPERAND (arg0
, 1)))
12634 && !TREE_OVERFLOW (tem
))
12635 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
12637 /* Similarly for a NEGATE_EXPR. */
12638 if (TREE_CODE (arg0
) == NEGATE_EXPR
12639 && TREE_CODE (arg1
) == INTEGER_CST
12640 && 0 != (tem
= negate_expr (fold_convert_loc (loc
, TREE_TYPE (arg0
),
12642 && TREE_CODE (tem
) == INTEGER_CST
12643 && !TREE_OVERFLOW (tem
))
12644 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
12646 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12647 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12648 && TREE_CODE (arg1
) == INTEGER_CST
12649 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12650 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
12651 fold_build2_loc (loc
, BIT_XOR_EXPR
, TREE_TYPE (arg0
),
12652 fold_convert_loc (loc
,
12655 TREE_OPERAND (arg0
, 1)));
12657 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12658 if ((TREE_CODE (arg0
) == PLUS_EXPR
12659 || TREE_CODE (arg0
) == POINTER_PLUS_EXPR
12660 || TREE_CODE (arg0
) == MINUS_EXPR
)
12661 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0
,
12664 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
12665 || POINTER_TYPE_P (TREE_TYPE (arg0
))))
12667 tree val
= TREE_OPERAND (arg0
, 1);
12668 return omit_two_operands_loc (loc
, type
,
12669 fold_build2_loc (loc
, code
, type
,
12671 build_int_cst (TREE_TYPE (val
),
12673 TREE_OPERAND (arg0
, 0), arg1
);
12676 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12677 if (TREE_CODE (arg0
) == MINUS_EXPR
12678 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == INTEGER_CST
12679 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0
,
12682 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 0)) & 1) == 1)
12684 return omit_two_operands_loc (loc
, type
,
12686 ? boolean_true_node
: boolean_false_node
,
12687 TREE_OPERAND (arg0
, 1), arg1
);
12690 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12691 for !=. Don't do this for ordered comparisons due to overflow. */
12692 if (TREE_CODE (arg0
) == MINUS_EXPR
12693 && integer_zerop (arg1
))
12694 return fold_build2_loc (loc
, code
, type
,
12695 TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg0
, 1));
12697 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12698 if (TREE_CODE (arg0
) == ABS_EXPR
12699 && (integer_zerop (arg1
) || real_zerop (arg1
)))
12700 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), arg1
);
12702 /* If this is an EQ or NE comparison with zero and ARG0 is
12703 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12704 two operations, but the latter can be done in one less insn
12705 on machines that have only two-operand insns or on which a
12706 constant cannot be the first operand. */
12707 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12708 && integer_zerop (arg1
))
12710 tree arg00
= TREE_OPERAND (arg0
, 0);
12711 tree arg01
= TREE_OPERAND (arg0
, 1);
12712 if (TREE_CODE (arg00
) == LSHIFT_EXPR
12713 && integer_onep (TREE_OPERAND (arg00
, 0)))
12715 tree tem
= fold_build2_loc (loc
, RSHIFT_EXPR
, TREE_TYPE (arg00
),
12716 arg01
, TREE_OPERAND (arg00
, 1));
12717 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
), tem
,
12718 build_int_cst (TREE_TYPE (arg0
), 1));
12719 return fold_build2_loc (loc
, code
, type
,
12720 fold_convert_loc (loc
, TREE_TYPE (arg1
), tem
),
12723 else if (TREE_CODE (arg01
) == LSHIFT_EXPR
12724 && integer_onep (TREE_OPERAND (arg01
, 0)))
12726 tree tem
= fold_build2_loc (loc
, RSHIFT_EXPR
, TREE_TYPE (arg01
),
12727 arg00
, TREE_OPERAND (arg01
, 1));
12728 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
), tem
,
12729 build_int_cst (TREE_TYPE (arg0
), 1));
12730 return fold_build2_loc (loc
, code
, type
,
12731 fold_convert_loc (loc
, TREE_TYPE (arg1
), tem
),
12736 /* If this is an NE or EQ comparison of zero against the result of a
12737 signed MOD operation whose second operand is a power of 2, make
12738 the MOD operation unsigned since it is simpler and equivalent. */
12739 if (integer_zerop (arg1
)
12740 && !TYPE_UNSIGNED (TREE_TYPE (arg0
))
12741 && (TREE_CODE (arg0
) == TRUNC_MOD_EXPR
12742 || TREE_CODE (arg0
) == CEIL_MOD_EXPR
12743 || TREE_CODE (arg0
) == FLOOR_MOD_EXPR
12744 || TREE_CODE (arg0
) == ROUND_MOD_EXPR
)
12745 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
12747 tree newtype
= unsigned_type_for (TREE_TYPE (arg0
));
12748 tree newmod
= fold_build2_loc (loc
, TREE_CODE (arg0
), newtype
,
12749 fold_convert_loc (loc
, newtype
,
12750 TREE_OPERAND (arg0
, 0)),
12751 fold_convert_loc (loc
, newtype
,
12752 TREE_OPERAND (arg0
, 1)));
12754 return fold_build2_loc (loc
, code
, type
, newmod
,
12755 fold_convert_loc (loc
, newtype
, arg1
));
12758 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12759 C1 is a valid shift constant, and C2 is a power of two, i.e.
12761 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12762 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == RSHIFT_EXPR
12763 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1))
12765 && integer_pow2p (TREE_OPERAND (arg0
, 1))
12766 && integer_zerop (arg1
))
12768 tree itype
= TREE_TYPE (arg0
);
12769 unsigned HOST_WIDE_INT prec
= TYPE_PRECISION (itype
);
12770 tree arg001
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1);
12772 /* Check for a valid shift count. */
12773 if (TREE_INT_CST_HIGH (arg001
) == 0
12774 && TREE_INT_CST_LOW (arg001
) < prec
)
12776 tree arg01
= TREE_OPERAND (arg0
, 1);
12777 tree arg000
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
12778 unsigned HOST_WIDE_INT log2
= tree_log2 (arg01
);
12779 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12780 can be rewritten as (X & (C2 << C1)) != 0. */
12781 if ((log2
+ TREE_INT_CST_LOW (arg001
)) < prec
)
12783 tem
= fold_build2_loc (loc
, LSHIFT_EXPR
, itype
, arg01
, arg001
);
12784 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, itype
, arg000
, tem
);
12785 return fold_build2_loc (loc
, code
, type
, tem
,
12786 fold_convert_loc (loc
, itype
, arg1
));
12788 /* Otherwise, for signed (arithmetic) shifts,
12789 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12790 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12791 else if (!TYPE_UNSIGNED (itype
))
12792 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
, type
,
12793 arg000
, build_int_cst (itype
, 0));
12794 /* Otherwise, for unsigned (logical) shifts,
13795 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
13796 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12798 return omit_one_operand_loc (loc
, type
,
12799 code
== EQ_EXPR
? integer_one_node
12800 : integer_zero_node
,
12805 /* If we have (A & C) == C where C is a power of 2, convert this into
12806 (A & C) != 0. Similarly for NE_EXPR. */
12807 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12808 && integer_pow2p (TREE_OPERAND (arg0
, 1))
12809 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
12810 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
12811 arg0
, fold_convert_loc (loc
, TREE_TYPE (arg0
),
12812 integer_zero_node
));
12814 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12815 bit, then fold the expression into A < 0 or A >= 0. */
12816 tem
= fold_single_bit_test_into_sign_test (loc
, code
, arg0
, arg1
, type
);
12820 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12821 Similarly for NE_EXPR. */
12822 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12823 && TREE_CODE (arg1
) == INTEGER_CST
12824 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12826 tree notc
= fold_build1_loc (loc
, BIT_NOT_EXPR
,
12827 TREE_TYPE (TREE_OPERAND (arg0
, 1)),
12828 TREE_OPERAND (arg0
, 1));
12830 = fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
12831 fold_convert_loc (loc
, TREE_TYPE (arg0
), arg1
),
12833 tree rslt
= code
== EQ_EXPR
? integer_zero_node
: integer_one_node
;
12834 if (integer_nonzerop (dandnotc
))
12835 return omit_one_operand_loc (loc
, type
, rslt
, arg0
);
12838 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12839 Similarly for NE_EXPR. */
12840 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
12841 && TREE_CODE (arg1
) == INTEGER_CST
12842 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12844 tree notd
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg1
), arg1
);
12846 = fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
12847 TREE_OPERAND (arg0
, 1),
12848 fold_convert_loc (loc
, TREE_TYPE (arg0
), notd
));
12849 tree rslt
= code
== EQ_EXPR
? integer_zero_node
: integer_one_node
;
12850 if (integer_nonzerop (candnotd
))
12851 return omit_one_operand_loc (loc
, type
, rslt
, arg0
);
12854 /* If this is a comparison of a field, we may be able to simplify it. */
12855 if ((TREE_CODE (arg0
) == COMPONENT_REF
12856 || TREE_CODE (arg0
) == BIT_FIELD_REF
)
12857 /* Handle the constant case even without -O
12858 to make sure the warnings are given. */
12859 && (optimize
|| TREE_CODE (arg1
) == INTEGER_CST
))
12861 t1
= optimize_bit_field_compare (loc
, code
, type
, arg0
, arg1
);
12866 /* Optimize comparisons of strlen vs zero to a compare of the
12867 first character of the string vs zero. To wit,
12868 strlen(ptr) == 0 => *ptr == 0
12869 strlen(ptr) != 0 => *ptr != 0
12870 Other cases should reduce to one of these two (or a constant)
12871 due to the return value of strlen being unsigned. */
12872 if (TREE_CODE (arg0
) == CALL_EXPR
12873 && integer_zerop (arg1
))
12875 tree fndecl
= get_callee_fndecl (arg0
);
12878 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
12879 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STRLEN
12880 && call_expr_nargs (arg0
) == 1
12881 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0
, 0))) == POINTER_TYPE
)
12883 tree iref
= build_fold_indirect_ref_loc (loc
,
12884 CALL_EXPR_ARG (arg0
, 0));
12885 return fold_build2_loc (loc
, code
, type
, iref
,
12886 build_int_cst (TREE_TYPE (iref
), 0));
12890 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12891 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12892 if (TREE_CODE (arg0
) == RSHIFT_EXPR
12893 && integer_zerop (arg1
)
12894 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12896 tree arg00
= TREE_OPERAND (arg0
, 0);
12897 tree arg01
= TREE_OPERAND (arg0
, 1);
12898 tree itype
= TREE_TYPE (arg00
);
12899 if (TREE_INT_CST_HIGH (arg01
) == 0
12900 && TREE_INT_CST_LOW (arg01
)
12901 == (unsigned HOST_WIDE_INT
) (TYPE_PRECISION (itype
) - 1))
12903 if (TYPE_UNSIGNED (itype
))
12905 itype
= signed_type_for (itype
);
12906 arg00
= fold_convert_loc (loc
, itype
, arg00
);
12908 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
,
12909 type
, arg00
, build_int_cst (itype
, 0));
12913 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12914 if (integer_zerop (arg1
)
12915 && TREE_CODE (arg0
) == BIT_XOR_EXPR
)
12916 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
12917 TREE_OPERAND (arg0
, 1));
12919 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12920 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12921 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
12922 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
12923 build_zero_cst (TREE_TYPE (arg0
)));
12924 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12925 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12926 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
12927 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
12928 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 1),
12929 build_zero_cst (TREE_TYPE (arg0
)));
12931 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12932 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12933 && TREE_CODE (arg1
) == INTEGER_CST
12934 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12935 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
12936 fold_build2_loc (loc
, BIT_XOR_EXPR
, TREE_TYPE (arg1
),
12937 TREE_OPERAND (arg0
, 1), arg1
));
12939 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12940 (X & C) == 0 when C is a single bit. */
12941 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12942 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_NOT_EXPR
12943 && integer_zerop (arg1
)
12944 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
12946 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
12947 TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0),
12948 TREE_OPERAND (arg0
, 1));
12949 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
,
12951 fold_convert_loc (loc
, TREE_TYPE (arg0
),
12955 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12956 constant C is a power of two, i.e. a single bit. */
12957 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12958 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
12959 && integer_zerop (arg1
)
12960 && integer_pow2p (TREE_OPERAND (arg0
, 1))
12961 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
12962 TREE_OPERAND (arg0
, 1), OEP_ONLY_CONST
))
12964 tree arg00
= TREE_OPERAND (arg0
, 0);
12965 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
12966 arg00
, build_int_cst (TREE_TYPE (arg00
), 0));
12969 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12970 when C is a power of two, i.e. a single bit. */
12971 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12972 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_XOR_EXPR
12973 && integer_zerop (arg1
)
12974 && integer_pow2p (TREE_OPERAND (arg0
, 1))
12975 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
12976 TREE_OPERAND (arg0
, 1), OEP_ONLY_CONST
))
12978 tree arg000
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
12979 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg000
),
12980 arg000
, TREE_OPERAND (arg0
, 1));
12981 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
12982 tem
, build_int_cst (TREE_TYPE (tem
), 0));
12985 if (integer_zerop (arg1
)
12986 && tree_expr_nonzero_p (arg0
))
12988 tree res
= constant_boolean_node (code
==NE_EXPR
, type
);
12989 return omit_one_operand_loc (loc
, type
, res
, arg0
);
12992 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12993 if (TREE_CODE (arg0
) == NEGATE_EXPR
12994 && TREE_CODE (arg1
) == NEGATE_EXPR
)
12995 return fold_build2_loc (loc
, code
, type
,
12996 TREE_OPERAND (arg0
, 0),
12997 fold_convert_loc (loc
, TREE_TYPE (arg0
),
12998 TREE_OPERAND (arg1
, 0)));
13000 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
13001 if (TREE_CODE (arg0
) == BIT_AND_EXPR
13002 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
13004 tree arg00
= TREE_OPERAND (arg0
, 0);
13005 tree arg01
= TREE_OPERAND (arg0
, 1);
13006 tree arg10
= TREE_OPERAND (arg1
, 0);
13007 tree arg11
= TREE_OPERAND (arg1
, 1);
13008 tree itype
= TREE_TYPE (arg0
);
13010 if (operand_equal_p (arg01
, arg11
, 0))
13011 return fold_build2_loc (loc
, code
, type
,
13012 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
13013 fold_build2_loc (loc
,
13014 BIT_XOR_EXPR
, itype
,
13017 build_zero_cst (itype
));
13019 if (operand_equal_p (arg01
, arg10
, 0))
13020 return fold_build2_loc (loc
, code
, type
,
13021 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
13022 fold_build2_loc (loc
,
13023 BIT_XOR_EXPR
, itype
,
13026 build_zero_cst (itype
));
13028 if (operand_equal_p (arg00
, arg11
, 0))
13029 return fold_build2_loc (loc
, code
, type
,
13030 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
13031 fold_build2_loc (loc
,
13032 BIT_XOR_EXPR
, itype
,
13035 build_zero_cst (itype
));
13037 if (operand_equal_p (arg00
, arg10
, 0))
13038 return fold_build2_loc (loc
, code
, type
,
13039 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
13040 fold_build2_loc (loc
,
13041 BIT_XOR_EXPR
, itype
,
13044 build_zero_cst (itype
));
13047 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
13048 && TREE_CODE (arg1
) == BIT_XOR_EXPR
)
13050 tree arg00
= TREE_OPERAND (arg0
, 0);
13051 tree arg01
= TREE_OPERAND (arg0
, 1);
13052 tree arg10
= TREE_OPERAND (arg1
, 0);
13053 tree arg11
= TREE_OPERAND (arg1
, 1);
13054 tree itype
= TREE_TYPE (arg0
);
13056 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13057 operand_equal_p guarantees no side-effects so we don't need
13058 to use omit_one_operand on Z. */
13059 if (operand_equal_p (arg01
, arg11
, 0))
13060 return fold_build2_loc (loc
, code
, type
, arg00
,
13061 fold_convert_loc (loc
, TREE_TYPE (arg00
),
13063 if (operand_equal_p (arg01
, arg10
, 0))
13064 return fold_build2_loc (loc
, code
, type
, arg00
,
13065 fold_convert_loc (loc
, TREE_TYPE (arg00
),
13067 if (operand_equal_p (arg00
, arg11
, 0))
13068 return fold_build2_loc (loc
, code
, type
, arg01
,
13069 fold_convert_loc (loc
, TREE_TYPE (arg01
),
13071 if (operand_equal_p (arg00
, arg10
, 0))
13072 return fold_build2_loc (loc
, code
, type
, arg01
,
13073 fold_convert_loc (loc
, TREE_TYPE (arg01
),
13076 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13077 if (TREE_CODE (arg01
) == INTEGER_CST
13078 && TREE_CODE (arg11
) == INTEGER_CST
)
13080 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg01
,
13081 fold_convert_loc (loc
, itype
, arg11
));
13082 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg00
, tem
);
13083 return fold_build2_loc (loc
, code
, type
, tem
,
13084 fold_convert_loc (loc
, itype
, arg10
));
13088 /* Attempt to simplify equality/inequality comparisons of complex
13089 values. Only lower the comparison if the result is known or
13090 can be simplified to a single scalar comparison. */
13091 if ((TREE_CODE (arg0
) == COMPLEX_EXPR
13092 || TREE_CODE (arg0
) == COMPLEX_CST
)
13093 && (TREE_CODE (arg1
) == COMPLEX_EXPR
13094 || TREE_CODE (arg1
) == COMPLEX_CST
))
13096 tree real0
, imag0
, real1
, imag1
;
13099 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
13101 real0
= TREE_OPERAND (arg0
, 0);
13102 imag0
= TREE_OPERAND (arg0
, 1);
13106 real0
= TREE_REALPART (arg0
);
13107 imag0
= TREE_IMAGPART (arg0
);
13110 if (TREE_CODE (arg1
) == COMPLEX_EXPR
)
13112 real1
= TREE_OPERAND (arg1
, 0);
13113 imag1
= TREE_OPERAND (arg1
, 1);
13117 real1
= TREE_REALPART (arg1
);
13118 imag1
= TREE_IMAGPART (arg1
);
13121 rcond
= fold_binary_loc (loc
, code
, type
, real0
, real1
);
13122 if (rcond
&& TREE_CODE (rcond
) == INTEGER_CST
)
13124 if (integer_zerop (rcond
))
13126 if (code
== EQ_EXPR
)
13127 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
13129 return fold_build2_loc (loc
, NE_EXPR
, type
, imag0
, imag1
);
13133 if (code
== NE_EXPR
)
13134 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
13136 return fold_build2_loc (loc
, EQ_EXPR
, type
, imag0
, imag1
);
13140 icond
= fold_binary_loc (loc
, code
, type
, imag0
, imag1
);
13141 if (icond
&& TREE_CODE (icond
) == INTEGER_CST
)
13143 if (integer_zerop (icond
))
13145 if (code
== EQ_EXPR
)
13146 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
13148 return fold_build2_loc (loc
, NE_EXPR
, type
, real0
, real1
);
13152 if (code
== NE_EXPR
)
13153 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
13155 return fold_build2_loc (loc
, EQ_EXPR
, type
, real0
, real1
);
13166 tem
= fold_comparison (loc
, code
, type
, op0
, op1
);
13167 if (tem
!= NULL_TREE
)
13170 /* Transform comparisons of the form X +- C CMP X. */
13171 if ((TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
13172 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
13173 && ((TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
13174 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
))))
13175 || (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
13176 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))))
13178 tree arg01
= TREE_OPERAND (arg0
, 1);
13179 enum tree_code code0
= TREE_CODE (arg0
);
13182 if (TREE_CODE (arg01
) == REAL_CST
)
13183 is_positive
= REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01
)) ? -1 : 1;
13185 is_positive
= tree_int_cst_sgn (arg01
);
13187 /* (X - c) > X becomes false. */
13188 if (code
== GT_EXPR
13189 && ((code0
== MINUS_EXPR
&& is_positive
>= 0)
13190 || (code0
== PLUS_EXPR
&& is_positive
<= 0)))
13192 if (TREE_CODE (arg01
) == INTEGER_CST
13193 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
13194 fold_overflow_warning (("assuming signed overflow does not "
13195 "occur when assuming that (X - c) > X "
13196 "is always false"),
13197 WARN_STRICT_OVERFLOW_ALL
);
13198 return constant_boolean_node (0, type
);
13201 /* Likewise (X + c) < X becomes false. */
13202 if (code
== LT_EXPR
13203 && ((code0
== PLUS_EXPR
&& is_positive
>= 0)
13204 || (code0
== MINUS_EXPR
&& is_positive
<= 0)))
13206 if (TREE_CODE (arg01
) == INTEGER_CST
13207 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
13208 fold_overflow_warning (("assuming signed overflow does not "
13209 "occur when assuming that "
13210 "(X + c) < X is always false"),
13211 WARN_STRICT_OVERFLOW_ALL
);
13212 return constant_boolean_node (0, type
);
13215 /* Convert (X - c) <= X to true. */
13216 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
)))
13218 && ((code0
== MINUS_EXPR
&& is_positive
>= 0)
13219 || (code0
== PLUS_EXPR
&& is_positive
<= 0)))
13221 if (TREE_CODE (arg01
) == INTEGER_CST
13222 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
13223 fold_overflow_warning (("assuming signed overflow does not "
13224 "occur when assuming that "
13225 "(X - c) <= X is always true"),
13226 WARN_STRICT_OVERFLOW_ALL
);
13227 return constant_boolean_node (1, type
);
13230 /* Convert (X + c) >= X to true. */
13231 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
)))
13233 && ((code0
== PLUS_EXPR
&& is_positive
>= 0)
13234 || (code0
== MINUS_EXPR
&& is_positive
<= 0)))
13236 if (TREE_CODE (arg01
) == INTEGER_CST
13237 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
13238 fold_overflow_warning (("assuming signed overflow does not "
13239 "occur when assuming that "
13240 "(X + c) >= X is always true"),
13241 WARN_STRICT_OVERFLOW_ALL
);
13242 return constant_boolean_node (1, type
);
13245 if (TREE_CODE (arg01
) == INTEGER_CST
)
13247 /* Convert X + c > X and X - c < X to true for integers. */
13248 if (code
== GT_EXPR
13249 && ((code0
== PLUS_EXPR
&& is_positive
> 0)
13250 || (code0
== MINUS_EXPR
&& is_positive
< 0)))
13252 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
13253 fold_overflow_warning (("assuming signed overflow does "
13254 "not occur when assuming that "
13255 "(X + c) > X is always true"),
13256 WARN_STRICT_OVERFLOW_ALL
);
13257 return constant_boolean_node (1, type
);
13260 if (code
== LT_EXPR
13261 && ((code0
== MINUS_EXPR
&& is_positive
> 0)
13262 || (code0
== PLUS_EXPR
&& is_positive
< 0)))
13264 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
13265 fold_overflow_warning (("assuming signed overflow does "
13266 "not occur when assuming that "
13267 "(X - c) < X is always true"),
13268 WARN_STRICT_OVERFLOW_ALL
);
13269 return constant_boolean_node (1, type
);
13272 /* Convert X + c <= X and X - c >= X to false for integers. */
13273 if (code
== LE_EXPR
13274 && ((code0
== PLUS_EXPR
&& is_positive
> 0)
13275 || (code0
== MINUS_EXPR
&& is_positive
< 0)))
13277 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
13278 fold_overflow_warning (("assuming signed overflow does "
13279 "not occur when assuming that "
13280 "(X + c) <= X is always false"),
13281 WARN_STRICT_OVERFLOW_ALL
);
13282 return constant_boolean_node (0, type
);
13285 if (code
== GE_EXPR
13286 && ((code0
== MINUS_EXPR
&& is_positive
> 0)
13287 || (code0
== PLUS_EXPR
&& is_positive
< 0)))
13289 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
13290 fold_overflow_warning (("assuming signed overflow does "
13291 "not occur when assuming that "
13292 "(X - c) >= X is always false"),
13293 WARN_STRICT_OVERFLOW_ALL
);
13294 return constant_boolean_node (0, type
);
13299 /* Comparisons with the highest or lowest possible integer of
13300 the specified precision will have known values. */
13302 tree arg1_type
= TREE_TYPE (arg1
);
13303 unsigned int width
= TYPE_PRECISION (arg1_type
);
13305 if (TREE_CODE (arg1
) == INTEGER_CST
13306 && width
<= HOST_BITS_PER_DOUBLE_INT
13307 && (INTEGRAL_TYPE_P (arg1_type
) || POINTER_TYPE_P (arg1_type
)))
13309 HOST_WIDE_INT signed_max_hi
;
13310 unsigned HOST_WIDE_INT signed_max_lo
;
13311 unsigned HOST_WIDE_INT max_hi
, max_lo
, min_hi
, min_lo
;
13313 if (width
<= HOST_BITS_PER_WIDE_INT
)
13315 signed_max_lo
= ((unsigned HOST_WIDE_INT
) 1 << (width
- 1))
13320 if (TYPE_UNSIGNED (arg1_type
))
13322 max_lo
= ((unsigned HOST_WIDE_INT
) 2 << (width
- 1)) - 1;
13328 max_lo
= signed_max_lo
;
13329 min_lo
= ((unsigned HOST_WIDE_INT
) -1 << (width
- 1));
13335 width
-= HOST_BITS_PER_WIDE_INT
;
13336 signed_max_lo
= -1;
13337 signed_max_hi
= ((unsigned HOST_WIDE_INT
) 1 << (width
- 1))
13342 if (TYPE_UNSIGNED (arg1_type
))
13344 max_hi
= ((unsigned HOST_WIDE_INT
) 2 << (width
- 1)) - 1;
13349 max_hi
= signed_max_hi
;
13350 min_hi
= ((unsigned HOST_WIDE_INT
) -1 << (width
- 1));
13354 if ((unsigned HOST_WIDE_INT
) TREE_INT_CST_HIGH (arg1
) == max_hi
13355 && TREE_INT_CST_LOW (arg1
) == max_lo
)
13359 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
13362 return fold_build2_loc (loc
, EQ_EXPR
, type
, op0
, op1
);
13365 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
13368 return fold_build2_loc (loc
, NE_EXPR
, type
, op0
, op1
);
13370 /* The GE_EXPR and LT_EXPR cases above are not normally
13371 reached because of previous transformations. */
13376 else if ((unsigned HOST_WIDE_INT
) TREE_INT_CST_HIGH (arg1
)
13378 && TREE_INT_CST_LOW (arg1
) == max_lo
- 1)
13382 arg1
= const_binop (PLUS_EXPR
, arg1
,
13383 build_int_cst (TREE_TYPE (arg1
), 1));
13384 return fold_build2_loc (loc
, EQ_EXPR
, type
,
13385 fold_convert_loc (loc
,
13386 TREE_TYPE (arg1
), arg0
),
13389 arg1
= const_binop (PLUS_EXPR
, arg1
,
13390 build_int_cst (TREE_TYPE (arg1
), 1));
13391 return fold_build2_loc (loc
, NE_EXPR
, type
,
13392 fold_convert_loc (loc
, TREE_TYPE (arg1
),
13398 else if ((unsigned HOST_WIDE_INT
) TREE_INT_CST_HIGH (arg1
)
13400 && TREE_INT_CST_LOW (arg1
) == min_lo
)
13404 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
13407 return fold_build2_loc (loc
, EQ_EXPR
, type
, op0
, op1
);
13410 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
13413 return fold_build2_loc (loc
, NE_EXPR
, type
, op0
, op1
);
13418 else if ((unsigned HOST_WIDE_INT
) TREE_INT_CST_HIGH (arg1
)
13420 && TREE_INT_CST_LOW (arg1
) == min_lo
+ 1)
13424 arg1
= const_binop (MINUS_EXPR
, arg1
, integer_one_node
);
13425 return fold_build2_loc (loc
, NE_EXPR
, type
,
13426 fold_convert_loc (loc
,
13427 TREE_TYPE (arg1
), arg0
),
13430 arg1
= const_binop (MINUS_EXPR
, arg1
, integer_one_node
);
13431 return fold_build2_loc (loc
, EQ_EXPR
, type
,
13432 fold_convert_loc (loc
, TREE_TYPE (arg1
),
13439 else if (TREE_INT_CST_HIGH (arg1
) == signed_max_hi
13440 && TREE_INT_CST_LOW (arg1
) == signed_max_lo
13441 && TYPE_UNSIGNED (arg1_type
)
13442 /* We will flip the signedness of the comparison operator
13443 associated with the mode of arg1, so the sign bit is
13444 specified by this mode. Check that arg1 is the signed
13445 max associated with this sign bit. */
13446 && width
== GET_MODE_BITSIZE (TYPE_MODE (arg1_type
))
13447 /* signed_type does not work on pointer types. */
13448 && INTEGRAL_TYPE_P (arg1_type
))
13450 /* The following case also applies to X < signed_max+1
13451 and X >= signed_max+1 because previous transformations. */
13452 if (code
== LE_EXPR
|| code
== GT_EXPR
)
13455 st
= signed_type_for (TREE_TYPE (arg1
));
13456 return fold_build2_loc (loc
,
13457 code
== LE_EXPR
? GE_EXPR
: LT_EXPR
,
13458 type
, fold_convert_loc (loc
, st
, arg0
),
13459 build_int_cst (st
, 0));
13465 /* If we are comparing an ABS_EXPR with a constant, we can
13466 convert all the cases into explicit comparisons, but they may
13467 well not be faster than doing the ABS and one comparison.
13468 But ABS (X) <= C is a range comparison, which becomes a subtraction
13469 and a comparison, and is probably faster. */
13470 if (code
== LE_EXPR
13471 && TREE_CODE (arg1
) == INTEGER_CST
13472 && TREE_CODE (arg0
) == ABS_EXPR
13473 && ! TREE_SIDE_EFFECTS (arg0
)
13474 && (0 != (tem
= negate_expr (arg1
)))
13475 && TREE_CODE (tem
) == INTEGER_CST
13476 && !TREE_OVERFLOW (tem
))
13477 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
13478 build2 (GE_EXPR
, type
,
13479 TREE_OPERAND (arg0
, 0), tem
),
13480 build2 (LE_EXPR
, type
,
13481 TREE_OPERAND (arg0
, 0), arg1
));
13483 /* Convert ABS_EXPR<x> >= 0 to true. */
13484 strict_overflow_p
= false;
13485 if (code
== GE_EXPR
13486 && (integer_zerop (arg1
)
13487 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
)))
13488 && real_zerop (arg1
)))
13489 && tree_expr_nonnegative_warnv_p (arg0
, &strict_overflow_p
))
13491 if (strict_overflow_p
)
13492 fold_overflow_warning (("assuming signed overflow does not occur "
13493 "when simplifying comparison of "
13494 "absolute value and zero"),
13495 WARN_STRICT_OVERFLOW_CONDITIONAL
);
13496 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
13499 /* Convert ABS_EXPR<x> < 0 to false. */
13500 strict_overflow_p
= false;
13501 if (code
== LT_EXPR
13502 && (integer_zerop (arg1
) || real_zerop (arg1
))
13503 && tree_expr_nonnegative_warnv_p (arg0
, &strict_overflow_p
))
13505 if (strict_overflow_p
)
13506 fold_overflow_warning (("assuming signed overflow does not occur "
13507 "when simplifying comparison of "
13508 "absolute value and zero"),
13509 WARN_STRICT_OVERFLOW_CONDITIONAL
);
13510 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
13513 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13514 and similarly for >= into !=. */
13515 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
13516 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
13517 && TREE_CODE (arg1
) == LSHIFT_EXPR
13518 && integer_onep (TREE_OPERAND (arg1
, 0)))
13519 return build2_loc (loc
, code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
13520 build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
13521 TREE_OPERAND (arg1
, 1)),
13522 build_zero_cst (TREE_TYPE (arg0
)));
13524 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
13525 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
13526 && CONVERT_EXPR_P (arg1
)
13527 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == LSHIFT_EXPR
13528 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0)))
13530 tem
= build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
13531 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1));
13532 return build2_loc (loc
, code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
13533 fold_convert_loc (loc
, TREE_TYPE (arg0
), tem
),
13534 build_zero_cst (TREE_TYPE (arg0
)));
13539 case UNORDERED_EXPR
:
13547 if (TREE_CODE (arg0
) == REAL_CST
&& TREE_CODE (arg1
) == REAL_CST
)
13549 t1
= fold_relational_const (code
, type
, arg0
, arg1
);
13550 if (t1
!= NULL_TREE
)
13554 /* If the first operand is NaN, the result is constant. */
13555 if (TREE_CODE (arg0
) == REAL_CST
13556 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0
))
13557 && (code
!= LTGT_EXPR
|| ! flag_trapping_math
))
13559 t1
= (code
== ORDERED_EXPR
|| code
== LTGT_EXPR
)
13560 ? integer_zero_node
13561 : integer_one_node
;
13562 return omit_one_operand_loc (loc
, type
, t1
, arg1
);
13565 /* If the second operand is NaN, the result is constant. */
13566 if (TREE_CODE (arg1
) == REAL_CST
13567 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1
))
13568 && (code
!= LTGT_EXPR
|| ! flag_trapping_math
))
13570 t1
= (code
== ORDERED_EXPR
|| code
== LTGT_EXPR
)
13571 ? integer_zero_node
13572 : integer_one_node
;
13573 return omit_one_operand_loc (loc
, type
, t1
, arg0
);
13576 /* Simplify unordered comparison of something with itself. */
13577 if ((code
== UNLE_EXPR
|| code
== UNGE_EXPR
|| code
== UNEQ_EXPR
)
13578 && operand_equal_p (arg0
, arg1
, 0))
13579 return constant_boolean_node (1, type
);
13581 if (code
== LTGT_EXPR
13582 && !flag_trapping_math
13583 && operand_equal_p (arg0
, arg1
, 0))
13584 return constant_boolean_node (0, type
);
13586 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13588 tree targ0
= strip_float_extensions (arg0
);
13589 tree targ1
= strip_float_extensions (arg1
);
13590 tree newtype
= TREE_TYPE (targ0
);
13592 if (TYPE_PRECISION (TREE_TYPE (targ1
)) > TYPE_PRECISION (newtype
))
13593 newtype
= TREE_TYPE (targ1
);
13595 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (TREE_TYPE (arg0
)))
13596 return fold_build2_loc (loc
, code
, type
,
13597 fold_convert_loc (loc
, newtype
, targ0
),
13598 fold_convert_loc (loc
, newtype
, targ1
));
13603 case COMPOUND_EXPR
:
13604 /* When pedantic, a compound expression can be neither an lvalue
13605 nor an integer constant expression. */
13606 if (TREE_SIDE_EFFECTS (arg0
) || TREE_CONSTANT (arg1
))
13608 /* Don't let (0, 0) be null pointer constant. */
13609 tem
= integer_zerop (arg1
) ? build1 (NOP_EXPR
, type
, arg1
)
13610 : fold_convert_loc (loc
, type
, arg1
);
13611 return pedantic_non_lvalue_loc (loc
, tem
);
13614 if ((TREE_CODE (arg0
) == REAL_CST
13615 && TREE_CODE (arg1
) == REAL_CST
)
13616 || (TREE_CODE (arg0
) == INTEGER_CST
13617 && TREE_CODE (arg1
) == INTEGER_CST
))
13618 return build_complex (type
, arg0
, arg1
);
13619 if (TREE_CODE (arg0
) == REALPART_EXPR
13620 && TREE_CODE (arg1
) == IMAGPART_EXPR
13621 && TREE_TYPE (TREE_OPERAND (arg0
, 0)) == type
13622 && operand_equal_p (TREE_OPERAND (arg0
, 0),
13623 TREE_OPERAND (arg1
, 0), 0))
13624 return omit_one_operand_loc (loc
, type
, TREE_OPERAND (arg0
, 0),
13625 TREE_OPERAND (arg1
, 0));
13629 /* An ASSERT_EXPR should never be passed to fold_binary. */
13630 gcc_unreachable ();
13632 case VEC_PACK_TRUNC_EXPR
:
13633 case VEC_PACK_FIX_TRUNC_EXPR
:
13635 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
13638 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)) == nelts
/ 2
13639 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)) == nelts
/ 2);
13640 if (TREE_CODE (arg0
) != VECTOR_CST
|| TREE_CODE (arg1
) != VECTOR_CST
)
13643 elts
= XALLOCAVEC (tree
, nelts
);
13644 if (!vec_cst_ctor_to_array (arg0
, elts
)
13645 || !vec_cst_ctor_to_array (arg1
, elts
+ nelts
/ 2))
13648 for (i
= 0; i
< nelts
; i
++)
13650 elts
[i
] = fold_convert_const (code
== VEC_PACK_TRUNC_EXPR
13651 ? NOP_EXPR
: FIX_TRUNC_EXPR
,
13652 TREE_TYPE (type
), elts
[i
]);
13653 if (elts
[i
] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[i
]))
13657 return build_vector (type
, elts
);
13660 case VEC_WIDEN_MULT_LO_EXPR
:
13661 case VEC_WIDEN_MULT_HI_EXPR
:
13662 case VEC_WIDEN_MULT_EVEN_EXPR
:
13663 case VEC_WIDEN_MULT_ODD_EXPR
:
13665 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
);
13666 unsigned int out
, ofs
, scale
;
13669 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)) == nelts
* 2
13670 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)) == nelts
* 2);
13671 if (TREE_CODE (arg0
) != VECTOR_CST
|| TREE_CODE (arg1
) != VECTOR_CST
)
13674 elts
= XALLOCAVEC (tree
, nelts
* 4);
13675 if (!vec_cst_ctor_to_array (arg0
, elts
)
13676 || !vec_cst_ctor_to_array (arg1
, elts
+ nelts
* 2))
13679 if (code
== VEC_WIDEN_MULT_LO_EXPR
)
13680 scale
= 0, ofs
= BYTES_BIG_ENDIAN
? nelts
: 0;
13681 else if (code
== VEC_WIDEN_MULT_HI_EXPR
)
13682 scale
= 0, ofs
= BYTES_BIG_ENDIAN
? 0 : nelts
;
13683 else if (code
== VEC_WIDEN_MULT_EVEN_EXPR
)
13684 scale
= 1, ofs
= 0;
13685 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13686 scale
= 1, ofs
= 1;
13688 for (out
= 0; out
< nelts
; out
++)
13690 unsigned int in1
= (out
<< scale
) + ofs
;
13691 unsigned int in2
= in1
+ nelts
* 2;
13694 t1
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), elts
[in1
]);
13695 t2
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), elts
[in2
]);
13697 if (t1
== NULL_TREE
|| t2
== NULL_TREE
)
13699 elts
[out
] = const_binop (MULT_EXPR
, t1
, t2
);
13700 if (elts
[out
] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[out
]))
13704 return build_vector (type
, elts
);
13709 } /* switch (code) */
13712 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13713 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13717 contains_label_1 (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
13719 switch (TREE_CODE (*tp
))
13725 *walk_subtrees
= 0;
13727 /* ... fall through ... */
13734 /* Return whether the sub-tree ST contains a label which is accessible from
13735 outside the sub-tree. */
13738 contains_label_p (tree st
)
13741 (walk_tree_without_duplicates (&st
, contains_label_1
, NULL
) != NULL_TREE
);
13744 /* Fold a ternary expression of code CODE and type TYPE with operands
13745 OP0, OP1, and OP2. Return the folded expression if folding is
13746 successful. Otherwise, return NULL_TREE. */
13749 fold_ternary_loc (location_t loc
, enum tree_code code
, tree type
,
13750 tree op0
, tree op1
, tree op2
)
13753 tree arg0
= NULL_TREE
, arg1
= NULL_TREE
, arg2
= NULL_TREE
;
13754 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
13756 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
13757 && TREE_CODE_LENGTH (code
) == 3);
13759 /* Strip any conversions that don't change the mode. This is safe
13760 for every expression, except for a comparison expression because
13761 its signedness is derived from its operands. So, in the latter
13762 case, only strip conversions that don't change the signedness.
13764 Note that this is done as an internal manipulation within the
13765 constant folder, in order to find the simplest representation of
13766 the arguments so that their form can be studied. In any cases,
13767 the appropriate type conversions should be put back in the tree
13768 that will get out of the constant folder. */
13789 case COMPONENT_REF
:
13790 if (TREE_CODE (arg0
) == CONSTRUCTOR
13791 && ! type_contains_placeholder_p (TREE_TYPE (arg0
)))
13793 unsigned HOST_WIDE_INT idx
;
13795 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0
), idx
, field
, value
)
13802 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13803 so all simple results must be passed through pedantic_non_lvalue. */
13804 if (TREE_CODE (arg0
) == INTEGER_CST
)
13806 tree unused_op
= integer_zerop (arg0
) ? op1
: op2
;
13807 tem
= integer_zerop (arg0
) ? op2
: op1
;
13808 /* Only optimize constant conditions when the selected branch
13809 has the same type as the COND_EXPR. This avoids optimizing
13810 away "c ? x : throw", where the throw has a void type.
13811 Avoid throwing away that operand which contains label. */
13812 if ((!TREE_SIDE_EFFECTS (unused_op
)
13813 || !contains_label_p (unused_op
))
13814 && (! VOID_TYPE_P (TREE_TYPE (tem
))
13815 || VOID_TYPE_P (type
)))
13816 return pedantic_non_lvalue_loc (loc
, tem
);
13819 if (operand_equal_p (arg1
, op2
, 0))
13820 return pedantic_omit_one_operand_loc (loc
, type
, arg1
, arg0
);
13822 /* If we have A op B ? A : C, we may be able to convert this to a
13823 simpler expression, depending on the operation and the values
13824 of B and C. Signed zeros prevent all of these transformations,
13825 for reasons given above each one.
13827 Also try swapping the arguments and inverting the conditional. */
13828 if (COMPARISON_CLASS_P (arg0
)
13829 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
13830 arg1
, TREE_OPERAND (arg0
, 1))
13831 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1
))))
13833 tem
= fold_cond_expr_with_comparison (loc
, type
, arg0
, op1
, op2
);
13838 if (COMPARISON_CLASS_P (arg0
)
13839 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
13841 TREE_OPERAND (arg0
, 1))
13842 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2
))))
13844 location_t loc0
= expr_location_or (arg0
, loc
);
13845 tem
= fold_truth_not_expr (loc0
, arg0
);
13846 if (tem
&& COMPARISON_CLASS_P (tem
))
13848 tem
= fold_cond_expr_with_comparison (loc
, type
, tem
, op2
, op1
);
13854 /* If the second operand is simpler than the third, swap them
13855 since that produces better jump optimization results. */
13856 if (truth_value_p (TREE_CODE (arg0
))
13857 && tree_swap_operands_p (op1
, op2
, false))
13859 location_t loc0
= expr_location_or (arg0
, loc
);
13860 /* See if this can be inverted. If it can't, possibly because
13861 it was a floating-point inequality comparison, don't do
13863 tem
= fold_truth_not_expr (loc0
, arg0
);
13865 return fold_build3_loc (loc
, code
, type
, tem
, op2
, op1
);
13868 /* Convert A ? 1 : 0 to simply A. */
13869 if (integer_onep (op1
)
13870 && integer_zerop (op2
)
13871 /* If we try to convert OP0 to our type, the
13872 call to fold will try to move the conversion inside
13873 a COND, which will recurse. In that case, the COND_EXPR
13874 is probably the best choice, so leave it alone. */
13875 && type
== TREE_TYPE (arg0
))
13876 return pedantic_non_lvalue_loc (loc
, arg0
);
13878 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13879 over COND_EXPR in cases such as floating point comparisons. */
13880 if (integer_zerop (op1
)
13881 && integer_onep (op2
)
13882 && truth_value_p (TREE_CODE (arg0
)))
13883 return pedantic_non_lvalue_loc (loc
,
13884 fold_convert_loc (loc
, type
,
13885 invert_truthvalue_loc (loc
,
13888 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13889 if (TREE_CODE (arg0
) == LT_EXPR
13890 && integer_zerop (TREE_OPERAND (arg0
, 1))
13891 && integer_zerop (op2
)
13892 && (tem
= sign_bit_p (TREE_OPERAND (arg0
, 0), arg1
)))
13894 /* sign_bit_p only checks ARG1 bits within A's precision.
13895 If <sign bit of A> has wider type than A, bits outside
13896 of A's precision in <sign bit of A> need to be checked.
13897 If they are all 0, this optimization needs to be done
13898 in unsigned A's type, if they are all 1 in signed A's type,
13899 otherwise this can't be done. */
13900 if (TYPE_PRECISION (TREE_TYPE (tem
))
13901 < TYPE_PRECISION (TREE_TYPE (arg1
))
13902 && TYPE_PRECISION (TREE_TYPE (tem
))
13903 < TYPE_PRECISION (type
))
13905 unsigned HOST_WIDE_INT mask_lo
;
13906 HOST_WIDE_INT mask_hi
;
13907 int inner_width
, outer_width
;
13910 inner_width
= TYPE_PRECISION (TREE_TYPE (tem
));
13911 outer_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
13912 if (outer_width
> TYPE_PRECISION (type
))
13913 outer_width
= TYPE_PRECISION (type
);
13915 if (outer_width
> HOST_BITS_PER_WIDE_INT
)
13917 mask_hi
= ((unsigned HOST_WIDE_INT
) -1
13918 >> (HOST_BITS_PER_DOUBLE_INT
- outer_width
));
13924 mask_lo
= ((unsigned HOST_WIDE_INT
) -1
13925 >> (HOST_BITS_PER_WIDE_INT
- outer_width
));
13927 if (inner_width
> HOST_BITS_PER_WIDE_INT
)
13929 mask_hi
&= ~((unsigned HOST_WIDE_INT
) -1
13930 >> (HOST_BITS_PER_WIDE_INT
- inner_width
));
13934 mask_lo
&= ~((unsigned HOST_WIDE_INT
) -1
13935 >> (HOST_BITS_PER_WIDE_INT
- inner_width
));
13937 if ((TREE_INT_CST_HIGH (arg1
) & mask_hi
) == mask_hi
13938 && (TREE_INT_CST_LOW (arg1
) & mask_lo
) == mask_lo
)
13940 tem_type
= signed_type_for (TREE_TYPE (tem
));
13941 tem
= fold_convert_loc (loc
, tem_type
, tem
);
13943 else if ((TREE_INT_CST_HIGH (arg1
) & mask_hi
) == 0
13944 && (TREE_INT_CST_LOW (arg1
) & mask_lo
) == 0)
13946 tem_type
= unsigned_type_for (TREE_TYPE (tem
));
13947 tem
= fold_convert_loc (loc
, tem_type
, tem
);
13955 fold_convert_loc (loc
, type
,
13956 fold_build2_loc (loc
, BIT_AND_EXPR
,
13957 TREE_TYPE (tem
), tem
,
13958 fold_convert_loc (loc
,
13963 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13964 already handled above. */
13965 if (TREE_CODE (arg0
) == BIT_AND_EXPR
13966 && integer_onep (TREE_OPERAND (arg0
, 1))
13967 && integer_zerop (op2
)
13968 && integer_pow2p (arg1
))
13970 tree tem
= TREE_OPERAND (arg0
, 0);
13972 if (TREE_CODE (tem
) == RSHIFT_EXPR
13973 && TREE_CODE (TREE_OPERAND (tem
, 1)) == INTEGER_CST
13974 && (unsigned HOST_WIDE_INT
) tree_log2 (arg1
) ==
13975 TREE_INT_CST_LOW (TREE_OPERAND (tem
, 1)))
13976 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
13977 TREE_OPERAND (tem
, 0), arg1
);
13980 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13981 is probably obsolete because the first operand should be a
13982 truth value (that's why we have the two cases above), but let's
13983 leave it in until we can confirm this for all front-ends. */
13984 if (integer_zerop (op2
)
13985 && TREE_CODE (arg0
) == NE_EXPR
13986 && integer_zerop (TREE_OPERAND (arg0
, 1))
13987 && integer_pow2p (arg1
)
13988 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
13989 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
13990 arg1
, OEP_ONLY_CONST
))
13991 return pedantic_non_lvalue_loc (loc
,
13992 fold_convert_loc (loc
, type
,
13993 TREE_OPERAND (arg0
, 0)));
13995 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13996 if (integer_zerop (op2
)
13997 && truth_value_p (TREE_CODE (arg0
))
13998 && truth_value_p (TREE_CODE (arg1
)))
13999 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
14000 fold_convert_loc (loc
, type
, arg0
),
14003 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14004 if (integer_onep (op2
)
14005 && truth_value_p (TREE_CODE (arg0
))
14006 && truth_value_p (TREE_CODE (arg1
)))
14008 location_t loc0
= expr_location_or (arg0
, loc
);
14009 /* Only perform transformation if ARG0 is easily inverted. */
14010 tem
= fold_truth_not_expr (loc0
, arg0
);
14012 return fold_build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
14013 fold_convert_loc (loc
, type
, tem
),
14017 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14018 if (integer_zerop (arg1
)
14019 && truth_value_p (TREE_CODE (arg0
))
14020 && truth_value_p (TREE_CODE (op2
)))
14022 location_t loc0
= expr_location_or (arg0
, loc
);
14023 /* Only perform transformation if ARG0 is easily inverted. */
14024 tem
= fold_truth_not_expr (loc0
, arg0
);
14026 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
14027 fold_convert_loc (loc
, type
, tem
),
14031 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14032 if (integer_onep (arg1
)
14033 && truth_value_p (TREE_CODE (arg0
))
14034 && truth_value_p (TREE_CODE (op2
)))
14035 return fold_build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
14036 fold_convert_loc (loc
, type
, arg0
),
14042 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14043 of fold_ternary on them. */
14044 gcc_unreachable ();
14046 case BIT_FIELD_REF
:
14047 if ((TREE_CODE (arg0
) == VECTOR_CST
14048 || (TREE_CODE (arg0
) == CONSTRUCTOR
14049 && TREE_CODE (TREE_TYPE (arg0
)) == VECTOR_TYPE
))
14050 && (type
== TREE_TYPE (TREE_TYPE (arg0
))
14051 || (TREE_CODE (type
) == VECTOR_TYPE
14052 && TREE_TYPE (type
) == TREE_TYPE (TREE_TYPE (arg0
)))))
14054 tree eltype
= TREE_TYPE (TREE_TYPE (arg0
));
14055 unsigned HOST_WIDE_INT width
= tree_low_cst (TYPE_SIZE (eltype
), 1);
14056 unsigned HOST_WIDE_INT n
= tree_low_cst (arg1
, 1);
14057 unsigned HOST_WIDE_INT idx
= tree_low_cst (op2
, 1);
14060 && (idx
% width
) == 0
14061 && (n
% width
) == 0
14062 && ((idx
+ n
) / width
) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)))
14066 if (TREE_CODE (type
) == VECTOR_TYPE
)
14068 if (TREE_CODE (arg0
) == VECTOR_CST
)
14070 tree
*vals
= XALLOCAVEC (tree
, n
);
14072 for (i
= 0; i
< n
; ++i
)
14073 vals
[i
] = VECTOR_CST_ELT (arg0
, idx
+ i
);
14074 return build_vector (type
, vals
);
14078 VEC(constructor_elt
, gc
) *vals
;
14080 if (CONSTRUCTOR_NELTS (arg0
) == 0)
14081 return build_constructor (type
, NULL
);
14082 vals
= VEC_alloc (constructor_elt
, gc
, n
);
14083 for (i
= 0; i
< n
&& idx
+ i
< CONSTRUCTOR_NELTS (arg0
);
14085 CONSTRUCTOR_APPEND_ELT (vals
, NULL_TREE
,
14087 (arg0
, idx
+ i
)->value
);
14088 return build_constructor (type
, vals
);
14093 if (TREE_CODE (arg0
) == VECTOR_CST
)
14094 return VECTOR_CST_ELT (arg0
, idx
);
14095 else if (idx
< CONSTRUCTOR_NELTS (arg0
))
14096 return CONSTRUCTOR_ELT (arg0
, idx
)->value
;
14097 return build_zero_cst (type
);
14102 /* A bit-field-ref that referenced the full argument can be stripped. */
14103 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
14104 && TYPE_PRECISION (TREE_TYPE (arg0
)) == tree_low_cst (arg1
, 1)
14105 && integer_zerop (op2
))
14106 return fold_convert_loc (loc
, type
, arg0
);
14108 /* On constants we can use native encode/interpret to constant
14109 fold (nearly) all BIT_FIELD_REFs. */
14110 if (CONSTANT_CLASS_P (arg0
)
14111 && can_native_interpret_type_p (type
)
14112 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (arg0
)), 1)
14113 /* This limitation should not be necessary, we just need to
14114 round this up to mode size. */
14115 && tree_low_cst (op1
, 1) % BITS_PER_UNIT
== 0
14116 /* Need bit-shifting of the buffer to relax the following. */
14117 && tree_low_cst (op2
, 1) % BITS_PER_UNIT
== 0)
14119 unsigned HOST_WIDE_INT bitpos
= tree_low_cst (op2
, 1);
14120 unsigned HOST_WIDE_INT bitsize
= tree_low_cst (op1
, 1);
14121 unsigned HOST_WIDE_INT clen
;
14122 clen
= tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (arg0
)), 1);
14123 /* ??? We cannot tell native_encode_expr to start at
14124 some random byte only. So limit us to a reasonable amount
14128 unsigned char *b
= XALLOCAVEC (unsigned char, clen
);
14129 unsigned HOST_WIDE_INT len
= native_encode_expr (arg0
, b
, clen
);
14131 && len
* BITS_PER_UNIT
>= bitpos
+ bitsize
)
14133 tree v
= native_interpret_expr (type
,
14134 b
+ bitpos
/ BITS_PER_UNIT
,
14135 bitsize
/ BITS_PER_UNIT
);
14145 /* For integers we can decompose the FMA if possible. */
14146 if (TREE_CODE (arg0
) == INTEGER_CST
14147 && TREE_CODE (arg1
) == INTEGER_CST
)
14148 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
14149 const_binop (MULT_EXPR
, arg0
, arg1
), arg2
);
14150 if (integer_zerop (arg2
))
14151 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, arg1
);
14153 return fold_fma (loc
, type
, arg0
, arg1
, arg2
);
14155 case VEC_PERM_EXPR
:
14156 if (TREE_CODE (arg2
) == VECTOR_CST
)
14158 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
, mask
;
14159 unsigned char *sel
= XALLOCAVEC (unsigned char, nelts
);
14161 bool need_mask_canon
= false;
14162 bool all_in_vec0
= true;
14163 bool all_in_vec1
= true;
14164 bool maybe_identity
= true;
14165 bool single_arg
= (op0
== op1
);
14166 bool changed
= false;
14168 mask
= single_arg
? (nelts
- 1) : (2 * nelts
- 1);
14169 gcc_assert (nelts
== VECTOR_CST_NELTS (arg2
));
14170 for (i
= 0; i
< nelts
; i
++)
14172 tree val
= VECTOR_CST_ELT (arg2
, i
);
14173 if (TREE_CODE (val
) != INTEGER_CST
)
14176 sel
[i
] = TREE_INT_CST_LOW (val
) & mask
;
14177 if (TREE_INT_CST_HIGH (val
)
14178 || ((unsigned HOST_WIDE_INT
)
14179 TREE_INT_CST_LOW (val
) != sel
[i
]))
14180 need_mask_canon
= true;
14182 if (sel
[i
] < nelts
)
14183 all_in_vec1
= false;
14185 all_in_vec0
= false;
14187 if ((sel
[i
] & (nelts
-1)) != i
)
14188 maybe_identity
= false;
14191 if (maybe_identity
)
14199 if ((TREE_CODE (arg0
) == VECTOR_CST
14200 || TREE_CODE (arg0
) == CONSTRUCTOR
)
14201 && (TREE_CODE (arg1
) == VECTOR_CST
14202 || TREE_CODE (arg1
) == CONSTRUCTOR
))
14204 t
= fold_vec_perm (type
, arg0
, arg1
, sel
);
14205 if (t
!= NULL_TREE
)
14211 else if (all_in_vec1
)
14214 for (i
= 0; i
< nelts
; i
++)
14216 need_mask_canon
= true;
14219 if (op0
== op1
&& !single_arg
)
14222 if (need_mask_canon
&& arg2
== op2
)
14224 tree
*tsel
= XALLOCAVEC (tree
, nelts
);
14225 tree eltype
= TREE_TYPE (TREE_TYPE (arg2
));
14226 for (i
= 0; i
< nelts
; i
++)
14227 tsel
[i
] = build_int_cst (eltype
, sel
[i
]);
14228 op2
= build_vector (TREE_TYPE (arg2
), tsel
);
14233 return build3_loc (loc
, VEC_PERM_EXPR
, type
, op0
, op1
, op2
);
14239 } /* switch (code) */
14242 /* Perform constant folding and related simplification of EXPR.
14243 The related simplifications include x*1 => x, x*0 => 0, etc.,
14244 and application of the associative law.
14245 NOP_EXPR conversions may be removed freely (as long as we
14246 are careful not to change the type of the overall expression).
14247 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14248 but we can constant-fold them if they have constant operands. */
14250 #ifdef ENABLE_FOLD_CHECKING
14251 # define fold(x) fold_1 (x)
14252 static tree
fold_1 (tree
);
14258 const tree t
= expr
;
14259 enum tree_code code
= TREE_CODE (t
);
14260 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
14262 location_t loc
= EXPR_LOCATION (expr
);
14264 /* Return right away if a constant. */
14265 if (kind
== tcc_constant
)
14268 /* CALL_EXPR-like objects with variable numbers of operands are
14269 treated specially. */
14270 if (kind
== tcc_vl_exp
)
14272 if (code
== CALL_EXPR
)
14274 tem
= fold_call_expr (loc
, expr
, false);
14275 return tem
? tem
: expr
;
14280 if (IS_EXPR_CODE_CLASS (kind
))
14282 tree type
= TREE_TYPE (t
);
14283 tree op0
, op1
, op2
;
14285 switch (TREE_CODE_LENGTH (code
))
14288 op0
= TREE_OPERAND (t
, 0);
14289 tem
= fold_unary_loc (loc
, code
, type
, op0
);
14290 return tem
? tem
: expr
;
14292 op0
= TREE_OPERAND (t
, 0);
14293 op1
= TREE_OPERAND (t
, 1);
14294 tem
= fold_binary_loc (loc
, code
, type
, op0
, op1
);
14295 return tem
? tem
: expr
;
14297 op0
= TREE_OPERAND (t
, 0);
14298 op1
= TREE_OPERAND (t
, 1);
14299 op2
= TREE_OPERAND (t
, 2);
14300 tem
= fold_ternary_loc (loc
, code
, type
, op0
, op1
, op2
);
14301 return tem
? tem
: expr
;
14311 tree op0
= TREE_OPERAND (t
, 0);
14312 tree op1
= TREE_OPERAND (t
, 1);
14314 if (TREE_CODE (op1
) == INTEGER_CST
14315 && TREE_CODE (op0
) == CONSTRUCTOR
14316 && ! type_contains_placeholder_p (TREE_TYPE (op0
)))
14318 VEC(constructor_elt
,gc
) *elts
= CONSTRUCTOR_ELTS (op0
);
14319 unsigned HOST_WIDE_INT end
= VEC_length (constructor_elt
, elts
);
14320 unsigned HOST_WIDE_INT begin
= 0;
14322 /* Find a matching index by means of a binary search. */
14323 while (begin
!= end
)
14325 unsigned HOST_WIDE_INT middle
= (begin
+ end
) / 2;
14326 tree index
= VEC_index (constructor_elt
, elts
, middle
).index
;
14328 if (TREE_CODE (index
) == INTEGER_CST
14329 && tree_int_cst_lt (index
, op1
))
14330 begin
= middle
+ 1;
14331 else if (TREE_CODE (index
) == INTEGER_CST
14332 && tree_int_cst_lt (op1
, index
))
14334 else if (TREE_CODE (index
) == RANGE_EXPR
14335 && tree_int_cst_lt (TREE_OPERAND (index
, 1), op1
))
14336 begin
= middle
+ 1;
14337 else if (TREE_CODE (index
) == RANGE_EXPR
14338 && tree_int_cst_lt (op1
, TREE_OPERAND (index
, 0)))
14341 return VEC_index (constructor_elt
, elts
, middle
).value
;
14349 return fold (DECL_INITIAL (t
));
14353 } /* switch (code) */
14356 #ifdef ENABLE_FOLD_CHECKING
14359 static void fold_checksum_tree (const_tree
, struct md5_ctx
*, htab_t
);
14360 static void fold_check_failed (const_tree
, const_tree
);
14361 void print_fold_checksum (const_tree
);
14363 /* When --enable-checking=fold, compute a digest of expr before
14364 and after actual fold call to see if fold did not accidentally
14365 change original expr. */
14371 struct md5_ctx ctx
;
14372 unsigned char checksum_before
[16], checksum_after
[16];
14375 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
14376 md5_init_ctx (&ctx
);
14377 fold_checksum_tree (expr
, &ctx
, ht
);
14378 md5_finish_ctx (&ctx
, checksum_before
);
14381 ret
= fold_1 (expr
);
14383 md5_init_ctx (&ctx
);
14384 fold_checksum_tree (expr
, &ctx
, ht
);
14385 md5_finish_ctx (&ctx
, checksum_after
);
14388 if (memcmp (checksum_before
, checksum_after
, 16))
14389 fold_check_failed (expr
, ret
);
14395 print_fold_checksum (const_tree expr
)
14397 struct md5_ctx ctx
;
14398 unsigned char checksum
[16], cnt
;
14401 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
14402 md5_init_ctx (&ctx
);
14403 fold_checksum_tree (expr
, &ctx
, ht
);
14404 md5_finish_ctx (&ctx
, checksum
);
14406 for (cnt
= 0; cnt
< 16; ++cnt
)
14407 fprintf (stderr
, "%02x", checksum
[cnt
]);
14408 putc ('\n', stderr
);
14412 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED
, const_tree ret ATTRIBUTE_UNUSED
)
14414 internal_error ("fold check: original tree changed by fold");
14418 fold_checksum_tree (const_tree expr
, struct md5_ctx
*ctx
, htab_t ht
)
14421 enum tree_code code
;
14422 union tree_node buf
;
14428 slot
= (void **) htab_find_slot (ht
, expr
, INSERT
);
14431 *slot
= CONST_CAST_TREE (expr
);
14432 code
= TREE_CODE (expr
);
14433 if (TREE_CODE_CLASS (code
) == tcc_declaration
14434 && DECL_ASSEMBLER_NAME_SET_P (expr
))
14436 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14437 memcpy ((char *) &buf
, expr
, tree_size (expr
));
14438 SET_DECL_ASSEMBLER_NAME ((tree
)&buf
, NULL
);
14439 expr
= (tree
) &buf
;
14441 else if (TREE_CODE_CLASS (code
) == tcc_type
14442 && (TYPE_POINTER_TO (expr
)
14443 || TYPE_REFERENCE_TO (expr
)
14444 || TYPE_CACHED_VALUES_P (expr
)
14445 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr
)
14446 || TYPE_NEXT_VARIANT (expr
)))
14448 /* Allow these fields to be modified. */
14450 memcpy ((char *) &buf
, expr
, tree_size (expr
));
14451 expr
= tmp
= (tree
) &buf
;
14452 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp
) = 0;
14453 TYPE_POINTER_TO (tmp
) = NULL
;
14454 TYPE_REFERENCE_TO (tmp
) = NULL
;
14455 TYPE_NEXT_VARIANT (tmp
) = NULL
;
14456 if (TYPE_CACHED_VALUES_P (tmp
))
14458 TYPE_CACHED_VALUES_P (tmp
) = 0;
14459 TYPE_CACHED_VALUES (tmp
) = NULL
;
14462 md5_process_bytes (expr
, tree_size (expr
), ctx
);
14463 if (CODE_CONTAINS_STRUCT (code
, TS_TYPED
))
14464 fold_checksum_tree (TREE_TYPE (expr
), ctx
, ht
);
14465 if (TREE_CODE_CLASS (code
) != tcc_type
14466 && TREE_CODE_CLASS (code
) != tcc_declaration
14467 && code
!= TREE_LIST
14468 && code
!= SSA_NAME
14469 && CODE_CONTAINS_STRUCT (code
, TS_COMMON
))
14470 fold_checksum_tree (TREE_CHAIN (expr
), ctx
, ht
);
14471 switch (TREE_CODE_CLASS (code
))
14477 md5_process_bytes (TREE_STRING_POINTER (expr
),
14478 TREE_STRING_LENGTH (expr
), ctx
);
14481 fold_checksum_tree (TREE_REALPART (expr
), ctx
, ht
);
14482 fold_checksum_tree (TREE_IMAGPART (expr
), ctx
, ht
);
14485 for (i
= 0; i
< (int) VECTOR_CST_NELTS (expr
); ++i
)
14486 fold_checksum_tree (VECTOR_CST_ELT (expr
, i
), ctx
, ht
);
14492 case tcc_exceptional
:
14496 fold_checksum_tree (TREE_PURPOSE (expr
), ctx
, ht
);
14497 fold_checksum_tree (TREE_VALUE (expr
), ctx
, ht
);
14498 expr
= TREE_CHAIN (expr
);
14499 goto recursive_label
;
14502 for (i
= 0; i
< TREE_VEC_LENGTH (expr
); ++i
)
14503 fold_checksum_tree (TREE_VEC_ELT (expr
, i
), ctx
, ht
);
14509 case tcc_expression
:
14510 case tcc_reference
:
14511 case tcc_comparison
:
14514 case tcc_statement
:
14516 len
= TREE_OPERAND_LENGTH (expr
);
14517 for (i
= 0; i
< len
; ++i
)
14518 fold_checksum_tree (TREE_OPERAND (expr
, i
), ctx
, ht
);
14520 case tcc_declaration
:
14521 fold_checksum_tree (DECL_NAME (expr
), ctx
, ht
);
14522 fold_checksum_tree (DECL_CONTEXT (expr
), ctx
, ht
);
14523 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_COMMON
))
14525 fold_checksum_tree (DECL_SIZE (expr
), ctx
, ht
);
14526 fold_checksum_tree (DECL_SIZE_UNIT (expr
), ctx
, ht
);
14527 fold_checksum_tree (DECL_INITIAL (expr
), ctx
, ht
);
14528 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr
), ctx
, ht
);
14529 fold_checksum_tree (DECL_ATTRIBUTES (expr
), ctx
, ht
);
14531 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_WITH_VIS
))
14532 fold_checksum_tree (DECL_SECTION_NAME (expr
), ctx
, ht
);
14534 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_NON_COMMON
))
14536 fold_checksum_tree (DECL_VINDEX (expr
), ctx
, ht
);
14537 fold_checksum_tree (DECL_RESULT_FLD (expr
), ctx
, ht
);
14538 fold_checksum_tree (DECL_ARGUMENT_FLD (expr
), ctx
, ht
);
14542 if (TREE_CODE (expr
) == ENUMERAL_TYPE
)
14543 fold_checksum_tree (TYPE_VALUES (expr
), ctx
, ht
);
14544 fold_checksum_tree (TYPE_SIZE (expr
), ctx
, ht
);
14545 fold_checksum_tree (TYPE_SIZE_UNIT (expr
), ctx
, ht
);
14546 fold_checksum_tree (TYPE_ATTRIBUTES (expr
), ctx
, ht
);
14547 fold_checksum_tree (TYPE_NAME (expr
), ctx
, ht
);
14548 if (INTEGRAL_TYPE_P (expr
)
14549 || SCALAR_FLOAT_TYPE_P (expr
))
14551 fold_checksum_tree (TYPE_MIN_VALUE (expr
), ctx
, ht
);
14552 fold_checksum_tree (TYPE_MAX_VALUE (expr
), ctx
, ht
);
14554 fold_checksum_tree (TYPE_MAIN_VARIANT (expr
), ctx
, ht
);
14555 if (TREE_CODE (expr
) == RECORD_TYPE
14556 || TREE_CODE (expr
) == UNION_TYPE
14557 || TREE_CODE (expr
) == QUAL_UNION_TYPE
)
14558 fold_checksum_tree (TYPE_BINFO (expr
), ctx
, ht
);
14559 fold_checksum_tree (TYPE_CONTEXT (expr
), ctx
, ht
);
14566 /* Helper function for outputting the checksum of a tree T. When
14567 debugging with gdb, you can "define mynext" to be "next" followed
14568 by "call debug_fold_checksum (op0)", then just trace down till the
14571 DEBUG_FUNCTION
void
14572 debug_fold_checksum (const_tree t
)
14575 unsigned char checksum
[16];
14576 struct md5_ctx ctx
;
14577 htab_t ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
14579 md5_init_ctx (&ctx
);
14580 fold_checksum_tree (t
, &ctx
, ht
);
14581 md5_finish_ctx (&ctx
, checksum
);
14584 for (i
= 0; i
< 16; i
++)
14585 fprintf (stderr
, "%d ", checksum
[i
]);
14587 fprintf (stderr
, "\n");
14592 /* Fold a unary tree expression with code CODE of type TYPE with an
14593 operand OP0. LOC is the location of the resulting expression.
14594 Return a folded expression if successful. Otherwise, return a tree
14595 expression with code CODE of type TYPE with an operand OP0. */
14598 fold_build1_stat_loc (location_t loc
,
14599 enum tree_code code
, tree type
, tree op0 MEM_STAT_DECL
)
14602 #ifdef ENABLE_FOLD_CHECKING
14603 unsigned char checksum_before
[16], checksum_after
[16];
14604 struct md5_ctx ctx
;
14607 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
14608 md5_init_ctx (&ctx
);
14609 fold_checksum_tree (op0
, &ctx
, ht
);
14610 md5_finish_ctx (&ctx
, checksum_before
);
14614 tem
= fold_unary_loc (loc
, code
, type
, op0
);
14616 tem
= build1_stat_loc (loc
, code
, type
, op0 PASS_MEM_STAT
);
14618 #ifdef ENABLE_FOLD_CHECKING
14619 md5_init_ctx (&ctx
);
14620 fold_checksum_tree (op0
, &ctx
, ht
);
14621 md5_finish_ctx (&ctx
, checksum_after
);
14624 if (memcmp (checksum_before
, checksum_after
, 16))
14625 fold_check_failed (op0
, tem
);
14630 /* Fold a binary tree expression with code CODE of type TYPE with
14631 operands OP0 and OP1. LOC is the location of the resulting
14632 expression. Return a folded expression if successful. Otherwise,
14633 return a tree expression with code CODE of type TYPE with operands
14637 fold_build2_stat_loc (location_t loc
,
14638 enum tree_code code
, tree type
, tree op0
, tree op1
14642 #ifdef ENABLE_FOLD_CHECKING
14643 unsigned char checksum_before_op0
[16],
14644 checksum_before_op1
[16],
14645 checksum_after_op0
[16],
14646 checksum_after_op1
[16];
14647 struct md5_ctx ctx
;
14650 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
14651 md5_init_ctx (&ctx
);
14652 fold_checksum_tree (op0
, &ctx
, ht
);
14653 md5_finish_ctx (&ctx
, checksum_before_op0
);
14656 md5_init_ctx (&ctx
);
14657 fold_checksum_tree (op1
, &ctx
, ht
);
14658 md5_finish_ctx (&ctx
, checksum_before_op1
);
14662 tem
= fold_binary_loc (loc
, code
, type
, op0
, op1
);
14664 tem
= build2_stat_loc (loc
, code
, type
, op0
, op1 PASS_MEM_STAT
);
14666 #ifdef ENABLE_FOLD_CHECKING
14667 md5_init_ctx (&ctx
);
14668 fold_checksum_tree (op0
, &ctx
, ht
);
14669 md5_finish_ctx (&ctx
, checksum_after_op0
);
14672 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
14673 fold_check_failed (op0
, tem
);
14675 md5_init_ctx (&ctx
);
14676 fold_checksum_tree (op1
, &ctx
, ht
);
14677 md5_finish_ctx (&ctx
, checksum_after_op1
);
14680 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
14681 fold_check_failed (op1
, tem
);
14686 /* Fold a ternary tree expression with code CODE of type TYPE with
14687 operands OP0, OP1, and OP2. Return a folded expression if
14688 successful. Otherwise, return a tree expression with code CODE of
14689 type TYPE with operands OP0, OP1, and OP2. */
14692 fold_build3_stat_loc (location_t loc
, enum tree_code code
, tree type
,
14693 tree op0
, tree op1
, tree op2 MEM_STAT_DECL
)
14696 #ifdef ENABLE_FOLD_CHECKING
14697 unsigned char checksum_before_op0
[16],
14698 checksum_before_op1
[16],
14699 checksum_before_op2
[16],
14700 checksum_after_op0
[16],
14701 checksum_after_op1
[16],
14702 checksum_after_op2
[16];
14703 struct md5_ctx ctx
;
14706 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
14707 md5_init_ctx (&ctx
);
14708 fold_checksum_tree (op0
, &ctx
, ht
);
14709 md5_finish_ctx (&ctx
, checksum_before_op0
);
14712 md5_init_ctx (&ctx
);
14713 fold_checksum_tree (op1
, &ctx
, ht
);
14714 md5_finish_ctx (&ctx
, checksum_before_op1
);
14717 md5_init_ctx (&ctx
);
14718 fold_checksum_tree (op2
, &ctx
, ht
);
14719 md5_finish_ctx (&ctx
, checksum_before_op2
);
14723 gcc_assert (TREE_CODE_CLASS (code
) != tcc_vl_exp
);
14724 tem
= fold_ternary_loc (loc
, code
, type
, op0
, op1
, op2
);
14726 tem
= build3_stat_loc (loc
, code
, type
, op0
, op1
, op2 PASS_MEM_STAT
);
14728 #ifdef ENABLE_FOLD_CHECKING
14729 md5_init_ctx (&ctx
);
14730 fold_checksum_tree (op0
, &ctx
, ht
);
14731 md5_finish_ctx (&ctx
, checksum_after_op0
);
14734 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
14735 fold_check_failed (op0
, tem
);
14737 md5_init_ctx (&ctx
);
14738 fold_checksum_tree (op1
, &ctx
, ht
);
14739 md5_finish_ctx (&ctx
, checksum_after_op1
);
14742 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
14743 fold_check_failed (op1
, tem
);
14745 md5_init_ctx (&ctx
);
14746 fold_checksum_tree (op2
, &ctx
, ht
);
14747 md5_finish_ctx (&ctx
, checksum_after_op2
);
14750 if (memcmp (checksum_before_op2
, checksum_after_op2
, 16))
14751 fold_check_failed (op2
, tem
);
14756 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14757 arguments in ARGARRAY, and a null static chain.
14758 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14759 of type TYPE from the given operands as constructed by build_call_array. */
14762 fold_build_call_array_loc (location_t loc
, tree type
, tree fn
,
14763 int nargs
, tree
*argarray
)
14766 #ifdef ENABLE_FOLD_CHECKING
14767 unsigned char checksum_before_fn
[16],
14768 checksum_before_arglist
[16],
14769 checksum_after_fn
[16],
14770 checksum_after_arglist
[16];
14771 struct md5_ctx ctx
;
14775 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
14776 md5_init_ctx (&ctx
);
14777 fold_checksum_tree (fn
, &ctx
, ht
);
14778 md5_finish_ctx (&ctx
, checksum_before_fn
);
14781 md5_init_ctx (&ctx
);
14782 for (i
= 0; i
< nargs
; i
++)
14783 fold_checksum_tree (argarray
[i
], &ctx
, ht
);
14784 md5_finish_ctx (&ctx
, checksum_before_arglist
);
14788 tem
= fold_builtin_call_array (loc
, type
, fn
, nargs
, argarray
);
14790 #ifdef ENABLE_FOLD_CHECKING
14791 md5_init_ctx (&ctx
);
14792 fold_checksum_tree (fn
, &ctx
, ht
);
14793 md5_finish_ctx (&ctx
, checksum_after_fn
);
14796 if (memcmp (checksum_before_fn
, checksum_after_fn
, 16))
14797 fold_check_failed (fn
, tem
);
14799 md5_init_ctx (&ctx
);
14800 for (i
= 0; i
< nargs
; i
++)
14801 fold_checksum_tree (argarray
[i
], &ctx
, ht
);
14802 md5_finish_ctx (&ctx
, checksum_after_arglist
);
14805 if (memcmp (checksum_before_arglist
, checksum_after_arglist
, 16))
14806 fold_check_failed (NULL_TREE
, tem
);
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;
14835 fold_build1_initializer_loc (location_t loc
, enum tree_code code
,
14836 tree type
, tree op
)
14841 result
= fold_build1_loc (loc
, code
, type
, op
);
14848 fold_build2_initializer_loc (location_t loc
, enum tree_code code
,
14849 tree type
, tree op0
, tree op1
)
14854 result
= fold_build2_loc (loc
, code
, type
, op0
, op1
);
14861 fold_build3_initializer_loc (location_t loc
, enum tree_code code
,
14862 tree type
, tree op0
, tree op1
, tree op2
)
14867 result
= fold_build3_loc (loc
, code
, type
, op0
, op1
, op2
);
14874 fold_build_call_array_initializer_loc (location_t loc
, tree type
, tree fn
,
14875 int nargs
, tree
*argarray
)
14880 result
= fold_build_call_array_loc (loc
, type
, fn
, nargs
, argarray
);
14886 #undef START_FOLD_INIT
14887 #undef END_FOLD_INIT
14889 /* Determine if first argument is a multiple of second argument. Return 0 if
14890 it is not, or we cannot easily determined it to be.
14892 An example of the sort of thing we care about (at this point; this routine
14893 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14894 fold cases do now) is discovering that
14896 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14902 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14904 This code also handles discovering that
14906 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14908 is a multiple of 8 so we don't have to worry about dealing with a
14909 possible remainder.
14911 Note that we *look* inside a SAVE_EXPR only to determine how it was
14912 calculated; it is not safe for fold to do much of anything else with the
14913 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14914 at run time. For example, the latter example above *cannot* be implemented
14915 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14916 evaluation time of the original SAVE_EXPR is not necessarily the same at
14917 the time the new expression is evaluated. The only optimization of this
14918 sort that would be valid is changing
14920 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14924 SAVE_EXPR (I) * SAVE_EXPR (J)
14926 (where the same SAVE_EXPR (J) is used in the original and the
14927 transformed version). */
14930 multiple_of_p (tree type
, const_tree top
, const_tree bottom
)
14932 if (operand_equal_p (top
, bottom
, 0))
14935 if (TREE_CODE (type
) != INTEGER_TYPE
)
14938 switch (TREE_CODE (top
))
14941 /* Bitwise and provides a power of two multiple. If the mask is
14942 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14943 if (!integer_pow2p (bottom
))
14948 return (multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
)
14949 || multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
));
14953 return (multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
)
14954 && multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
));
14957 if (TREE_CODE (TREE_OPERAND (top
, 1)) == INTEGER_CST
)
14961 op1
= TREE_OPERAND (top
, 1);
14962 /* const_binop may not detect overflow correctly,
14963 so check for it explicitly here. */
14964 if (TYPE_PRECISION (TREE_TYPE (size_one_node
))
14965 > TREE_INT_CST_LOW (op1
)
14966 && TREE_INT_CST_HIGH (op1
) == 0
14967 && 0 != (t1
= fold_convert (type
,
14968 const_binop (LSHIFT_EXPR
,
14971 && !TREE_OVERFLOW (t1
))
14972 return multiple_of_p (type
, t1
, bottom
);
14977 /* Can't handle conversions from non-integral or wider integral type. */
14978 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top
, 0))) != INTEGER_TYPE
)
14979 || (TYPE_PRECISION (type
)
14980 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top
, 0)))))
14983 /* .. fall through ... */
14986 return multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
);
14989 return (multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
)
14990 && multiple_of_p (type
, TREE_OPERAND (top
, 2), bottom
));
14993 if (TREE_CODE (bottom
) != INTEGER_CST
14994 || integer_zerop (bottom
)
14995 || (TYPE_UNSIGNED (type
)
14996 && (tree_int_cst_sgn (top
) < 0
14997 || tree_int_cst_sgn (bottom
) < 0)))
14999 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR
,
15007 /* Return true if CODE or TYPE is known to be non-negative. */
15010 tree_simple_nonnegative_warnv_p (enum tree_code code
, tree type
)
15012 if ((TYPE_PRECISION (type
) != 1 || TYPE_UNSIGNED (type
))
15013 && truth_value_p (code
))
15014 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15015 have a signed:1 type (where the value is -1 and 0). */
15020 /* Return true if (CODE OP0) is known to be non-negative. If the return
15021 value is based on the assumption that signed overflow is undefined,
15022 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15023 *STRICT_OVERFLOW_P. */
15026 tree_unary_nonnegative_warnv_p (enum tree_code code
, tree type
, tree op0
,
15027 bool *strict_overflow_p
)
15029 if (TYPE_UNSIGNED (type
))
15035 /* We can't return 1 if flag_wrapv is set because
15036 ABS_EXPR<INT_MIN> = INT_MIN. */
15037 if (!INTEGRAL_TYPE_P (type
))
15039 if (TYPE_OVERFLOW_UNDEFINED (type
))
15041 *strict_overflow_p
= true;
15046 case NON_LVALUE_EXPR
:
15048 case FIX_TRUNC_EXPR
:
15049 return tree_expr_nonnegative_warnv_p (op0
,
15050 strict_overflow_p
);
15054 tree inner_type
= TREE_TYPE (op0
);
15055 tree outer_type
= type
;
15057 if (TREE_CODE (outer_type
) == REAL_TYPE
)
15059 if (TREE_CODE (inner_type
) == REAL_TYPE
)
15060 return tree_expr_nonnegative_warnv_p (op0
,
15061 strict_overflow_p
);
15062 if (TREE_CODE (inner_type
) == INTEGER_TYPE
)
15064 if (TYPE_UNSIGNED (inner_type
))
15066 return tree_expr_nonnegative_warnv_p (op0
,
15067 strict_overflow_p
);
15070 else if (TREE_CODE (outer_type
) == INTEGER_TYPE
)
15072 if (TREE_CODE (inner_type
) == REAL_TYPE
)
15073 return tree_expr_nonnegative_warnv_p (op0
,
15074 strict_overflow_p
);
15075 if (TREE_CODE (inner_type
) == INTEGER_TYPE
)
15076 return TYPE_PRECISION (inner_type
) < TYPE_PRECISION (outer_type
)
15077 && TYPE_UNSIGNED (inner_type
);
15083 return tree_simple_nonnegative_warnv_p (code
, type
);
15086 /* We don't know sign of `t', so be conservative and return false. */
15090 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15091 value is based on the assumption that signed overflow is undefined,
15092 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15093 *STRICT_OVERFLOW_P. */
15096 tree_binary_nonnegative_warnv_p (enum tree_code code
, tree type
, tree op0
,
15097 tree op1
, bool *strict_overflow_p
)
15099 if (TYPE_UNSIGNED (type
))
15104 case POINTER_PLUS_EXPR
:
15106 if (FLOAT_TYPE_P (type
))
15107 return (tree_expr_nonnegative_warnv_p (op0
,
15109 && tree_expr_nonnegative_warnv_p (op1
,
15110 strict_overflow_p
));
15112 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15113 both unsigned and at least 2 bits shorter than the result. */
15114 if (TREE_CODE (type
) == INTEGER_TYPE
15115 && TREE_CODE (op0
) == NOP_EXPR
15116 && TREE_CODE (op1
) == NOP_EXPR
)
15118 tree inner1
= TREE_TYPE (TREE_OPERAND (op0
, 0));
15119 tree inner2
= TREE_TYPE (TREE_OPERAND (op1
, 0));
15120 if (TREE_CODE (inner1
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner1
)
15121 && TREE_CODE (inner2
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner2
))
15123 unsigned int prec
= MAX (TYPE_PRECISION (inner1
),
15124 TYPE_PRECISION (inner2
)) + 1;
15125 return prec
< TYPE_PRECISION (type
);
15131 if (FLOAT_TYPE_P (type
))
15133 /* x * x for floating point x is always non-negative. */
15134 if (operand_equal_p (op0
, op1
, 0))
15136 return (tree_expr_nonnegative_warnv_p (op0
,
15138 && tree_expr_nonnegative_warnv_p (op1
,
15139 strict_overflow_p
));
15142 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15143 both unsigned and their total bits is shorter than the result. */
15144 if (TREE_CODE (type
) == INTEGER_TYPE
15145 && (TREE_CODE (op0
) == NOP_EXPR
|| TREE_CODE (op0
) == INTEGER_CST
)
15146 && (TREE_CODE (op1
) == NOP_EXPR
|| TREE_CODE (op1
) == INTEGER_CST
))
15148 tree inner0
= (TREE_CODE (op0
) == NOP_EXPR
)
15149 ? TREE_TYPE (TREE_OPERAND (op0
, 0))
15151 tree inner1
= (TREE_CODE (op1
) == NOP_EXPR
)
15152 ? TREE_TYPE (TREE_OPERAND (op1
, 0))
15155 bool unsigned0
= TYPE_UNSIGNED (inner0
);
15156 bool unsigned1
= TYPE_UNSIGNED (inner1
);
15158 if (TREE_CODE (op0
) == INTEGER_CST
)
15159 unsigned0
= unsigned0
|| tree_int_cst_sgn (op0
) >= 0;
15161 if (TREE_CODE (op1
) == INTEGER_CST
)
15162 unsigned1
= unsigned1
|| tree_int_cst_sgn (op1
) >= 0;
15164 if (TREE_CODE (inner0
) == INTEGER_TYPE
&& unsigned0
15165 && TREE_CODE (inner1
) == INTEGER_TYPE
&& unsigned1
)
15167 unsigned int precision0
= (TREE_CODE (op0
) == INTEGER_CST
)
15168 ? tree_int_cst_min_precision (op0
, /*unsignedp=*/true)
15169 : TYPE_PRECISION (inner0
);
15171 unsigned int precision1
= (TREE_CODE (op1
) == INTEGER_CST
)
15172 ? tree_int_cst_min_precision (op1
, /*unsignedp=*/true)
15173 : TYPE_PRECISION (inner1
);
15175 return precision0
+ precision1
< TYPE_PRECISION (type
);
15182 return (tree_expr_nonnegative_warnv_p (op0
,
15184 || tree_expr_nonnegative_warnv_p (op1
,
15185 strict_overflow_p
));
15191 case TRUNC_DIV_EXPR
:
15192 case CEIL_DIV_EXPR
:
15193 case FLOOR_DIV_EXPR
:
15194 case ROUND_DIV_EXPR
:
15195 return (tree_expr_nonnegative_warnv_p (op0
,
15197 && tree_expr_nonnegative_warnv_p (op1
,
15198 strict_overflow_p
));
15200 case TRUNC_MOD_EXPR
:
15201 case CEIL_MOD_EXPR
:
15202 case FLOOR_MOD_EXPR
:
15203 case ROUND_MOD_EXPR
:
15204 return tree_expr_nonnegative_warnv_p (op0
,
15205 strict_overflow_p
);
15207 return tree_simple_nonnegative_warnv_p (code
, type
);
15210 /* We don't know sign of `t', so be conservative and return false. */
15214 /* Return true if T is known to be non-negative. If the return
15215 value is based on the assumption that signed overflow is undefined,
15216 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15217 *STRICT_OVERFLOW_P. */
15220 tree_single_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
)
15222 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
15225 switch (TREE_CODE (t
))
15228 return tree_int_cst_sgn (t
) >= 0;
15231 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t
));
15234 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t
));
15237 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 1),
15239 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 2),
15240 strict_overflow_p
));
15242 return tree_simple_nonnegative_warnv_p (TREE_CODE (t
),
15245 /* We don't know sign of `t', so be conservative and return false. */
15249 /* Return true if T is known to be non-negative. If the return
15250 value is based on the assumption that signed overflow is undefined,
15251 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15252 *STRICT_OVERFLOW_P. */
15255 tree_call_nonnegative_warnv_p (tree type
, tree fndecl
,
15256 tree arg0
, tree arg1
, bool *strict_overflow_p
)
15258 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
15259 switch (DECL_FUNCTION_CODE (fndecl
))
15261 CASE_FLT_FN (BUILT_IN_ACOS
):
15262 CASE_FLT_FN (BUILT_IN_ACOSH
):
15263 CASE_FLT_FN (BUILT_IN_CABS
):
15264 CASE_FLT_FN (BUILT_IN_COSH
):
15265 CASE_FLT_FN (BUILT_IN_ERFC
):
15266 CASE_FLT_FN (BUILT_IN_EXP
):
15267 CASE_FLT_FN (BUILT_IN_EXP10
):
15268 CASE_FLT_FN (BUILT_IN_EXP2
):
15269 CASE_FLT_FN (BUILT_IN_FABS
):
15270 CASE_FLT_FN (BUILT_IN_FDIM
):
15271 CASE_FLT_FN (BUILT_IN_HYPOT
):
15272 CASE_FLT_FN (BUILT_IN_POW10
):
15273 CASE_INT_FN (BUILT_IN_FFS
):
15274 CASE_INT_FN (BUILT_IN_PARITY
):
15275 CASE_INT_FN (BUILT_IN_POPCOUNT
):
15276 case BUILT_IN_BSWAP32
:
15277 case BUILT_IN_BSWAP64
:
15281 CASE_FLT_FN (BUILT_IN_SQRT
):
15282 /* sqrt(-0.0) is -0.0. */
15283 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type
)))
15285 return tree_expr_nonnegative_warnv_p (arg0
,
15286 strict_overflow_p
);
15288 CASE_FLT_FN (BUILT_IN_ASINH
):
15289 CASE_FLT_FN (BUILT_IN_ATAN
):
15290 CASE_FLT_FN (BUILT_IN_ATANH
):
15291 CASE_FLT_FN (BUILT_IN_CBRT
):
15292 CASE_FLT_FN (BUILT_IN_CEIL
):
15293 CASE_FLT_FN (BUILT_IN_ERF
):
15294 CASE_FLT_FN (BUILT_IN_EXPM1
):
15295 CASE_FLT_FN (BUILT_IN_FLOOR
):
15296 CASE_FLT_FN (BUILT_IN_FMOD
):
15297 CASE_FLT_FN (BUILT_IN_FREXP
):
15298 CASE_FLT_FN (BUILT_IN_ICEIL
):
15299 CASE_FLT_FN (BUILT_IN_IFLOOR
):
15300 CASE_FLT_FN (BUILT_IN_IRINT
):
15301 CASE_FLT_FN (BUILT_IN_IROUND
):
15302 CASE_FLT_FN (BUILT_IN_LCEIL
):
15303 CASE_FLT_FN (BUILT_IN_LDEXP
):
15304 CASE_FLT_FN (BUILT_IN_LFLOOR
):
15305 CASE_FLT_FN (BUILT_IN_LLCEIL
):
15306 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
15307 CASE_FLT_FN (BUILT_IN_LLRINT
):
15308 CASE_FLT_FN (BUILT_IN_LLROUND
):
15309 CASE_FLT_FN (BUILT_IN_LRINT
):
15310 CASE_FLT_FN (BUILT_IN_LROUND
):
15311 CASE_FLT_FN (BUILT_IN_MODF
):
15312 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
15313 CASE_FLT_FN (BUILT_IN_RINT
):
15314 CASE_FLT_FN (BUILT_IN_ROUND
):
15315 CASE_FLT_FN (BUILT_IN_SCALB
):
15316 CASE_FLT_FN (BUILT_IN_SCALBLN
):
15317 CASE_FLT_FN (BUILT_IN_SCALBN
):
15318 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
15319 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
15320 CASE_FLT_FN (BUILT_IN_SINH
):
15321 CASE_FLT_FN (BUILT_IN_TANH
):
15322 CASE_FLT_FN (BUILT_IN_TRUNC
):
15323 /* True if the 1st argument is nonnegative. */
15324 return tree_expr_nonnegative_warnv_p (arg0
,
15325 strict_overflow_p
);
15327 CASE_FLT_FN (BUILT_IN_FMAX
):
15328 /* True if the 1st OR 2nd arguments are nonnegative. */
15329 return (tree_expr_nonnegative_warnv_p (arg0
,
15331 || (tree_expr_nonnegative_warnv_p (arg1
,
15332 strict_overflow_p
)));
15334 CASE_FLT_FN (BUILT_IN_FMIN
):
15335 /* True if the 1st AND 2nd arguments are nonnegative. */
15336 return (tree_expr_nonnegative_warnv_p (arg0
,
15338 && (tree_expr_nonnegative_warnv_p (arg1
,
15339 strict_overflow_p
)));
15341 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
15342 /* True if the 2nd argument is nonnegative. */
15343 return tree_expr_nonnegative_warnv_p (arg1
,
15344 strict_overflow_p
);
15346 CASE_FLT_FN (BUILT_IN_POWI
):
15347 /* True if the 1st argument is nonnegative or the second
15348 argument is an even integer. */
15349 if (TREE_CODE (arg1
) == INTEGER_CST
15350 && (TREE_INT_CST_LOW (arg1
) & 1) == 0)
15352 return tree_expr_nonnegative_warnv_p (arg0
,
15353 strict_overflow_p
);
15355 CASE_FLT_FN (BUILT_IN_POW
):
15356 /* True if the 1st argument is nonnegative or the second
15357 argument is an even integer valued real. */
15358 if (TREE_CODE (arg1
) == REAL_CST
)
15363 c
= TREE_REAL_CST (arg1
);
15364 n
= real_to_integer (&c
);
15367 REAL_VALUE_TYPE cint
;
15368 real_from_integer (&cint
, VOIDmode
, n
,
15369 n
< 0 ? -1 : 0, 0);
15370 if (real_identical (&c
, &cint
))
15374 return tree_expr_nonnegative_warnv_p (arg0
,
15375 strict_overflow_p
);
15380 return tree_simple_nonnegative_warnv_p (CALL_EXPR
,
15384 /* Return true if T is known to be non-negative. If the return
15385 value is based on the assumption that signed overflow is undefined,
15386 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15387 *STRICT_OVERFLOW_P. */
15390 tree_invalid_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
)
15392 enum tree_code code
= TREE_CODE (t
);
15393 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
15400 tree temp
= TARGET_EXPR_SLOT (t
);
15401 t
= TARGET_EXPR_INITIAL (t
);
15403 /* If the initializer is non-void, then it's a normal expression
15404 that will be assigned to the slot. */
15405 if (!VOID_TYPE_P (t
))
15406 return tree_expr_nonnegative_warnv_p (t
, strict_overflow_p
);
15408 /* Otherwise, the initializer sets the slot in some way. One common
15409 way is an assignment statement at the end of the initializer. */
15412 if (TREE_CODE (t
) == BIND_EXPR
)
15413 t
= expr_last (BIND_EXPR_BODY (t
));
15414 else if (TREE_CODE (t
) == TRY_FINALLY_EXPR
15415 || TREE_CODE (t
) == TRY_CATCH_EXPR
)
15416 t
= expr_last (TREE_OPERAND (t
, 0));
15417 else if (TREE_CODE (t
) == STATEMENT_LIST
)
15422 if (TREE_CODE (t
) == MODIFY_EXPR
15423 && TREE_OPERAND (t
, 0) == temp
)
15424 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 1),
15425 strict_overflow_p
);
15432 tree arg0
= call_expr_nargs (t
) > 0 ? CALL_EXPR_ARG (t
, 0) : NULL_TREE
;
15433 tree arg1
= call_expr_nargs (t
) > 1 ? CALL_EXPR_ARG (t
, 1) : NULL_TREE
;
15435 return tree_call_nonnegative_warnv_p (TREE_TYPE (t
),
15436 get_callee_fndecl (t
),
15439 strict_overflow_p
);
15441 case COMPOUND_EXPR
:
15443 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 1),
15444 strict_overflow_p
);
15446 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t
, 1)),
15447 strict_overflow_p
);
15449 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 0),
15450 strict_overflow_p
);
15453 return tree_simple_nonnegative_warnv_p (TREE_CODE (t
),
15457 /* We don't know sign of `t', so be conservative and return false. */
15461 /* Return true if T is known to be non-negative. If the return
15462 value is based on the assumption that signed overflow is undefined,
15463 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15464 *STRICT_OVERFLOW_P. */
15467 tree_expr_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
)
15469 enum tree_code code
;
15470 if (t
== error_mark_node
)
15473 code
= TREE_CODE (t
);
15474 switch (TREE_CODE_CLASS (code
))
15477 case tcc_comparison
:
15478 return tree_binary_nonnegative_warnv_p (TREE_CODE (t
),
15480 TREE_OPERAND (t
, 0),
15481 TREE_OPERAND (t
, 1),
15482 strict_overflow_p
);
15485 return tree_unary_nonnegative_warnv_p (TREE_CODE (t
),
15487 TREE_OPERAND (t
, 0),
15488 strict_overflow_p
);
15491 case tcc_declaration
:
15492 case tcc_reference
:
15493 return tree_single_nonnegative_warnv_p (t
, strict_overflow_p
);
15501 case TRUTH_AND_EXPR
:
15502 case TRUTH_OR_EXPR
:
15503 case TRUTH_XOR_EXPR
:
15504 return tree_binary_nonnegative_warnv_p (TREE_CODE (t
),
15506 TREE_OPERAND (t
, 0),
15507 TREE_OPERAND (t
, 1),
15508 strict_overflow_p
);
15509 case TRUTH_NOT_EXPR
:
15510 return tree_unary_nonnegative_warnv_p (TREE_CODE (t
),
15512 TREE_OPERAND (t
, 0),
15513 strict_overflow_p
);
15520 case WITH_SIZE_EXPR
:
15522 return tree_single_nonnegative_warnv_p (t
, strict_overflow_p
);
15525 return tree_invalid_nonnegative_warnv_p (t
, strict_overflow_p
);
15529 /* Return true if `t' is known to be non-negative. Handle warnings
15530 about undefined signed overflow. */
15533 tree_expr_nonnegative_p (tree t
)
15535 bool ret
, strict_overflow_p
;
15537 strict_overflow_p
= false;
15538 ret
= tree_expr_nonnegative_warnv_p (t
, &strict_overflow_p
);
15539 if (strict_overflow_p
)
15540 fold_overflow_warning (("assuming signed overflow does not occur when "
15541 "determining that expression is always "
15543 WARN_STRICT_OVERFLOW_MISC
);
15548 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15549 For floating point we further ensure that T is not denormal.
15550 Similar logic is present in nonzero_address in rtlanal.h.
15552 If the return value is based on the assumption that signed overflow
15553 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15554 change *STRICT_OVERFLOW_P. */
15557 tree_unary_nonzero_warnv_p (enum tree_code code
, tree type
, tree op0
,
15558 bool *strict_overflow_p
)
15563 return tree_expr_nonzero_warnv_p (op0
,
15564 strict_overflow_p
);
15568 tree inner_type
= TREE_TYPE (op0
);
15569 tree outer_type
= type
;
15571 return (TYPE_PRECISION (outer_type
) >= TYPE_PRECISION (inner_type
)
15572 && tree_expr_nonzero_warnv_p (op0
,
15573 strict_overflow_p
));
15577 case NON_LVALUE_EXPR
:
15578 return tree_expr_nonzero_warnv_p (op0
,
15579 strict_overflow_p
);
15588 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15589 For floating point we further ensure that T is not denormal.
15590 Similar logic is present in nonzero_address in rtlanal.h.
15592 If the return value is based on the assumption that signed overflow
15593 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15594 change *STRICT_OVERFLOW_P. */
15597 tree_binary_nonzero_warnv_p (enum tree_code code
,
15600 tree op1
, bool *strict_overflow_p
)
15602 bool sub_strict_overflow_p
;
15605 case POINTER_PLUS_EXPR
:
15607 if (TYPE_OVERFLOW_UNDEFINED (type
))
15609 /* With the presence of negative values it is hard
15610 to say something. */
15611 sub_strict_overflow_p
= false;
15612 if (!tree_expr_nonnegative_warnv_p (op0
,
15613 &sub_strict_overflow_p
)
15614 || !tree_expr_nonnegative_warnv_p (op1
,
15615 &sub_strict_overflow_p
))
15617 /* One of operands must be positive and the other non-negative. */
15618 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15619 overflows, on a twos-complement machine the sum of two
15620 nonnegative numbers can never be zero. */
15621 return (tree_expr_nonzero_warnv_p (op0
,
15623 || tree_expr_nonzero_warnv_p (op1
,
15624 strict_overflow_p
));
15629 if (TYPE_OVERFLOW_UNDEFINED (type
))
15631 if (tree_expr_nonzero_warnv_p (op0
,
15633 && tree_expr_nonzero_warnv_p (op1
,
15634 strict_overflow_p
))
15636 *strict_overflow_p
= true;
15643 sub_strict_overflow_p
= false;
15644 if (tree_expr_nonzero_warnv_p (op0
,
15645 &sub_strict_overflow_p
)
15646 && tree_expr_nonzero_warnv_p (op1
,
15647 &sub_strict_overflow_p
))
15649 if (sub_strict_overflow_p
)
15650 *strict_overflow_p
= true;
15655 sub_strict_overflow_p
= false;
15656 if (tree_expr_nonzero_warnv_p (op0
,
15657 &sub_strict_overflow_p
))
15659 if (sub_strict_overflow_p
)
15660 *strict_overflow_p
= true;
15662 /* When both operands are nonzero, then MAX must be too. */
15663 if (tree_expr_nonzero_warnv_p (op1
,
15664 strict_overflow_p
))
15667 /* MAX where operand 0 is positive is positive. */
15668 return tree_expr_nonnegative_warnv_p (op0
,
15669 strict_overflow_p
);
15671 /* MAX where operand 1 is positive is positive. */
15672 else if (tree_expr_nonzero_warnv_p (op1
,
15673 &sub_strict_overflow_p
)
15674 && tree_expr_nonnegative_warnv_p (op1
,
15675 &sub_strict_overflow_p
))
15677 if (sub_strict_overflow_p
)
15678 *strict_overflow_p
= true;
15684 return (tree_expr_nonzero_warnv_p (op1
,
15686 || tree_expr_nonzero_warnv_p (op0
,
15687 strict_overflow_p
));
15696 /* Return true when T is an address and is known to be nonzero.
15697 For floating point we further ensure that T is not denormal.
15698 Similar logic is present in nonzero_address in rtlanal.h.
15700 If the return value is based on the assumption that signed overflow
15701 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15702 change *STRICT_OVERFLOW_P. */
15705 tree_single_nonzero_warnv_p (tree t
, bool *strict_overflow_p
)
15707 bool sub_strict_overflow_p
;
15708 switch (TREE_CODE (t
))
15711 return !integer_zerop (t
);
15715 tree base
= TREE_OPERAND (t
, 0);
15716 if (!DECL_P (base
))
15717 base
= get_base_address (base
);
15722 /* Weak declarations may link to NULL. Other things may also be NULL
15723 so protect with -fdelete-null-pointer-checks; but not variables
15724 allocated on the stack. */
15726 && (flag_delete_null_pointer_checks
15727 || (DECL_CONTEXT (base
)
15728 && TREE_CODE (DECL_CONTEXT (base
)) == FUNCTION_DECL
15729 && auto_var_in_fn_p (base
, DECL_CONTEXT (base
)))))
15730 return !VAR_OR_FUNCTION_DECL_P (base
) || !DECL_WEAK (base
);
15732 /* Constants are never weak. */
15733 if (CONSTANT_CLASS_P (base
))
15740 sub_strict_overflow_p
= false;
15741 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 1),
15742 &sub_strict_overflow_p
)
15743 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 2),
15744 &sub_strict_overflow_p
))
15746 if (sub_strict_overflow_p
)
15747 *strict_overflow_p
= true;
15758 /* Return true when T is an address and is known to be nonzero.
15759 For floating point we further ensure that T is not denormal.
15760 Similar logic is present in nonzero_address in rtlanal.h.
15762 If the return value is based on the assumption that signed overflow
15763 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15764 change *STRICT_OVERFLOW_P. */
15767 tree_expr_nonzero_warnv_p (tree t
, bool *strict_overflow_p
)
15769 tree type
= TREE_TYPE (t
);
15770 enum tree_code code
;
15772 /* Doing something useful for floating point would need more work. */
15773 if (!INTEGRAL_TYPE_P (type
) && !POINTER_TYPE_P (type
))
15776 code
= TREE_CODE (t
);
15777 switch (TREE_CODE_CLASS (code
))
15780 return tree_unary_nonzero_warnv_p (code
, type
, TREE_OPERAND (t
, 0),
15781 strict_overflow_p
);
15783 case tcc_comparison
:
15784 return tree_binary_nonzero_warnv_p (code
, type
,
15785 TREE_OPERAND (t
, 0),
15786 TREE_OPERAND (t
, 1),
15787 strict_overflow_p
);
15789 case tcc_declaration
:
15790 case tcc_reference
:
15791 return tree_single_nonzero_warnv_p (t
, strict_overflow_p
);
15799 case TRUTH_NOT_EXPR
:
15800 return tree_unary_nonzero_warnv_p (code
, type
, TREE_OPERAND (t
, 0),
15801 strict_overflow_p
);
15803 case TRUTH_AND_EXPR
:
15804 case TRUTH_OR_EXPR
:
15805 case TRUTH_XOR_EXPR
:
15806 return tree_binary_nonzero_warnv_p (code
, type
,
15807 TREE_OPERAND (t
, 0),
15808 TREE_OPERAND (t
, 1),
15809 strict_overflow_p
);
15816 case WITH_SIZE_EXPR
:
15818 return tree_single_nonzero_warnv_p (t
, strict_overflow_p
);
15820 case COMPOUND_EXPR
:
15823 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 1),
15824 strict_overflow_p
);
15827 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 0),
15828 strict_overflow_p
);
15831 return alloca_call_p (t
);
15839 /* Return true when T is an address and is known to be nonzero.
15840 Handle warnings about undefined signed overflow. */
15843 tree_expr_nonzero_p (tree t
)
15845 bool ret
, strict_overflow_p
;
15847 strict_overflow_p
= false;
15848 ret
= tree_expr_nonzero_warnv_p (t
, &strict_overflow_p
);
15849 if (strict_overflow_p
)
15850 fold_overflow_warning (("assuming signed overflow does not occur when "
15851 "determining that expression is always "
15853 WARN_STRICT_OVERFLOW_MISC
);
15857 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15858 attempt to fold the expression to a constant without modifying TYPE,
15861 If the expression could be simplified to a constant, then return
15862 the constant. If the expression would not be simplified to a
15863 constant, then return NULL_TREE. */
15866 fold_binary_to_constant (enum tree_code code
, tree type
, tree op0
, tree op1
)
15868 tree tem
= fold_binary (code
, type
, op0
, op1
);
15869 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
15872 /* Given the components of a unary expression CODE, TYPE and OP0,
15873 attempt to fold the expression to a constant without modifying
15876 If the expression could be simplified to a constant, then return
15877 the constant. If the expression would not be simplified to a
15878 constant, then return NULL_TREE. */
15881 fold_unary_to_constant (enum tree_code code
, tree type
, tree op0
)
15883 tree tem
= fold_unary (code
, type
, op0
);
15884 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
15887 /* If EXP represents referencing an element in a constant string
15888 (either via pointer arithmetic or array indexing), return the
15889 tree representing the value accessed, otherwise return NULL. */
15892 fold_read_from_constant_string (tree exp
)
15894 if ((TREE_CODE (exp
) == INDIRECT_REF
15895 || TREE_CODE (exp
) == ARRAY_REF
)
15896 && TREE_CODE (TREE_TYPE (exp
)) == INTEGER_TYPE
)
15898 tree exp1
= TREE_OPERAND (exp
, 0);
15901 location_t loc
= EXPR_LOCATION (exp
);
15903 if (TREE_CODE (exp
) == INDIRECT_REF
)
15904 string
= string_constant (exp1
, &index
);
15907 tree low_bound
= array_ref_low_bound (exp
);
15908 index
= fold_convert_loc (loc
, sizetype
, TREE_OPERAND (exp
, 1));
15910 /* Optimize the special-case of a zero lower bound.
15912 We convert the low_bound to sizetype to avoid some problems
15913 with constant folding. (E.g. suppose the lower bound is 1,
15914 and its mode is QI. Without the conversion,l (ARRAY
15915 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15916 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15917 if (! integer_zerop (low_bound
))
15918 index
= size_diffop_loc (loc
, index
,
15919 fold_convert_loc (loc
, sizetype
, low_bound
));
15925 && TYPE_MODE (TREE_TYPE (exp
)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string
)))
15926 && TREE_CODE (string
) == STRING_CST
15927 && TREE_CODE (index
) == INTEGER_CST
15928 && compare_tree_int (index
, TREE_STRING_LENGTH (string
)) < 0
15929 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string
))))
15931 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string
)))) == 1))
15932 return build_int_cst_type (TREE_TYPE (exp
),
15933 (TREE_STRING_POINTER (string
)
15934 [TREE_INT_CST_LOW (index
)]));
15939 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15940 an integer constant, real, or fixed-point constant.
15942 TYPE is the type of the result. */
15945 fold_negate_const (tree arg0
, tree type
)
15947 tree t
= NULL_TREE
;
15949 switch (TREE_CODE (arg0
))
15953 double_int val
= tree_to_double_int (arg0
);
15954 int overflow
= neg_double (val
.low
, val
.high
, &val
.low
, &val
.high
);
15956 t
= force_fit_type_double (type
, val
, 1,
15957 (overflow
| TREE_OVERFLOW (arg0
))
15958 && !TYPE_UNSIGNED (type
));
15963 t
= build_real (type
, real_value_negate (&TREE_REAL_CST (arg0
)));
15968 FIXED_VALUE_TYPE f
;
15969 bool overflow_p
= fixed_arithmetic (&f
, NEGATE_EXPR
,
15970 &(TREE_FIXED_CST (arg0
)), NULL
,
15971 TYPE_SATURATING (type
));
15972 t
= build_fixed (type
, f
);
15973 /* Propagate overflow flags. */
15974 if (overflow_p
| TREE_OVERFLOW (arg0
))
15975 TREE_OVERFLOW (t
) = 1;
15980 gcc_unreachable ();
15986 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15987 an integer constant or real constant.
15989 TYPE is the type of the result. */
15992 fold_abs_const (tree arg0
, tree type
)
15994 tree t
= NULL_TREE
;
15996 switch (TREE_CODE (arg0
))
16000 double_int val
= tree_to_double_int (arg0
);
16002 /* If the value is unsigned or non-negative, then the absolute value
16003 is the same as the ordinary value. */
16004 if (TYPE_UNSIGNED (type
)
16005 || !double_int_negative_p (val
))
16008 /* If the value is negative, then the absolute value is
16014 overflow
= neg_double (val
.low
, val
.high
, &val
.low
, &val
.high
);
16015 t
= force_fit_type_double (type
, val
, -1,
16016 overflow
| TREE_OVERFLOW (arg0
));
16022 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0
)))
16023 t
= build_real (type
, real_value_negate (&TREE_REAL_CST (arg0
)));
16029 gcc_unreachable ();
16035 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16036 constant. TYPE is the type of the result. */
16039 fold_not_const (const_tree arg0
, tree type
)
16043 gcc_assert (TREE_CODE (arg0
) == INTEGER_CST
);
16045 val
= double_int_not (tree_to_double_int (arg0
));
16046 return force_fit_type_double (type
, val
, 0, TREE_OVERFLOW (arg0
));
16049 /* Given CODE, a relational operator, the target type, TYPE and two
16050 constant operands OP0 and OP1, return the result of the
16051 relational operation. If the result is not a compile time
16052 constant, then return NULL_TREE. */
16055 fold_relational_const (enum tree_code code
, tree type
, tree op0
, tree op1
)
16057 int result
, invert
;
16059 /* From here on, the only cases we handle are when the result is
16060 known to be a constant. */
16062 if (TREE_CODE (op0
) == REAL_CST
&& TREE_CODE (op1
) == REAL_CST
)
16064 const REAL_VALUE_TYPE
*c0
= TREE_REAL_CST_PTR (op0
);
16065 const REAL_VALUE_TYPE
*c1
= TREE_REAL_CST_PTR (op1
);
16067 /* Handle the cases where either operand is a NaN. */
16068 if (real_isnan (c0
) || real_isnan (c1
))
16078 case UNORDERED_EXPR
:
16092 if (flag_trapping_math
)
16098 gcc_unreachable ();
16101 return constant_boolean_node (result
, type
);
16104 return constant_boolean_node (real_compare (code
, c0
, c1
), type
);
16107 if (TREE_CODE (op0
) == FIXED_CST
&& TREE_CODE (op1
) == FIXED_CST
)
16109 const FIXED_VALUE_TYPE
*c0
= TREE_FIXED_CST_PTR (op0
);
16110 const FIXED_VALUE_TYPE
*c1
= TREE_FIXED_CST_PTR (op1
);
16111 return constant_boolean_node (fixed_compare (code
, c0
, c1
), type
);
16114 /* Handle equality/inequality of complex constants. */
16115 if (TREE_CODE (op0
) == COMPLEX_CST
&& TREE_CODE (op1
) == COMPLEX_CST
)
16117 tree rcond
= fold_relational_const (code
, type
,
16118 TREE_REALPART (op0
),
16119 TREE_REALPART (op1
));
16120 tree icond
= fold_relational_const (code
, type
,
16121 TREE_IMAGPART (op0
),
16122 TREE_IMAGPART (op1
));
16123 if (code
== EQ_EXPR
)
16124 return fold_build2 (TRUTH_ANDIF_EXPR
, type
, rcond
, icond
);
16125 else if (code
== NE_EXPR
)
16126 return fold_build2 (TRUTH_ORIF_EXPR
, type
, rcond
, icond
);
16131 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16133 To compute GT, swap the arguments and do LT.
16134 To compute GE, do LT and invert the result.
16135 To compute LE, swap the arguments, do LT and invert the result.
16136 To compute NE, do EQ and invert the result.
16138 Therefore, the code below must handle only EQ and LT. */
16140 if (code
== LE_EXPR
|| code
== GT_EXPR
)
16145 code
= swap_tree_comparison (code
);
16148 /* Note that it is safe to invert for real values here because we
16149 have already handled the one case that it matters. */
16152 if (code
== NE_EXPR
|| code
== GE_EXPR
)
16155 code
= invert_tree_comparison (code
, false);
16158 /* Compute a result for LT or EQ if args permit;
16159 Otherwise return T. */
16160 if (TREE_CODE (op0
) == INTEGER_CST
&& TREE_CODE (op1
) == INTEGER_CST
)
16162 if (code
== EQ_EXPR
)
16163 result
= tree_int_cst_equal (op0
, op1
);
16164 else if (TYPE_UNSIGNED (TREE_TYPE (op0
)))
16165 result
= INT_CST_LT_UNSIGNED (op0
, op1
);
16167 result
= INT_CST_LT (op0
, op1
);
16174 return constant_boolean_node (result
, type
);
16177 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16178 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16182 fold_build_cleanup_point_expr (tree type
, tree expr
)
16184 /* If the expression does not have side effects then we don't have to wrap
16185 it with a cleanup point expression. */
16186 if (!TREE_SIDE_EFFECTS (expr
))
16189 /* If the expression is a return, check to see if the expression inside the
16190 return has no side effects or the right hand side of the modify expression
16191 inside the return. If either don't have side effects set we don't need to
16192 wrap the expression in a cleanup point expression. Note we don't check the
16193 left hand side of the modify because it should always be a return decl. */
16194 if (TREE_CODE (expr
) == RETURN_EXPR
)
16196 tree op
= TREE_OPERAND (expr
, 0);
16197 if (!op
|| !TREE_SIDE_EFFECTS (op
))
16199 op
= TREE_OPERAND (op
, 1);
16200 if (!TREE_SIDE_EFFECTS (op
))
16204 return build1 (CLEANUP_POINT_EXPR
, type
, expr
);
16207 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16208 of an indirection through OP0, or NULL_TREE if no simplification is
16212 fold_indirect_ref_1 (location_t loc
, tree type
, tree op0
)
16218 subtype
= TREE_TYPE (sub
);
16219 if (!POINTER_TYPE_P (subtype
))
16222 if (TREE_CODE (sub
) == ADDR_EXPR
)
16224 tree op
= TREE_OPERAND (sub
, 0);
16225 tree optype
= TREE_TYPE (op
);
16226 /* *&CONST_DECL -> to the value of the const decl. */
16227 if (TREE_CODE (op
) == CONST_DECL
)
16228 return DECL_INITIAL (op
);
16229 /* *&p => p; make sure to handle *&"str"[cst] here. */
16230 if (type
== optype
)
16232 tree fop
= fold_read_from_constant_string (op
);
16238 /* *(foo *)&fooarray => fooarray[0] */
16239 else if (TREE_CODE (optype
) == ARRAY_TYPE
16240 && type
== TREE_TYPE (optype
)
16241 && (!in_gimple_form
16242 || TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
))
16244 tree type_domain
= TYPE_DOMAIN (optype
);
16245 tree min_val
= size_zero_node
;
16246 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
16247 min_val
= TYPE_MIN_VALUE (type_domain
);
16249 && TREE_CODE (min_val
) != INTEGER_CST
)
16251 return build4_loc (loc
, ARRAY_REF
, type
, op
, min_val
,
16252 NULL_TREE
, NULL_TREE
);
16254 /* *(foo *)&complexfoo => __real__ complexfoo */
16255 else if (TREE_CODE (optype
) == COMPLEX_TYPE
16256 && type
== TREE_TYPE (optype
))
16257 return fold_build1_loc (loc
, REALPART_EXPR
, type
, op
);
16258 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16259 else if (TREE_CODE (optype
) == VECTOR_TYPE
16260 && type
== TREE_TYPE (optype
))
16262 tree part_width
= TYPE_SIZE (type
);
16263 tree index
= bitsize_int (0);
16264 return fold_build3_loc (loc
, BIT_FIELD_REF
, type
, op
, part_width
, index
);
16268 if (TREE_CODE (sub
) == POINTER_PLUS_EXPR
16269 && TREE_CODE (TREE_OPERAND (sub
, 1)) == INTEGER_CST
)
16271 tree op00
= TREE_OPERAND (sub
, 0);
16272 tree op01
= TREE_OPERAND (sub
, 1);
16275 if (TREE_CODE (op00
) == ADDR_EXPR
)
16278 op00
= TREE_OPERAND (op00
, 0);
16279 op00type
= TREE_TYPE (op00
);
16281 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16282 if (TREE_CODE (op00type
) == VECTOR_TYPE
16283 && type
== TREE_TYPE (op00type
))
16285 HOST_WIDE_INT offset
= tree_low_cst (op01
, 0);
16286 tree part_width
= TYPE_SIZE (type
);
16287 unsigned HOST_WIDE_INT part_widthi
= tree_low_cst (part_width
, 0)/BITS_PER_UNIT
;
16288 unsigned HOST_WIDE_INT indexi
= offset
* BITS_PER_UNIT
;
16289 tree index
= bitsize_int (indexi
);
16291 if (offset
/part_widthi
<= TYPE_VECTOR_SUBPARTS (op00type
))
16292 return fold_build3_loc (loc
,
16293 BIT_FIELD_REF
, type
, op00
,
16294 part_width
, index
);
16297 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16298 else if (TREE_CODE (op00type
) == COMPLEX_TYPE
16299 && type
== TREE_TYPE (op00type
))
16301 tree size
= TYPE_SIZE_UNIT (type
);
16302 if (tree_int_cst_equal (size
, op01
))
16303 return fold_build1_loc (loc
, IMAGPART_EXPR
, type
, op00
);
16305 /* ((foo *)&fooarray)[1] => fooarray[1] */
16306 else if (TREE_CODE (op00type
) == ARRAY_TYPE
16307 && type
== TREE_TYPE (op00type
))
16309 tree type_domain
= TYPE_DOMAIN (op00type
);
16310 tree min_val
= size_zero_node
;
16311 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
16312 min_val
= TYPE_MIN_VALUE (type_domain
);
16313 op01
= size_binop_loc (loc
, EXACT_DIV_EXPR
, op01
,
16314 TYPE_SIZE_UNIT (type
));
16315 op01
= size_binop_loc (loc
, PLUS_EXPR
, op01
, min_val
);
16316 return build4_loc (loc
, ARRAY_REF
, type
, op00
, op01
,
16317 NULL_TREE
, NULL_TREE
);
16322 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16323 if (TREE_CODE (TREE_TYPE (subtype
)) == ARRAY_TYPE
16324 && type
== TREE_TYPE (TREE_TYPE (subtype
))
16325 && (!in_gimple_form
16326 || TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
))
16329 tree min_val
= size_zero_node
;
16330 sub
= build_fold_indirect_ref_loc (loc
, sub
);
16331 type_domain
= TYPE_DOMAIN (TREE_TYPE (sub
));
16332 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
16333 min_val
= TYPE_MIN_VALUE (type_domain
);
16335 && TREE_CODE (min_val
) != INTEGER_CST
)
16337 return build4_loc (loc
, ARRAY_REF
, type
, sub
, min_val
, NULL_TREE
,
16344 /* Builds an expression for an indirection through T, simplifying some
16348 build_fold_indirect_ref_loc (location_t loc
, tree t
)
16350 tree type
= TREE_TYPE (TREE_TYPE (t
));
16351 tree sub
= fold_indirect_ref_1 (loc
, type
, t
);
16356 return build1_loc (loc
, INDIRECT_REF
, type
, t
);
16359 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16362 fold_indirect_ref_loc (location_t loc
, tree t
)
16364 tree sub
= fold_indirect_ref_1 (loc
, TREE_TYPE (t
), TREE_OPERAND (t
, 0));
16372 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16373 whose result is ignored. The type of the returned tree need not be
16374 the same as the original expression. */
16377 fold_ignored_result (tree t
)
16379 if (!TREE_SIDE_EFFECTS (t
))
16380 return integer_zero_node
;
16383 switch (TREE_CODE_CLASS (TREE_CODE (t
)))
16386 t
= TREE_OPERAND (t
, 0);
16390 case tcc_comparison
:
16391 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
16392 t
= TREE_OPERAND (t
, 0);
16393 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 0)))
16394 t
= TREE_OPERAND (t
, 1);
16399 case tcc_expression
:
16400 switch (TREE_CODE (t
))
16402 case COMPOUND_EXPR
:
16403 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
16405 t
= TREE_OPERAND (t
, 0);
16409 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1))
16410 || TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 2)))
16412 t
= TREE_OPERAND (t
, 0);
16425 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
16426 This can only be applied to objects of a sizetype. */
16429 round_up_loc (location_t loc
, tree value
, int divisor
)
16431 tree div
= NULL_TREE
;
16433 gcc_assert (divisor
> 0);
16437 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16438 have to do anything. Only do this when we are not given a const,
16439 because in that case, this check is more expensive than just
16441 if (TREE_CODE (value
) != INTEGER_CST
)
16443 div
= build_int_cst (TREE_TYPE (value
), divisor
);
16445 if (multiple_of_p (TREE_TYPE (value
), value
, div
))
16449 /* If divisor is a power of two, simplify this to bit manipulation. */
16450 if (divisor
== (divisor
& -divisor
))
16452 if (TREE_CODE (value
) == INTEGER_CST
)
16454 double_int val
= tree_to_double_int (value
);
16457 if ((val
.low
& (divisor
- 1)) == 0)
16460 overflow_p
= TREE_OVERFLOW (value
);
16461 val
.low
&= ~(divisor
- 1);
16462 val
.low
+= divisor
;
16470 return force_fit_type_double (TREE_TYPE (value
), val
,
16477 t
= build_int_cst (TREE_TYPE (value
), divisor
- 1);
16478 value
= size_binop_loc (loc
, PLUS_EXPR
, value
, t
);
16479 t
= build_int_cst (TREE_TYPE (value
), -divisor
);
16480 value
= size_binop_loc (loc
, BIT_AND_EXPR
, value
, t
);
16486 div
= build_int_cst (TREE_TYPE (value
), divisor
);
16487 value
= size_binop_loc (loc
, CEIL_DIV_EXPR
, value
, div
);
16488 value
= size_binop_loc (loc
, MULT_EXPR
, value
, div
);
16494 /* Likewise, but round down. */
16497 round_down_loc (location_t loc
, tree value
, int divisor
)
16499 tree div
= NULL_TREE
;
16501 gcc_assert (divisor
> 0);
16505 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16506 have to do anything. Only do this when we are not given a const,
16507 because in that case, this check is more expensive than just
16509 if (TREE_CODE (value
) != INTEGER_CST
)
16511 div
= build_int_cst (TREE_TYPE (value
), divisor
);
16513 if (multiple_of_p (TREE_TYPE (value
), value
, div
))
16517 /* If divisor is a power of two, simplify this to bit manipulation. */
16518 if (divisor
== (divisor
& -divisor
))
16522 t
= build_int_cst (TREE_TYPE (value
), -divisor
);
16523 value
= size_binop_loc (loc
, BIT_AND_EXPR
, value
, t
);
16528 div
= build_int_cst (TREE_TYPE (value
), divisor
);
16529 value
= size_binop_loc (loc
, FLOOR_DIV_EXPR
, value
, div
);
16530 value
= size_binop_loc (loc
, MULT_EXPR
, value
, div
);
16536 /* Returns the pointer to the base of the object addressed by EXP and
16537 extracts the information about the offset of the access, storing it
16538 to PBITPOS and POFFSET. */
16541 split_address_to_core_and_offset (tree exp
,
16542 HOST_WIDE_INT
*pbitpos
, tree
*poffset
)
16545 enum machine_mode mode
;
16546 int unsignedp
, volatilep
;
16547 HOST_WIDE_INT bitsize
;
16548 location_t loc
= EXPR_LOCATION (exp
);
16550 if (TREE_CODE (exp
) == ADDR_EXPR
)
16552 core
= get_inner_reference (TREE_OPERAND (exp
, 0), &bitsize
, pbitpos
,
16553 poffset
, &mode
, &unsignedp
, &volatilep
,
16555 core
= build_fold_addr_expr_loc (loc
, core
);
16561 *poffset
= NULL_TREE
;
16567 /* Returns true if addresses of E1 and E2 differ by a constant, false
16568 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16571 ptr_difference_const (tree e1
, tree e2
, HOST_WIDE_INT
*diff
)
16574 HOST_WIDE_INT bitpos1
, bitpos2
;
16575 tree toffset1
, toffset2
, tdiff
, type
;
16577 core1
= split_address_to_core_and_offset (e1
, &bitpos1
, &toffset1
);
16578 core2
= split_address_to_core_and_offset (e2
, &bitpos2
, &toffset2
);
16580 if (bitpos1
% BITS_PER_UNIT
!= 0
16581 || bitpos2
% BITS_PER_UNIT
!= 0
16582 || !operand_equal_p (core1
, core2
, 0))
16585 if (toffset1
&& toffset2
)
16587 type
= TREE_TYPE (toffset1
);
16588 if (type
!= TREE_TYPE (toffset2
))
16589 toffset2
= fold_convert (type
, toffset2
);
16591 tdiff
= fold_build2 (MINUS_EXPR
, type
, toffset1
, toffset2
);
16592 if (!cst_and_fits_in_hwi (tdiff
))
16595 *diff
= int_cst_value (tdiff
);
16597 else if (toffset1
|| toffset2
)
16599 /* If only one of the offsets is non-constant, the difference cannot
16606 *diff
+= (bitpos1
- bitpos2
) / BITS_PER_UNIT
;
16610 /* Simplify the floating point expression EXP when the sign of the
16611 result is not significant. Return NULL_TREE if no simplification
16615 fold_strip_sign_ops (tree exp
)
16618 location_t loc
= EXPR_LOCATION (exp
);
16620 switch (TREE_CODE (exp
))
16624 arg0
= fold_strip_sign_ops (TREE_OPERAND (exp
, 0));
16625 return arg0
? arg0
: TREE_OPERAND (exp
, 0);
16629 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp
))))
16631 arg0
= fold_strip_sign_ops (TREE_OPERAND (exp
, 0));
16632 arg1
= fold_strip_sign_ops (TREE_OPERAND (exp
, 1));
16633 if (arg0
!= NULL_TREE
|| arg1
!= NULL_TREE
)
16634 return fold_build2_loc (loc
, TREE_CODE (exp
), TREE_TYPE (exp
),
16635 arg0
? arg0
: TREE_OPERAND (exp
, 0),
16636 arg1
? arg1
: TREE_OPERAND (exp
, 1));
16639 case COMPOUND_EXPR
:
16640 arg0
= TREE_OPERAND (exp
, 0);
16641 arg1
= fold_strip_sign_ops (TREE_OPERAND (exp
, 1));
16643 return fold_build2_loc (loc
, COMPOUND_EXPR
, TREE_TYPE (exp
), arg0
, arg1
);
16647 arg0
= fold_strip_sign_ops (TREE_OPERAND (exp
, 1));
16648 arg1
= fold_strip_sign_ops (TREE_OPERAND (exp
, 2));
16650 return fold_build3_loc (loc
,
16651 COND_EXPR
, TREE_TYPE (exp
), TREE_OPERAND (exp
, 0),
16652 arg0
? arg0
: TREE_OPERAND (exp
, 1),
16653 arg1
? arg1
: TREE_OPERAND (exp
, 2));
16658 const enum built_in_function fcode
= builtin_mathfn_code (exp
);
16661 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
16662 /* Strip copysign function call, return the 1st argument. */
16663 arg0
= CALL_EXPR_ARG (exp
, 0);
16664 arg1
= CALL_EXPR_ARG (exp
, 1);
16665 return omit_one_operand_loc (loc
, TREE_TYPE (exp
), arg0
, arg1
);
16668 /* Strip sign ops from the argument of "odd" math functions. */
16669 if (negate_mathfn_p (fcode
))
16671 arg0
= fold_strip_sign_ops (CALL_EXPR_ARG (exp
, 0));
16673 return build_call_expr_loc (loc
, get_callee_fndecl (exp
), 1, arg0
);