/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
   2012 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "ggc.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"
#include "tree-flow.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc != UNKNOWN_LOCATION ? tloc : loc;
}
/* Similar to protected_set_expr_location, but never modify x in place;
   if the location can and needs to be set, unshare x.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
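
/* Worked example: with 32-bit values, a = 0x7fffffff and b = 1 wrap to
   sum = 0x80000000.  A and B agree in sign while A and SUM differ, so
   ~(a ^ b) & (a ^ sum) has the sign bit set and the macro yields
   nonzero.  A minimal standalone check (illustration only):

     int a = 0x7fffffff, b = 1;
     int sum = (int) ((unsigned int) a + (unsigned int) b);
     assert (OVERFLOW_SUM_SIGN (a, b, sum));  */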
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is according to operand two, that
     does the correct thing for POINTER_PLUS_EXPR where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));

  quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
                                          uns, code, &rem);

  if (rem.is_zero ())
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning ((enum warn_strict_overflow_code) code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}
/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
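
/* For example, sin is odd (sin (-x) == -sin (x)), so -sin (x) can be
   folded to sin (-x), saving a negation.  The rint/nearbyint group is
   odd only when the rounding mode cannot flip the result across zero,
   hence the !flag_rounding_math guard above.  */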
/* Check whether we may negate an integer constant T without causing
   overflow.  */

static bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
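
/* For example, in a signed 8-bit type the value -128 is rejected: VAL
   equals (unsigned HOST_WIDE_INT) 1 << 7, and its negation +128 is not
   representable.  Every other value of the type negates safely.  */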
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
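
/* For example, with CODE == PLUS_EXPR, splitting IN = a + 4 + C (where
   C is TREE_CONSTANT but not a literal, e.g. the folded address of a
   static variable) yields *LITP = 4, *CONP = C, and returns the
   variable part a.  The names here are purely illustrative.  */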
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
                   int overflowable)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = op1 | op2;
      break;

    case BIT_XOR_EXPR:
      res = op1 ^ op2;
      break;

    case BIT_AND_EXPR:
      res = op1 & op2;
      break;

    case RSHIFT_EXPR:
      res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      res = op1.add_with_sign (op2, false, &overflow);
      break;

    case MINUS_EXPR:
      /* FIXME(crowl) Remove this code if the replacement works.
         neg_double (op2.low, op2.high, &res.low, &res.high);
         add_double (op1.low, op1.high, res.low, res.high,
                     &res.low, &res.high);
         overflow = OVERFLOW_SUM_SIGN (res.high, op2.high, op1.high);  */
      res = op1.add_with_sign (-op2, false, &overflow);
      break;

    case MULT_EXPR:
      res = op1.mul_with_sign (op2, false, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      /* ??? Need quad precision, or an additional shift operand
         to the multiply primitive, to handle very large highparts.  */
      if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
        return NULL_TREE;
      tmp = op1.mul_with_sign (op2, uns, &overflow);
      res = tmp.rshift (TYPE_PRECISION (type), TYPE_PRECISION (type), !uns);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            op1.low += op2.low - 1;

          res.low = op1.low / op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      if (op2.is_one ())
        {
          res = op1;
          break;
        }
      if (op1 == op2 && !op1.is_zero ())
        {
          res = double_int_one;
          break;
        }
      overflow = div_and_round_double (code, uns,
                                       op1.low, op1.high, op2.low, op2.high,
                                       &res.low, &res.high,
                                       &tmp.low, &tmp.high);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            op1.low += op2.low - 1;
          res.low = op1.low % op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       op1.low, op1.high, op2.low, op2.high,
                                       &tmp.low, &tmp.high,
                                       &res.low, &res.high);
      break;

    case MIN_EXPR:
      res = op1.min (op2, uns);
      break;

    case MAX_EXPR:
      res = op1.max (op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
                             (!uns && overflow)
                             | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
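
/* For example, adding INT_MAX and 1 as INTEGER_CSTs of a signed 32-bit
   type wraps to INT_MIN, and because the type is signed the overflow
   is recorded by setting TREE_OVERFLOW on the result, letting callers
   decide whether that matters.  */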
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ... */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  return NULL_TREE;
}
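
/* A sketch of the two complex-division methods above on plain doubles
   (illustration only; const_binop performs them on constant trees).
   For a = ar + i*ai divided by b = br + i*bi:

     // flag_complex_method == 0: straightforward expansion.
     double t  = br * br + bi * bi;
     double tr = (ar * br + ai * bi) / t;
     double ti = (ai * br - ar * bi) / t;

   The wide-range variant (commonly known as Smith's algorithm) instead
   scales by a ratio of the divisor's parts so that no intermediate
   square of the full divisor magnitude is formed, e.g. when |br| < |bi|:

     double ratio = br / bi;
     double div = bi + br * ratio;
     double tr = (ar * ratio + ai) / div;
     double ti = (ai * ratio - ar) / div;  */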
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          val = tree_to_double_int (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              val = tree_to_double_int (ut);
            }
        }
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
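
/* For example, converting the REAL_CST 1e30 to a 32-bit signed integer
   type saturates to the type's maximum (2147483647) with TREE_OVERFLOW
   set, and converting a NaN yields 0 with TREE_OVERFLOW set.  */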
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  enum machine_mode mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
                          HOST_BITS_PER_DOUBLE_INT,
                          SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     We do this by adding 1 to temp when the fractional bits are not
     all zero.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
                             (temp.is_negative ()
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
                                       TREE_INT_CST (arg1),
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}
/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
          && (TYPE_ADDR_SPACE (TREE_TYPE (type))
              != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
        return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, REALPART_EXPR,
                                                  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == FIXED_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1_loc (loc, NOP_EXPR, type, arg);

        case FIXED_POINT_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
          || TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            goto fold_convert_exit;
        }

      switch (TREE_CODE (orig))
        {
        case FIXED_POINT_TYPE:
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case BOOLEAN_TYPE:
        case REAL_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
        case FIXED_POINT_TYPE:
          return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                  fold_convert_loc (loc, TREE_TYPE (type), arg),
                                  fold_convert_loc (loc, TREE_TYPE (type),
                                                    integer_zero_node));
        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 0));
                ipart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 1));
                return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
              }

            arg = save_expr (arg);
            rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
            ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
            rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
            ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
            return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
          }

        default:
          gcc_unreachable ();
        }

    case VECTOR_TYPE:
      if (integer_zerop (arg))
        return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
                  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      gcc_unreachable ();
    }
 fold_convert_exit:
  return protected_set_expr_location_unshare (tem, loc);
}
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (const_tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case LABEL_DECL:
    case FUNCTION_DECL:
    case SSA_NAME:

    case COMPONENT_REF:
    case MEM_REF:
    case INDIRECT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
    case OBJ_TYPE_REF:

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case SAVE_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case TARGET_EXPR:
    case COND_EXPR:
    case BIND_EXPR:
      break;

    default:
      /* Assume the worst for front-end tree codes.  */
      if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
        break;
      return false;
    }

  return true;
}
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue_loc (location_t loc, tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;
  return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
}
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue_loc (location_t loc, tree x)
{
  if (pedantic_lvalues)
    return non_lvalue_loc (loc, x);

  return protected_set_expr_location_unshare (x, loc);
}
/* Given a tree comparison code, return the code that is the logical inverse.
   It is generally not safe to do this for floating-point comparisons, except
   for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
   ERROR_MARK in this case.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
      && code != ORDERED_EXPR && code != UNORDERED_EXPR)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
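
/* Note that under possible NaNs the inverse of x < y is UNGE_EXPR
   ("x >= y or unordered"), not GE_EXPR: when x is NaN, x < y is false,
   so the logical inverse must be true, which plain x >= y is not.  */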
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case LTGT_EXPR:
    case UNEQ_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    case UNGT_EXPR:
      return UNLT_EXPR;
    case UNGE_EXPR:
      return UNLE_EXPR;
    case UNLT_EXPR:
      return UNGT_EXPR;
    case UNLE_EXPR:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
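
/* These two helpers rely on the bit encoding of enum comparison_code:
   COMPCODE_LT (1), COMPCODE_EQ (2), COMPCODE_GT (4) and COMPCODE_UNORD
   (8) are single bits, so e.g. COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ)
   and COMPCODE_NE == (COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD).  */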

/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

tree
combine_comparisons (location_t loc,
		     enum tree_code code, enum tree_code lcode,
		     enum tree_code rcode, tree truth_type,
		     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  int compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
	 which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
	compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
	compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
	 under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
		   && (lcompcode != COMPCODE_EQ)
		   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
		   && (rcompcode != COMPCODE_EQ)
		   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
		  && (compcode != COMPCODE_EQ)
		  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
	 such that the RHS, if evaluated, will never trap.  For
	 example, in ORD (x, y) && (x < y), we evaluate the RHS only
	 if neither x nor y is NaN.  (This is a mixed blessing: for
	 example, the expression above will never trap, hence
	 optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
	  || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
	rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
	 trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
	  && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
	return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
	return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    {
      enum tree_code tcode;

      tcode = compcode_to_comparison ((enum comparison_code) compcode);
      return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
    }
}
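
/* Two concrete examples of the combination, for integer operands x
   and y (so honor_nans is false):

     (x <= y) && (x != y):  COMPCODE_LE & COMPCODE_NE == COMPCODE_LT,
			    so the result is the fold of x < y.
     (x < y) || (x == y):   COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE,
			    so the result is the fold of x <= y.

   For floating point, the UNORD bit and the trapping checks above
   decide whether the combined comparison is still exact.  */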

/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */
int
operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
      || TREE_TYPE (arg0) == error_mark_node
      || TREE_TYPE (arg1) == error_mark_node)
    return 0;

  /* Similar, if either does not have a type (like a released SSA name),
     they aren't equal.  */
  if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
    return 0;

  /* Check equality of integer constants before bailing out due to
     precision differences.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    return tree_int_cst_equal (arg0, arg1);

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  As pointers
     strictly don't have a signedness, require either two pointers or
     two non-pointers as well.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
      || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* We cannot consider pointers to different address space equal.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
      && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
	  != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
    return 0;

  /* If both types don't have the same precision, then it is not safe
     to strip NOPs.  */
  if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* In case both args are comparisons but with different comparison
     code, try to swap the comparison operands of one arg to produce
     a match and compare that variant.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      && COMPARISON_CLASS_P (arg0)
      && COMPARISON_CLASS_P (arg1))
    {
      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));

      if (TREE_CODE (arg0) == swap_code)
	return operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 1), flags)
	       && operand_equal_p (TREE_OPERAND (arg0, 1),
				   TREE_OPERAND (arg1, 0), flags);
    }

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
	 Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.
     If we are taking an invariant address of two identical objects
     they are necessarily equal as well.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
	  || (flags & OEP_CONSTANT_ADDRESS_OF)
	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
	return tree_int_cst_equal (arg0, arg1);

      case FIXED_CST:
	return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
				       TREE_FIXED_CST (arg1));

      case REAL_CST:
	if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
				   TREE_REAL_CST (arg1)))
	  return 1;

	if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
	  {
	    /* If we do not distinguish between signed and unsigned zero,
	       consider them equal.  */
	    if (real_zerop (arg0) && real_zerop (arg1))
	      return 1;
	  }
	return 0;

      case VECTOR_CST:
	{
	  unsigned i;

	  if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
	    return 0;

	  for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
	    if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
				  VECTOR_CST_ELT (arg1, i), flags))
	      return 0;

	  return 1;
	}

      case COMPLEX_CST:
	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
				 flags)
		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
				    flags));

      case STRING_CST:
	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
		&& ! memcmp (TREE_STRING_POINTER (arg0),
			     TREE_STRING_POINTER (arg1),
			     TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
				TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
				? OEP_CONSTANT_ADDRESS_OF : 0);

      default:
	break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;

/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, both
   must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),	\
				    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N)				\
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
	{
	CASE_CONVERT:
	case FIX_TRUNC_EXPR:
	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
	    return 0;
	  break;
	default:
	  break;
	}

      return OP_SAME (0);

    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
	return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 1), flags)
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
	 dereferencing contain a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
	  || TREE_SIDE_EFFECTS (arg1))
	return 0;

      switch (TREE_CODE (arg0))
	{
	case INDIRECT_REF:
	case REALPART_EXPR:
	case IMAGPART_EXPR:
	  return OP_SAME (0);

	case TARGET_MEM_REF:
	  /* Require equal extra operands and then fall through to MEM_REF
	     handling of the two common operands.  */
	  if (!OP_SAME_WITH_NULL (2)
	      || !OP_SAME_WITH_NULL (3)
	      || !OP_SAME_WITH_NULL (4))
	    return 0;
	  /* Fallthru.  */
	case MEM_REF:
	  /* Require equal access sizes, and similar pointer types.
	     We can have incomplete types for array references of
	     variable-sized arrays from the Fortran frontend
	     though.  Also verify the types are compatible.  */
	  return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
		   || (TYPE_SIZE (TREE_TYPE (arg0))
		       && TYPE_SIZE (TREE_TYPE (arg1))
		       && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
					   TYPE_SIZE (TREE_TYPE (arg1)),
					   flags)))
		  && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
		      == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
		  && OP_SAME (0) && OP_SAME (1));

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  /* Operands 2 and 3 may be null.
	     Compare the array index by value if it is constant first as we
	     may have different types but same value here.  */
	  return (OP_SAME (0)
		  && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
					  TREE_OPERAND (arg1, 1))
		      || OP_SAME (1))
		  && OP_SAME_WITH_NULL (2)
		  && OP_SAME_WITH_NULL (3));

	case COMPONENT_REF:
	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
	     may be NULL when we're called to compare MEM_EXPRs.  */
	  return OP_SAME_WITH_NULL (0)
		 && OP_SAME (1)
		 && OP_SAME_WITH_NULL (2);

	case BIT_FIELD_REF:
	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

	default:
	  return 0;
	}

    case tcc_expression:
      switch (TREE_CODE (arg0))
	{
	case ADDR_EXPR:
	case TRUTH_NOT_EXPR:
	  return OP_SAME (0);

	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	  return OP_SAME (0) && OP_SAME (1);

	case FMA_EXPR:
	case WIDEN_MULT_PLUS_EXPR:
	case WIDEN_MULT_MINUS_EXPR:
	  if (!OP_SAME (2))
	    return 0;
	  /* The multiplication operands are commutative.  */
	  /* FALLTHRU */

	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  if (OP_SAME (0) && OP_SAME (1))
	    return 1;

	  /* Otherwise take into account this is a commutative operation.  */
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 1), flags)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 0), flags));

	case COND_EXPR:
	case VEC_COND_EXPR:
	case DOT_PROD_EXPR:
	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

	default:
	  return 0;
	}

    case tcc_vl_exp:
      switch (TREE_CODE (arg0))
	{
	case CALL_EXPR:
	  /* If the CALL_EXPRs call different functions, then they
	     clearly can not be equal.  */
	  if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
				 flags))
	    return 0;

	  {
	    unsigned int cef = call_expr_flags (arg0);
	    if (flags & OEP_PURE_SAME)
	      cef &= ECF_CONST | ECF_PURE;
	    else
	      cef &= ECF_CONST;
	    if (!cef)
	      return 0;
	  }

	  /* Now see if all the arguments are the same.  */
	  {
	    const_call_expr_arg_iterator iter0, iter1;
	    const_tree a0, a1;
	    for (a0 = first_const_call_expr_arg (arg0, &iter0),
		   a1 = first_const_call_expr_arg (arg1, &iter1);
		 a0 && a1;
		 a0 = next_const_call_expr_arg (&iter0),
		   a1 = next_const_call_expr_arg (&iter1))
	      if (! operand_equal_p (a0, a1, flags))
		return 0;

	    /* If we get here and both argument lists are exhausted
	       then the CALL_EXPRs are equal.  */
	    return ! (a0 || a1);
	  }

	default:
	  return 0;
	}

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
	      && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
	      && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}
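
/* For example, operand_equal_p (a + b, b + a, 0) returns 1 because
   PLUS_EXPR is commutative, while for the REAL_CSTs -0.0 and 0.0 it
   returns 0 when the mode honors signed zeros, even though the C
   expression -0.0 == 0.0 is true: the two constants are
   distinguishable, which is what this predicate tests.  */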

/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
	 to match the longer operand.  */
      primarg1 = fold_convert (signed_or_unsigned_type_for
			       (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
	return 1;
    }

  return 0;
}

/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
	       || code == COMPOUND_EXPR))
    tclass = tcc_binary;

  else if (tclass == tcc_expression && code == SAVE_EXPR
	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
	 too complex to handle.  */
      if (*cval1 || *cval2)
	return 0;

      tclass = tcc_unary;
      *save_p = 1;
    }

  switch (tclass)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
				      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
				     cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
					cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
					cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
	 the second operand, we know *CVAL1 can't be zero.  It must be that
	 one side of the comparison is each of the values; test for the
	 case where this isn't true by failing if the two operands
	 are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
			   TREE_OPERAND (arg, 1), 0))
	return 0;

      if (*cval1 == 0)
	*cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
	;
      else
	return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
	;
      else
	return 0;

      return 1;

    default:
      return 0;
    }
}
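
/* For example, (a < b) | (a == b) satisfies this predicate: the first
   comparison records *CVAL1 = a and *CVAL2 = b and the second one
   matches them, so 1 is returned.  (a < b) | (a == c) does not, since
   a third value c appears in the comparisons.  */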

/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (location_t loc, tree arg, tree old0, tree new0,
	    tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    tclass = tcc_binary;

  switch (tclass)
    {
    case tcc_unary:
      return fold_build1_loc (loc, code, type,
			      eval_subst (loc, TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2_loc (loc, code, type,
			      eval_subst (loc, TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1),
			      eval_subst (loc, TREE_OPERAND (arg, 1),
					  old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
	{
	case SAVE_EXPR:
	  return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
			     old1, new1);

	case COMPOUND_EXPR:
	  return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
			     old1, new1);

	case COND_EXPR:
	  return fold_build3_loc (loc, code, type,
				  eval_subst (loc, TREE_OPERAND (arg, 0),
					      old0, new0, old1, new1),
				  eval_subst (loc, TREE_OPERAND (arg, 1),
					      old0, new0, old1, new1),
				  eval_subst (loc, TREE_OPERAND (arg, 2),
					      old0, new0, old1, new1));
	default:
	  break;
	}
      /* Fall through - ???  */

    case tcc_comparison:
      {
	tree arg0 = TREE_OPERAND (arg, 0);
	tree arg1 = TREE_OPERAND (arg, 1);

	/* We need to check both for exact equality and tree equality.  The
	   former will be true if the operand has a side-effect.  In that
	   case, we know the operand occurred exactly once.  */

	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
	  arg0 = new0;
	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
	  arg0 = new1;

	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
	  arg1 = new0;
	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
	  arg1 = new1;

	return fold_build2_loc (loc, code, type, arg0, arg1);
      }

    default:
      return arg;
    }
}
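
/* For example, with OLD0 = a, NEW0 = 1, OLD1 = b, NEW1 = 0, the tree
   a < b is rewritten as 1 < 0 and folded to a constant by
   fold_build2_loc.  This lets fold evaluate a comparison under the
   assumption that an enclosing test has already pinned down its
   operands.  */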

/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement casted to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1_loc (loc, NOP_EXPR, void_type_node,
		       fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2_loc (loc, COMPOUND_EXPR, type,
		       fold_ignored_result (omitted), t);

  return non_lvalue_loc (loc, t);
}

/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
			       tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement casted to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1_loc (loc, NOP_EXPR, void_type_node,
		       fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2_loc (loc, COMPOUND_EXPR, type,
		       fold_ignored_result (omitted), t);

  return pedantic_non_lvalue_loc (loc, t);
}

/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands_loc (location_t loc, tree type, tree result,
		       tree omitted1, tree omitted2)
{
  tree t = fold_convert_loc (loc, type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
  if (TREE_SIDE_EFFECTS (omitted1))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
}
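
/* For example, when fold rewrites f () * 0 to 0 it cannot simply drop
   the call: omit_one_operand_loc produces the equivalent of (f (), 0)
   so the side effects of the omitted operand are still evaluated,
   whereas x * 0 with a side-effect-free x folds to plain 0.  */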

/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

static tree
fold_truth_not_expr (location_t loc, tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  location_t loc1, loc2;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
	  && flag_trapping_math
	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
	  && code != NE_EXPR && code != EQ_EXPR)
	return NULL_TREE;

      code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
      if (code == ERROR_MARK)
	return NULL_TREE;

      return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
			 TREE_OPERAND (arg, 1));
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_OR_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_AND_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
	 unless the second operand is a TRUTH_NOT_EXPR in which case our
	 result is the XOR of the first operand with the inside of the
	 negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
	return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
			   TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
	return build2_loc (loc, TRUTH_XOR_EXPR, type,
			   invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
			   TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ORIF_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
	tree arg1 = TREE_OPERAND (arg, 1);
	tree arg2 = TREE_OPERAND (arg, 2);

	loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
	loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);

	/* A COND_EXPR may have a throw as one operand, which
	   then has void type.  Just leave void operands
	   as they are.  */
	return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
			   VOID_TYPE_P (TREE_TYPE (arg1))
			   ? arg1 : invert_truthvalue_loc (loc1, arg1),
			   VOID_TYPE_P (TREE_TYPE (arg2))
			   ? arg2 : invert_truthvalue_loc (loc2, arg2));
      }

    case COMPOUND_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, COMPOUND_EXPR, type,
			 TREE_OPERAND (arg, 0),
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));

    CASE_CONVERT:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
	return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

      /* ... fall through ...  */

    case FLOAT_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, TREE_CODE (arg), type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
	return NULL_TREE;
      return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));

    case SAVE_EXPR:
      return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, CLEANUP_POINT_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    default:
      return NULL_TREE;
    }
}
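
/* For example, the negation of a && b is built as !a || !b, and the
   negation of an integer comparison uses the inverted comparison code
   directly, so !(x < y) becomes x >= y with no TRUTH_NOT_EXPR left in
   the tree.  */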

/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

tree
invert_truthvalue_loc (location_t loc, tree arg)
{
  tree tem;

  if (TREE_CODE (arg) == ERROR_MARK)
    return arg;

  tem = fold_truth_not_expr (loc, arg);
  if (!tem)
    tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);

  return tem;
}

/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
	(A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (location_t loc, enum tree_code code, tree type,
		     tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
	  && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  common = fold_convert_loc (loc, type, common);
  left = fold_convert_loc (loc, type, left);
  right = fold_convert_loc (loc, type, right);
  return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
			  fold_build2_loc (loc, code, type, left, right));
}
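
/* For example, (x | 3) & (x | 5) is rewritten here as x | (3 & 5),
   which then folds to x | 1, saving one bit operation.  */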

/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
   with code CODE.  This optimization is unsafe.  */

static tree
distribute_real_division (location_t loc, enum tree_code code, tree type,
			  tree arg0, tree arg1)
{
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
			  TREE_OPERAND (arg1, 1), 0))
    return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
			    fold_build2_loc (loc, code, type,
					     TREE_OPERAND (arg0, 0),
					     TREE_OPERAND (arg1, 0)),
			    TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
		       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      if (!mul0)
	real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
	real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2_loc (loc, MULT_EXPR, type,
			      TREE_OPERAND (arg0, 0),
			      build_real (type, r0));
    }

  return NULL_TREE;
}
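
/* For example, x / 2.0 + x / 4.0 becomes x * (1/2.0 + 1/4.0), i.e.
   x * 0.75, trading two divisions for one multiplication.  The result
   may differ from the original in rounding, which is why callers only
   apply this under flag_unsafe_math_optimizations.  */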

/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

static tree
make_bit_field_ref (location_t loc, tree inner, tree type,
		    HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
{
  tree result, bftype;

  if (bitpos == 0)
    {
      tree size = TYPE_SIZE (TREE_TYPE (inner));
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
	   || POINTER_TYPE_P (TREE_TYPE (inner)))
	  && host_integerp (size, 0)
	  && tree_low_cst (size, 0) == bitsize)
	return fold_convert_loc (loc, type, inner);
    }

  bftype = type;
  if (TYPE_PRECISION (bftype) != bitsize
      || TYPE_UNSIGNED (bftype) == !unsignedp)
    bftype = build_nonstandard_integer_type (bitsize, 0);

  result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
		       size_int (bitsize), bitsize_int (bitpos));

  if (bftype != type)
    result = fold_convert_loc (loc, type, result);

  return result;
}

/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (location_t loc, enum tree_code code,
			    tree compare_type, tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* In the strict volatile bitfields case, doing code changes here may prevent
     other optimizations, in particular in a SLOW_BYTE_ACCESS setting.  */
  if (flag_strict_volatile_bitfields > 0)
    return 0;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
				&lunsignedp, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
	 sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
				    &runsignedp, &rvolatilep, false);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
	  || lunsignedp != runsignedp || offset != 0
	  || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
	return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  if (lvolatilep
      && GET_MODE_BITSIZE (lmode) > 0
      && flag_strict_volatile_bitfields > 0)
    nmode = lmode;
  else
    nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
			   const_p ? TYPE_ALIGN (TREE_TYPE (linner))
			   : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
				  TYPE_ALIGN (TREE_TYPE (rinner))),
			   word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_cst_type (unsigned_type, -1);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
  mask = const_binop (RSHIFT_EXPR, mask,
		      size_int (nbitsize - lbitsize - lbitpos));

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return fold_build2_loc (loc, code, compare_type,
			    fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
					     make_bit_field_ref (loc, linner,
								 unsigned_type,
								 nbitsize,
								 nbitpos, 1),
					     mask),
			    fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
					     make_bit_field_ref (loc, rinner,
								 unsigned_type,
								 nbitsize,
								 nbitpos, 1),
					     mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
					fold_convert_loc (loc,
							  unsigned_type, rhs),
					size_int (lbitsize))))
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR,
			      fold_convert_loc (loc, signed_type, rhs),
			      size_int (lbitsize - 1));
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = build_int_cst (type, 0);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = const_binop (BIT_AND_EXPR,
		     const_binop (LSHIFT_EXPR,
				  fold_convert_loc (loc, unsigned_type, rhs),
				  size_int (lbitpos)),
		     mask);

  lhs = build2_loc (loc, code, compare_type,
		    build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
  return lhs;
}
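
/* For example, given, say,

     struct S { unsigned a : 4; unsigned b : 4; } s;
     ... s.b == 3 ...

   extracting s.b would normally require a shift and a mask; the code
   above instead loads a whole mode-sized word W containing the field
   and compares (W & MASK) == (3 << BITPOS), where MASK and BITPOS
   describe b's position within W.  */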

/* Subroutine for fold_truth_andor_1: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
			HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
			int *punsignedp, int *pvolatilep,
			tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (CONVERT_EXPR_P (exp)
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
	return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
			       punsignedp, pvolatilep, false);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst_type (unsigned_type, -1);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
			    fold_convert_loc (loc, unsigned_type, and_mask),
			    mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}

/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bit positions.  */

static int
all_ones_mask_p (const_tree mask, int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);
  tree tmask;

  tmask = build_int_cst_type (signed_type_for (type), -1);

  return
    tree_int_cst_equal (mask,
			const_binop (RSHIFT_EXPR,
				     const_binop (LSHIFT_EXPR, tmask,
						  size_int (precision - size)),
				     size_int (precision - size)));
}

/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, const_tree val)
{
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = ((unsigned HOST_WIDE_INT) -1
		 >> (HOST_BITS_PER_DOUBLE_INT - width));
      mask_lo = -1;
    }
  else
    {
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = ((unsigned HOST_WIDE_INT) -1
		 >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
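
/* For example, for a 32-bit signed EXP this returns EXP when VAL is
   the INTEGER_CST with bit pattern 0x80000000; for (int) c with an
   8-bit c it recurses and returns c when VAL is 0x80, the sign bit of
   the unextended type.  */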

/* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static bool
simple_operand_p (const_tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  STRIP_NOPS (exp);

  return (CONSTANT_CLASS_P (exp)
	  || TREE_CODE (exp) == SSA_NAME
	  || (DECL_P (exp)
	      && ! TREE_ADDRESSABLE (exp)
	      && ! TREE_THIS_VOLATILE (exp)
	      && ! DECL_NONLOCAL (exp)
	      /* Don't regard global variables as simple.  They may be
		 allocated in ways unknown to the compiler (shared memory,
		 #pragma weak, etc).  */
	      && ! TREE_PUBLIC (exp)
	      && ! DECL_EXTERNAL (exp)
	      /* Loading a static variable is unduly expensive, but global
		 registers aren't expensive.  */
	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}

/* Subroutine for fold_truth_andor: determine if an operand is simple enough
   to be evaluated unconditionally.
   In addition to simple_operand_p, we assume that comparisons, conversions,
   and logic-not operations are simple, if their operands are simple, too.  */

static bool
simple_operand_p_2 (tree exp)
{
  enum tree_code code;

  if (TREE_SIDE_EFFECTS (exp)
      || tree_could_trap_p (exp))
    return false;

  while (CONVERT_EXPR_P (exp))
    exp = TREE_OPERAND (exp, 0);

  code = TREE_CODE (exp);

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return (simple_operand_p (TREE_OPERAND (exp, 0))
	    && simple_operand_p (TREE_OPERAND (exp, 1)));

  if (code == TRUTH_NOT_EXPR)
    return simple_operand_p_2 (TREE_OPERAND (exp, 0));

  return simple_operand_p (exp);
}
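
/* For example, a == b is simple here when a and b are simple decls,
   since the comparison itself cannot trap, while a == b / c is
   rejected because the division may trap at run time.  */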

/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
	X == 2 || X == 3 || X == 4 || X == 5
   and
	X < 2 || X >= 5
   are converted to
	(unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */

/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
	     tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
			 arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);

  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}

/* Helper routine for make_range.  Perform one step for it, return
   new expression if the loop should continue or NULL_TREE if it should
   stop.  */

tree
make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
		 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
		 bool *strict_overflow_p)
{
  tree arg0_type = TREE_TYPE (arg0);
  tree n_low, n_high, low = *p_low, high = *p_high;
  int in_p = *p_in_p, n_in_p;

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      /* We can only do something if the range is testing for zero.  */
      if (low == NULL_TREE || high == NULL_TREE
	  || ! integer_zerop (low) || ! integer_zerop (high))
	return NULL_TREE;
      *p_in_p = ! in_p;
      return arg0;

    case EQ_EXPR: case NE_EXPR:
    case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
      /* We can only do something if the range is testing for zero
	 and if the second operand is an integer constant.  Note that
	 saying something is "in" the range we make is done by
	 complementing IN_P since it will set in the initial case of
	 being not equal to zero; "out" is leaving it alone.  */
      if (low == NULL_TREE || high == NULL_TREE
	  || ! integer_zerop (low) || ! integer_zerop (high)
	  || TREE_CODE (arg1) != INTEGER_CST)
	return NULL_TREE;

      switch (code)
	{
	case NE_EXPR:  /* - [c, c]  */
	  low = high = arg1;
	  break;
	case EQ_EXPR:  /* + [c, c]  */
	  in_p = ! in_p, low = high = arg1;
	  break;
	case GT_EXPR:  /* - [-, c] */
	  low = 0, high = arg1;
	  break;
	case GE_EXPR:  /* + [c, -] */
	  in_p = ! in_p, low = arg1, high = 0;
	  break;
	case LT_EXPR:  /* - [c, -] */
	  low = arg1, high = 0;
	  break;
	case LE_EXPR:  /* + [-, c] */
	  in_p = ! in_p, low = 0, high = arg1;
	  break;
	default:
	  gcc_unreachable ();
	}

      /* If this is an unsigned comparison, we also know that EXP is
	 greater than or equal to zero.  We base the range tests we make
	 on that fact, so we record it here so we can parse existing
	 range tests.  We test arg0_type since often the return type
	 of, e.g. EQ_EXPR, is boolean.  */
      if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
	{
	  if (! merge_ranges (&n_in_p, &n_low, &n_high,
			      in_p, low, high, 1,
			      build_int_cst (arg0_type, 0),
			      NULL_TREE))
	    return NULL_TREE;

	  in_p = n_in_p, low = n_low, high = n_high;

	  /* If the high bound is missing, but we have a nonzero low
	     bound, reverse the range so it goes from zero to the low bound
	     minus 1.  */
	  if (high == 0 && low && ! integer_zerop (low))
	    {
	      in_p = ! in_p;
	      high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
				  integer_one_node, 0);
	      low = build_int_cst (arg0_type, 0);
	    }
	}

      *p_low = low;
      *p_high = high;
      *p_in_p = in_p;
      return arg0;

    case NEGATE_EXPR:
      /* (-x) IN [a,b] -> x in [-b, -a]  */
      n_low = range_binop (MINUS_EXPR, exp_type,
			   build_int_cst (exp_type, 0),
			   0, high, 1);
      n_high = range_binop (MINUS_EXPR, exp_type,
			    build_int_cst (exp_type, 0),
			    0, low, 0);
      if (n_high != 0 && TREE_OVERFLOW (n_high))
	return NULL_TREE;
      goto normalize;

    case BIT_NOT_EXPR:
      /* ~ X -> -X - 1  */
      return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
			 build_int_cst (exp_type, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
      if (TREE_CODE (arg1) != INTEGER_CST)
	return NULL_TREE;

      /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
	 move a constant to the other side.  */
      if (!TYPE_UNSIGNED (arg0_type)
	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
	return NULL_TREE;

      /* If EXP is signed, any overflow in the computation is undefined,
	 so we don't worry about it so long as our computations on
	 the bounds don't overflow.  For unsigned, overflow is defined
	 and this is exactly the right thing.  */
      n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			   arg0_type, low, 0, arg1, 0);
      n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			    arg0_type, high, 1, arg1, 0);
      if ((n_low != 0 && TREE_OVERFLOW (n_low))
	  || (n_high != 0 && TREE_OVERFLOW (n_high)))
	return NULL_TREE;

      if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
	*strict_overflow_p = true;

    normalize:
      /* Check for an unsigned range which has wrapped around the maximum
	 value thus making n_high < n_low, and normalize it.  */
      if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
	{
	  low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
			     integer_one_node, 0);
	  high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
			      integer_one_node, 0);

	  /* If the range is of the form +/- [ x+1, x ], we won't
	     be able to normalize it.  But then, it represents the
	     whole range or the empty set, so make it
	     +/- [ -, - ].  */
	  if (tree_int_cst_equal (n_low, low)
	      && tree_int_cst_equal (n_high, high))
	    low = high = 0;
	  else
	    in_p = ! in_p;
	}
      else
	low = n_low, high = n_high;

      *p_low = low;
      *p_high = high;
      *p_in_p = in_p;
      return arg0;

    CASE_CONVERT:
    case NON_LVALUE_EXPR:
      if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
	return NULL_TREE;

      if (! INTEGRAL_TYPE_P (arg0_type)
	  || (low != 0 && ! int_fits_type_p (low, arg0_type))
	  || (high != 0 && ! int_fits_type_p (high, arg0_type)))
	return NULL_TREE;

      n_low = low, n_high = high;

      if (n_low != 0)
	n_low = fold_convert_loc (loc, arg0_type, n_low);

      if (n_high != 0)
	n_high = fold_convert_loc (loc, arg0_type, n_high);

      /* If we're converting arg0 from an unsigned type, to exp,
	 a signed type, we will be doing the comparison as unsigned.
	 The tests above have already verified that LOW and HIGH
	 are both positive.

	 So we have to ensure that we will handle large unsigned
	 values the same way that the current signed bounds treat
	 negative values.  */

      if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
	{
	  tree high_positive;
	  tree equiv_type;
	  /* For fixed-point modes, we need to pass the saturating flag
	     as the 2nd parameter.  */
	  if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
	    equiv_type
	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
						TYPE_SATURATING (arg0_type));
	  else
	    equiv_type
	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);

	  /* A range without an upper bound is, naturally, unbounded.
	     Since convert would have cropped a very large value, use
	     the max value for the destination type.  */
	  high_positive
	    = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
	      : TYPE_MAX_VALUE (arg0_type);

	  if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
	    high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
					     fold_convert_loc (loc, arg0_type,
							       high_positive),
					     build_int_cst (arg0_type, 1));

	  /* If the low bound is specified, "and" the range with the
	     range for which the original unsigned value will be
	     positive.  */
	  if (low != 0)
	    {
	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
				  1, fold_convert_loc (loc, arg0_type,
						       integer_zero_node),
				  high_positive))
		return NULL_TREE;

	      in_p = (n_in_p == in_p);
	    }
	  else
	    {
	      /* Otherwise, "or" the range with the range of the input
		 that will be interpreted as negative.  */
	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
				  1, fold_convert_loc (loc, arg0_type,
						       integer_zero_node),
				  high_positive))
		return NULL_TREE;

	      in_p = (in_p != n_in_p);
	    }
	}

      *p_low = n_low;
      *p_high = n_high;
      *p_in_p = in_p;
      return arg0;

    default:
      return NULL_TREE;
    }
}

/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same
   type as the returned expression.  If EXP is not a comparison, we
   will most likely not be returning a useful value and range.  Set
   *STRICT_OVERFLOW_P to true if the return value is only valid
   because signed overflow is undefined; otherwise, do not change
   *STRICT_OVERFLOW_P.  */

tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
	    bool *strict_overflow_p)
{
  enum tree_code code;
  tree arg0, arg1 = NULL_TREE;
  tree exp_type, nexp;
  int in_p;
  tree low, high;
  location_t loc = EXPR_LOCATION (exp);

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = build_int_cst (TREE_TYPE (exp), 0);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);
      arg0 = NULL_TREE;

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  if (TREE_OPERAND_LENGTH (exp) > 0)
	    arg0 = TREE_OPERAND (exp, 0);
	  if (TREE_CODE_CLASS (code) == tcc_binary
	      || TREE_CODE_CLASS (code) == tcc_comparison
	      || (TREE_CODE_CLASS (code) == tcc_expression
		  && TREE_OPERAND_LENGTH (exp) > 1))
	    arg1 = TREE_OPERAND (exp, 1);
	}
      if (arg0 == NULL_TREE)
	break;

      nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
			      &high, &in_p, strict_overflow_p);
      if (nexp == NULL_TREE)
	break;
      exp = nexp;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
						 exp, 0, low, 0))
		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
						    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
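
/* For example, given the tree for (unsigned) (x - 2) <= 3, the loop
   above peels one operation per make_range_step call: the comparison
   yields in_p = 1 with bounds [0, 3], the conversion keeps the bounds,
   and the MINUS_EXPR moves them to [2, 5], so x is returned together
   with the range + [2, 5].  */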
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.  */

tree
build_range_check (location_t loc, tree type, tree exp, int in_p,
                   tree low, tree high)
{
  tree etype = TREE_TYPE (exp), value;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (etype) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (! in_p)
    {
      value = build_range_check (loc, type, exp, 1, low, high);
      if (value != 0)
        return invert_truthvalue_loc (loc, value);

      return 0;
    }

  if (low == 0 && high == 0)
    return build_int_cst (type, 1);

  if (low == 0)
    return fold_build2_loc (loc, LE_EXPR, type, exp,
                            fold_convert_loc (loc, etype, high));

  if (high == 0)
    return fold_build2_loc (loc, GE_EXPR, type, exp,
                            fold_convert_loc (loc, etype, low));

  if (operand_equal_p (low, high, 0))
    return fold_build2_loc (loc, EQ_EXPR, type, exp,
                            fold_convert_loc (loc, etype, low));

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
        {
          etype = unsigned_type_for (etype);
          high = fold_convert_loc (loc, etype, high);
          exp = fold_convert_loc (loc, etype, exp);
        }
      return build_range_check (loc, type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
        {
          hi = 0;
          lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
        }
      else
        {
          hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
          lo = (unsigned HOST_WIDE_INT) -1;
        }

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
        {
          if (TYPE_UNSIGNED (etype))
            {
              tree signed_etype = signed_type_for (etype);
              if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
                etype
                  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
              else
                etype = signed_etype;
              exp = fold_convert_loc (loc, etype, exp);
            }
          return fold_build2_loc (loc, GT_EXPR, type, exp,
                                  build_int_cst (etype, 0));
        }
    }

  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetic for the type of the expression.
     First make sure that arithmetic in this type is valid, then make sure
     that it wraps around.  */
  if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
    etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
                                            TYPE_UNSIGNED (etype));

  if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
         for the type in question, as we rely on this here.  */
      utype = unsigned_type_for (etype);
      maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
                          integer_one_node, 1);
      minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
                                      minv, 1, maxv, 1)))
        etype = utype;
      else
        return 0;
    }

  high = fold_convert_loc (loc, etype, high);
  low = fold_convert_loc (loc, etype, low);
  exp = fold_convert_loc (loc, etype, exp);

  value = const_binop (MINUS_EXPR, high, low);

  if (POINTER_TYPE_P (etype))
    {
      if (value != 0 && !TREE_OVERFLOW (value))
        {
          low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
          return build_range_check (loc, type,
                                    fold_build_pointer_plus_loc (loc, exp, low),
                                    1, build_int_cst (etype, 0), value);
        }
      return 0;
    }

  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (loc, type,
                              fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
                              1, build_int_cst (etype, 0), value);

  return 0;
}
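
/* Worked example (illustrative, assuming `unsigned char c'): for the
   range +[65, 90], i.e. c >= 'A' && c <= 'Z', the wrap-around path above
   computes value = 90 - 65 = 25 and subtracts LOW from EXP, so the whole
   range test becomes the single unsigned comparison

       (unsigned char) (c - 65) <= 25

   instead of two comparisons and a conditional branch.  */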
/* Return the predecessor of VAL in its type, handling the infinite case.  */

static tree
range_predecessor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
    return 0;
  else
    return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}

/* Return the successor of VAL in its type, handling the infinite case.  */

static tree
range_successor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
    return 0;
  else
    return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

bool
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
              tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
                  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
                   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
                                 low0, 0, low1, 0))
      || (lowequal
          && integer_onep (range_binop (GT_EXPR, integer_type_node,
                                        high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
                                          high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
                                      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
         is a subset it is the result.  Otherwise, the range is from the start
         of the second to the end of the first.  */
      if (no_overlap)
        in_p = 0, low = high = 0;
      else if (subset)
        in_p = 1, low = low1, high = high1;
      else
        in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
         equal, the result is false.  If the second range is a subset of the
         first, and the ranges begin at the same place, we go from just after
         the end of the second range to the end of the first.  If the second
         range is not a subset of the first, or if it is a subset and both
         ranges end at the same place, the range starts at the start of the
         first range and ends just before the second range.
         Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
        in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
        in_p = 0, low = high = 0;
      else if (subset && lowequal)
        {
          low = range_successor (high1);
          high = high0;
          in_p = 1;
          if (low == 0)
            {
              /* We are in the weird situation where high0 > high1 but
                 high1 has no successor.  Punt.  */
              return 0;
            }
        }
      else if (! subset || highequal)
        {
          low = low0;
          high = range_predecessor (low1);
          in_p = 1;
          if (high == 0)
            {
              /* low0 < low1 but low1 has no predecessor.  Punt.  */
              return 0;
            }
        }
      else
        return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
         is a subset of the first, the result is false.  Otherwise,
         the range starts just after the first range and ends at the
         end of the second.  */
      if (no_overlap)
        in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
        in_p = 0, low = high = 0;
      else
        {
          low = range_successor (high0);
          high = high1;
          in_p = 1;
          if (low == 0)
            {
              /* high1 > high0 but high0 has no successor.  Punt.  */
              return 0;
            }
        }
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
         is if they don't overlap.  In that case, the only time we have a
         range is if they are adjacent.  If the second is a subset of the
         first, the result is the first.  Otherwise, the range to exclude
         starts at the beginning of the first range and ends at the end of the
         second.  */
      if (no_overlap)
        {
          if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                         range_successor (high0),
                                         1, low1, 0)))
            in_p = 0, low = low0, high = high1;
          else
            {
              /* Canonicalize - [min, x] into - [-, x].  */
              if (low0 && TREE_CODE (low0) == INTEGER_CST)
                switch (TREE_CODE (TREE_TYPE (low0)))
                  {
                  case ENUMERAL_TYPE:
                    if (TYPE_PRECISION (TREE_TYPE (low0))
                        != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
                      break;
                    /* FALLTHROUGH */
                  case INTEGER_TYPE:
                    if (tree_int_cst_equal (low0,
                                            TYPE_MIN_VALUE (TREE_TYPE (low0))))
                      low0 = 0;
                    break;
                  case POINTER_TYPE:
                    if (TYPE_UNSIGNED (TREE_TYPE (low0))
                        && integer_zerop (low0))
                      low0 = 0;
                    break;
                  default:
                    break;
                  }

              /* Canonicalize - [x, max] into - [x, -].  */
              if (high1 && TREE_CODE (high1) == INTEGER_CST)
                switch (TREE_CODE (TREE_TYPE (high1)))
                  {
                  case ENUMERAL_TYPE:
                    if (TYPE_PRECISION (TREE_TYPE (high1))
                        != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
                      break;
                    /* FALLTHROUGH */
                  case INTEGER_TYPE:
                    if (tree_int_cst_equal (high1,
                                            TYPE_MAX_VALUE (TREE_TYPE (high1))))
                      high1 = 0;
                    break;
                  case POINTER_TYPE:
                    if (TYPE_UNSIGNED (TREE_TYPE (high1))
                        && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
                                                       high1, 1,
                                                       integer_one_node, 1)))
                      high1 = 0;
                    break;
                  default:
                    break;
                  }

              /* The ranges might be also adjacent between the maximum and
                 minimum values of the given type.  For
                 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
                 return + [x + 1, y - 1].  */
              if (low0 == 0 && high1 == 0)
                {
                  low = range_successor (high0);
                  high = range_predecessor (low1);
                  if (low == 0 || high == 0)
                    return 0;

                  in_p = 1;
                }
              else
                return 0;
            }
        }
      else if (subset)
        in_p = 0, low = low0, high = high0;
      else
        in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
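
/* Worked example (illustrative): intersecting ("and"-ing) the ranges
   +[0, 9] and +[5, 15]: neither is disjoint from nor a subset of the
   other, so the in0_p && in1_p case above produces
   +[low1, high0] = +[5, 9].  */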
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is also used to optimize
   A op B ? C : A, by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (location_t loc, tree type,
                                tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && (FLOAT_TYPE_P (TREE_TYPE (arg01))
          ? real_zerop (arg01)
          : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
           && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
             /* In the case that A is of the form X-Y, '-A' (arg2) may
                have already been folded to Y-X, check for that. */
          || (TREE_CODE (arg1) == MINUS_EXPR
              && TREE_CODE (arg2) == MINUS_EXPR
              && operand_equal_p (TREE_OPERAND (arg1, 0),
                                  TREE_OPERAND (arg2, 1), 0)
              && operand_equal_p (TREE_OPERAND (arg1, 1),
                                  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
        tem = fold_convert_loc (loc, arg1_type, arg1);
        return pedantic_non_lvalue_loc (loc,
                                        fold_convert_loc (loc, type,
                                                          negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
        return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
        if (flag_trapping_math)
          break;
        /* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
        if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
          arg1 = fold_convert_loc (loc, signed_type_for
                                   (TREE_TYPE (arg1)), arg1);
        tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
        return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
        if (flag_trapping_math)
          break;
        /* Fall through.  */
      case LE_EXPR:
      case LT_EXPR:
        if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
          arg1 = fold_convert_loc (loc, signed_type_for
                                   (TREE_TYPE (arg1)), arg1);
        tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
        return negate_expr (fold_convert_loc (loc, type, tem));
      default:
        gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
        break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
        return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      else if (comp_code == EQ_EXPR)
        return build_int_cst (type, 0);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
         as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
          || (strcmp (lang_hooks.name, "GNU C++") != 0
              && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
          || ! maybe_lvalue_p (arg1)
          || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
        {
          comp_type = type;
          comp_op0 = arg1, comp_op1 = arg2;
        }

      switch (comp_code)
        {
        case EQ_EXPR:
          return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
        case NE_EXPR:
          return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
        case LE_EXPR:
        case LT_EXPR:
        case UNLE_EXPR:
        case UNLT_EXPR:
          /* In C++ a ?: expression can be an lvalue, so put the
             operand which will be used if they are equal first
             so that we can convert this back to the
             corresponding COND_EXPR.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            {
              comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
              comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
              tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
                    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
                    : fold_build2_loc (loc, MIN_EXPR, comp_type,
                                       comp_op1, comp_op0);
              return pedantic_non_lvalue_loc (loc,
                                              fold_convert_loc (loc, type, tem));
            }
          break;
        case GE_EXPR:
        case GT_EXPR:
        case UNGE_EXPR:
        case UNGT_EXPR:
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            {
              comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
              comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
              tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
                    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
                    : fold_build2_loc (loc, MAX_EXPR, comp_type,
                                       comp_op1, comp_op0);
              return pedantic_non_lvalue_loc (loc,
                                              fold_convert_loc (loc, type, tem));
            }
          break;
        case UNEQ_EXPR:
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            return pedantic_non_lvalue_loc (loc,
                                            fold_convert_loc (loc, type, arg2));
          break;
        case LTGT_EXPR:
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            return pedantic_non_lvalue_loc (loc,
                                            fold_convert_loc (loc, type, arg1));
          break;
        default:
          gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
          break;
        }
    }

  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
        if (TREE_CODE (arg1) == INTEGER_CST)
          break;
        /* We can replace A with C1 in this case.  */
        arg1 = fold_convert_loc (loc, type, arg01);
        return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);

      case LT_EXPR:
        /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
           MIN_EXPR, to preserve the signedness of the comparison.  */
        if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (PLUS_EXPR, arg2,
                                             build_int_cst (type, 1)),
                                OEP_ONLY_CONST))
          {
            tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
                                   fold_convert_loc (loc, TREE_TYPE (arg00),
                                                     arg2));
            return pedantic_non_lvalue_loc (loc,
                                            fold_convert_loc (loc, type, tem));
          }
        break;

      case LE_EXPR:
        /* If C1 is C2 - 1, this is min(A, C2), with the same care
           as above.  */
        if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (MINUS_EXPR, arg2,
                                             build_int_cst (type, 1)),
                                OEP_ONLY_CONST))
          {
            tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
                                   fold_convert_loc (loc, TREE_TYPE (arg00),
                                                     arg2));
            return pedantic_non_lvalue_loc (loc,
                                            fold_convert_loc (loc, type, tem));
          }
        break;

      case GT_EXPR:
        /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
           MAX_EXPR, to preserve the signedness of the comparison.  */
        if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (MINUS_EXPR, arg2,
                                             build_int_cst (type, 1)),
                                OEP_ONLY_CONST))
          {
            tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
                                   fold_convert_loc (loc, TREE_TYPE (arg00),
                                                     arg2));
            return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
          }
        break;

      case GE_EXPR:
        /* If C1 is C2 + 1, this is max(A, C2), with the same care as above.  */
        if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (PLUS_EXPR, arg2,
                                             build_int_cst (type, 1)),
                                OEP_ONLY_CONST))
          {
            tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
                                   fold_convert_loc (loc, TREE_TYPE (arg00),
                                                     arg2));
            return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
          }
        break;

      case NE_EXPR:
        break;

      default:
        gcc_unreachable ();
      }

  return NULL_TREE;
}
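
/* Examples of the transformations above (illustrative), for operands
   whose mode honors neither signed zeros nor NaNs:

       x >= 0 ? x : -x    becomes  ABS_EXPR <x>
       a <= b ? a : b     becomes  MIN_EXPR <a, b>
       a <  b ? a : b     becomes  MIN_EXPR <b, a>

   and the constant form `x < 4 ? x : 3' (C1 == C2 + 1) is rebuilt as
   MIN_EXPR <x, 3>.  */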
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
                false) >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (location_t loc, enum tree_code code, tree type,
                 tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
               || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  bool strict_overflow_p = false;
  tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
  tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
  tree tem;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
                                  "when simplifying range test");

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
                       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (loc, type,
                                         lhs != 0 ? lhs
                                         : rhs != 0 ? rhs : integer_zero_node,
                                         in_p, low, high))))
    {
      if (strict_overflow_p)
        fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
      return or_op ? invert_truthvalue_loc (loc, tem) : tem;
    }

  /* On machines where branches are expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
           && lhs != 0 && rhs != 0
           && (code == TRUTH_ANDIF_EXPR
               || code == TRUTH_ORIF_EXPR)
           && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
         unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
         which cases we can't do this.  */
      if (simple_operand_p (lhs))
        return build2_loc (loc, code == TRUTH_ANDIF_EXPR
                           ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
                           type, op0, op1);

      else if (!lang_hooks.decls.global_bindings_p ()
               && !CONTAINS_PLACEHOLDER_P (lhs))
        {
          tree common = save_expr (lhs);

          if (0 != (lhs = build_range_check (loc, type, common,
                                             or_op ? ! in0_p : in0_p,
                                             low0, high0))
              && (0 != (rhs = build_range_check (loc, type, common,
                                                 or_op ? ! in1_p : in1_p,
                                                 low1, high1))))
            {
              if (strict_overflow_p)
                fold_overflow_warning (warnmsg,
                                       WARN_STRICT_OVERFLOW_COMPARISON);
              return build2_loc (loc, code == TRUTH_ANDIF_EXPR
                                 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
                                 type, lhs, rhs);
            }
        }
    }

  return 0;
}
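
/* Worked example (illustrative): for `ch >= '0' && ch <= '9'' both
   subexpressions refer to CH, the two ranges merge to +[48, 57], and
   build_range_check collapses the short-circuit AND into roughly

       (unsigned char) (ch - 48) <= 9

   with the exact form depending on the type of CH.  */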
/* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is signed-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1));

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (signed_type_for (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
                        fold_convert (TREE_TYPE (c), mask));
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
}
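
/* Worked example (illustrative): with P = 8, UNSIGNEDP = 0 and MASK = 0
   in a 32-bit mode, C = 0xF0 has its 8-bit sign bit set, so TEMP becomes
   0xFFFFFF00 after the two shifts, and the final XOR yields 0xFFFFFFF0,
   i.e. C sign-extended from 8 bits to the full width.  */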
/* For an expression that has the form
     (A && B) || ~B
   or
     (A || B) && ~B,
   we can drop one of the inner expressions and simplify to
     A || ~B
   or
     A && ~B

   LOC is the location of the resulting expression.  OP is the inner
   logical operation; the left-hand side in the examples above, while CMPOP
   is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
   removing a condition that guards another, as in
     (A != NULL && A->...) || A == NULL
   which we must not transform.  If RHS_ONLY is true, only eliminate the
   right-most operand of the inner logical operation.  */

static tree
merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
                                 bool rhs_only)
{
  tree type = TREE_TYPE (cmpop);
  enum tree_code code = TREE_CODE (cmpop);
  enum tree_code truthop_code = TREE_CODE (op);
  tree lhs = TREE_OPERAND (op, 0);
  tree rhs = TREE_OPERAND (op, 1);
  tree orig_lhs = lhs, orig_rhs = rhs;
  enum tree_code rhs_code = TREE_CODE (rhs);
  enum tree_code lhs_code = TREE_CODE (lhs);
  enum tree_code inv_code;

  if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
    return NULL_TREE;

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return NULL_TREE;

  if (rhs_code == truthop_code)
    {
      tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
      if (newrhs != NULL_TREE)
        {
          rhs = newrhs;
          rhs_code = TREE_CODE (rhs);
        }
    }
  if (lhs_code == truthop_code && !rhs_only)
    {
      tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
      if (newlhs != NULL_TREE)
        {
          lhs = newlhs;
          lhs_code = TREE_CODE (lhs);
        }
    }

  inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
  if (inv_code == rhs_code
      && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return lhs;
  if (!rhs_only && inv_code == lhs_code
      && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return rhs;
  if (rhs != orig_rhs || lhs != orig_lhs)
    return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
                            lhs, rhs);
  return NULL_TREE;
}
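
/* Worked example (illustrative): with OP = `x > 0 && y > 0' and
   CMPOP = `x <= 0' (the inverse of the left-hand comparison), the checks
   above return RHS, i.e. `y > 0', so a caller can rewrite
   `(x > 0 && y > 0) || x <= 0' as `y > 0 || x <= 0'.  This assumes
   RHS_ONLY is false, integer operands, and no side effects.  */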
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
                    tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
        (a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  HOST_WIDE_INT first_bit, end_bit;
  int volatilep;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
                    build_int_cst (TREE_TYPE (lhs), 0));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
                    build_int_cst (TREE_TYPE (rhs), 0));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      if (operand_equal_p (ll_arg, rl_arg, 0)
          && operand_equal_p (lr_arg, rr_arg, 0))
        {
          result = combine_comparisons (loc, code, lcode, rcode,
                                        truth_type, ll_arg, lr_arg);
          if (result)
            return result;
        }
      else if (operand_equal_p (ll_arg, rr_arg, 0)
               && operand_equal_p (lr_arg, rl_arg, 0))
        {
          result = combine_comparisons (loc, code, lcode,
                                        swap_tree_comparison (rcode),
                                        truth_type, ll_arg, lr_arg);
          if (result)
            return result;
        }
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
          ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  */

  if (BRANCH_COST (optimize_function_for_speed_p (cfun),
                   false) >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
          && lcode == NE_EXPR && integer_zerop (lr_arg)
          && rcode == NE_EXPR && integer_zerop (rr_arg)
          && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
          && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
        return build2_loc (loc, NE_EXPR, truth_type,
                           build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
                                   ll_arg, rl_arg),
                           build_int_cst (TREE_TYPE (ll_arg), 0));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
          && lcode == EQ_EXPR && integer_zerop (lr_arg)
          && rcode == EQ_EXPR && integer_zerop (rr_arg)
          && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
          && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
        return build2_loc (loc, EQ_EXPR, truth_type,
                           build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
                                   ll_arg, rl_arg),
                           build_int_cst (TREE_TYPE (ll_arg), 0));
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (loc, ll_arg,
                                     &ll_bitsize, &ll_bitpos, &ll_mode,
                                     &ll_unsignedp, &volatilep, &ll_mask,
                                     &ll_and_mask);
  lr_inner = decode_field_reference (loc, lr_arg,
                                     &lr_bitsize, &lr_bitpos, &lr_mode,
                                     &lr_unsignedp, &volatilep, &lr_mask,
                                     &lr_and_mask);
  rl_inner = decode_field_reference (loc, rl_arg,
                                     &rl_bitsize, &rl_bitpos, &rl_mode,
                                     &rl_unsignedp, &volatilep, &rl_mask,
                                     &rl_and_mask);
  rr_inner = decode_field_reference (loc, rr_arg,
                                     &rr_bitsize, &rr_bitpos, &rr_mode,
                                     &rr_unsignedp, &volatilep, &rr_mask,
                                     &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
           || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
        {
          /* Make the left operand unsigned, since we are only interested
             in the value of one bit.  Otherwise we are doing the wrong
             thing below.  */
          ll_unsignedp = 1;
          l_const = ll_mask;
        }
      else
        return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
        {
          rl_unsignedp = 1;
          r_const = rl_mask;
        }
      else
        return 0;
    }

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
                          TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
                          volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
                         size_int (xll_bitpos));
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
                         size_int (xrl_bitpos));

  if (l_const)
    {
      l_const = fold_convert_loc (loc, lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
                                        fold_build1_loc (loc, BIT_NOT_EXPR,
                                                         lntype, ll_mask))))
        {
          warning (0, "comparison is always %d", wanted_code == NE_EXPR);

          return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
        }
    }
  if (r_const)
    {
      r_const = fold_convert_loc (loc, lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
                                        fold_build1_loc (loc, BIT_NOT_EXPR,
                                                         lntype, rl_mask))))
        {
          warning (0, "comparison is always %d", wanted_code == NE_EXPR);

          return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
        }
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
          || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
          /* Make sure the two fields on the right
             correspond to the left without being swapped.  */
          || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
        return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
                              TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
                              volatilep);
      if (rnmode == VOIDmode)
        return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
        {
          xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
          xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
        }

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
                                                            rntype, lr_mask),
                             size_int (xlr_bitpos));
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
                                                            rntype, rr_mask),
                             size_int (xrr_bitpos));

      /* Make a mask that corresponds to both fields being compared.
         Do this for both items being compared.  If the operands are the
         same size and the bits being compared are in the same position
         then we can do this by masking both and comparing the masked
         results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
        {
          lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
                                    ll_unsignedp || rl_unsignedp);
          if (! all_ones_mask_p (ll_mask, lnbitsize))
            lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

          rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
                                    lr_unsignedp || rr_unsignedp);
          if (! all_ones_mask_p (lr_mask, rnbitsize))
            rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

          return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
        }

      /* There is still another way we can do something:  If both pairs of
         fields being compared are adjacent, we may be able to make a wider
         field containing them both.

         Note that we still must mask the lhs/rhs expressions.  Furthermore,
         the mask must be shifted to account for the shift done by
         make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
           && lr_bitsize + lr_bitpos == rr_bitpos)
          || (ll_bitpos == rl_bitpos + rl_bitsize
              && lr_bitpos == rr_bitpos + rr_bitsize))
        {
          tree type;

          lhs = make_bit_field_ref (loc, ll_inner, lntype,
                                    ll_bitsize + rl_bitsize,
                                    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
          rhs = make_bit_field_ref (loc, lr_inner, rntype,
                                    lr_bitsize + rr_bitsize,
                                    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

          ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
                                 size_int (MIN (xll_bitpos, xrl_bitpos)));
          lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
                                 size_int (MIN (xlr_bitpos, xrr_bitpos)));

          /* Convert to the smaller type before masking out unwanted bits.  */
          type = lntype;
          if (lntype != rntype)
            {
              if (lnbitsize > rnbitsize)
                {
                  lhs = fold_convert_loc (loc, rntype, lhs);
                  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
                  type = rntype;
                }
              else if (lnbitsize < rnbitsize)
                {
                  rhs = fold_convert_loc (loc, lntype, rhs);
                  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
                  type = lntype;
                }
            }

          if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
            lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

          if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
            rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

          return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
        }

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
                           const_binop (BIT_AND_EXPR, result, r_const)) != 1)
    {
      if (wanted_code == NE_EXPR)
        {
          warning (0, "%<or%> of unmatched not-equal tests is always 1");
          return constant_boolean_node (true, truth_type);
        }
      else
        {
          warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
          return constant_boolean_node (false, truth_type);
        }
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
                               ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);

  return build2_loc (loc, wanted_code, truth_type, result,
                     const_binop (BIT_IOR_EXPR, l_const, r_const));
}
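
/* Worked example (illustrative): given

       struct s { unsigned a : 4; unsigned b : 4; } *p;

   the test `p->a == 2 && p->b == 4' inspects a single byte, and the
   constant path above merges the two field comparisons into roughly

       (BIT_FIELD_REF <*p, 8, 0> & 0xff) == 0x42

   with the exact constant depending on bit positions and endianness.  */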
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
                            tree op0, tree op1)
{
  tree arg0 = op0;
  enum tree_code op_code;
  tree comp_const;
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_OVERFLOW (minmax_const))
    return NULL_TREE;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (code)
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      {
        tree tem
          = optimize_minmax_comparison (loc,
                                        invert_tree_comparison (code, false),
                                        type, op0, op1);
        if (tem)
          return invert_truthvalue_loc (loc, tem);
        return NULL_TREE;
      }

    case GE_EXPR:
      return
        fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
                         optimize_minmax_comparison
                         (loc, EQ_EXPR, type, arg0, comp_const),
                         optimize_minmax_comparison
                         (loc, GT_EXPR, type, arg0, comp_const));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
        /* MAX (X, 0) == 0  ->  X <= 0  */
        return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR && consts_lt)
        /* MAX (X, 0) == 5  ->  X == 5   */
        return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
        /* MAX (X, 0) == -1  ->  false  */
        return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else if (consts_equal)
        /* MIN (X, 0) == 0  ->  X >= 0  */
        return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);

      else if (consts_lt)
        /* MIN (X, 0) == 5  ->  false  */
        return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
        /* MIN (X, 0) == -1  ->  X == -1  */
        return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
        /* MAX (X, 0) > 0  ->  X > 0
           MAX (X, 0) > 5  ->  X > 5  */
        return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
        /* MAX (X, 0) > -1  ->  true  */
        return omit_one_operand_loc (loc, type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
        /* MIN (X, 0) > 0  ->  false
           MIN (X, 0) > 5  ->  false  */
        return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
        /* MIN (X, 0) > -1  ->  X > -1  */
        return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

    default:
      return NULL_TREE;
    }
}
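
/* Examples (illustrative), taken together with the case comments above:

       MAX (x, 0) >  5    becomes  x > 5      (consts_lt case)
       MAX (x, 0) == -1   becomes  false
       MIN (x, 0) == 0    becomes  x >= 0

   while NE, LT, LE and GE are reduced to EQ and GT by the recursive
   calls at the top of the switch.  */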
/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.

   We set *STRICT_OVERFLOW_P to true if the return value depends on
   signed overflow being undefined.  Otherwise we do not change
   *STRICT_OVERFLOW_P.  */

static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
                bool *strict_overflow_p)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  static int depth;
  tree ret;

  if (depth > 3)
    return NULL;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
  depth--;

  return ret;
}

static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
                  bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
                                   > GET_MODE_SIZE (TYPE_MODE (type)))
                ? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;
  bool sub_strict_overflow_p;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
         or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
          || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
        return const_binop (code, fold_convert (ctype, t),
                            fold_convert (ctype, c));
      break;

    CASE_CONVERT: case NON_LVALUE_EXPR:
      /* If op0 is an expression ... */
      if ((COMPARISON_CLASS_P (op0)
           || UNARY_CLASS_P (op0)
           || BINARY_CLASS_P (op0)
           || VL_EXP_CLASS_P (op0)
           || EXPRESSION_CLASS_P (op0))
          /* ... and has wrapping overflow, and its type is smaller
             than ctype, then we cannot pass through as widening.  */
          && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
               && (TYPE_PRECISION (ctype)
                   > TYPE_PRECISION (TREE_TYPE (op0))))
              /* ... or this is a truncation (t is narrower than op0),
                 then we cannot pass through this narrowing.  */
              || (TYPE_PRECISION (type)
                  < TYPE_PRECISION (TREE_TYPE (op0)))
              /* ... or signedness changes for division or modulus,
                 then we cannot pass through this conversion.  */
              || (code != MULT_EXPR
                  && (TYPE_UNSIGNED (ctype)
                      != TYPE_UNSIGNED (TREE_TYPE (op0))))
              /* ... or has undefined overflow while the converted to
                 type has not, we cannot do the operation in the inner type
                 as that would introduce undefined overflow.  */
              || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
                  && !TYPE_OVERFLOW_UNDEFINED (type))))
        break;

      /* Pass the constant down and see if we can make a simplification.  If
         we can, replace this expression with the inner simplification for
         possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
          && TREE_CODE (t2) == INTEGER_CST
          && !TREE_OVERFLOW (t2)
          && (0 != (t1 = extract_muldiv (op0, t2, code,
                                         code == MULT_EXPR
                                         ? ctype : NULL_TREE,
                                         strict_overflow_p))))
        return t1;
      break;

    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
         must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
        {
          tree cstype = (*signed_type_for) (ctype);
          if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
              != 0)
            {
              t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
              return fold_convert (ctype, t1);
            }
          break;
        }
      /* If the constant is negative, we cannot simplify this.  */
      if (tree_int_cst_sgn (c) == -1)
        break;
      /* FALLTHROUGH */
    case NEGATE_EXPR:
      if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
          != 0)
        return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
         this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
        break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      sub_strict_overflow_p = false;
      if ((t1 = extract_muldiv (op0, c, code, wide_type,
                                &sub_strict_overflow_p)) != 0
          && (t2 = extract_muldiv (op1, c, code, wide_type,
                                   &sub_strict_overflow_p)) != 0)
        {
          if (tree_int_cst_sgn (c) < 0)
            tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
                              fold_convert (ctype, t2));
        }
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
         or floor division, by a power of two, so we can treat it that
         way unless the multiplier or divisor overflows.  Signed
         left-shift overflow is implementation-defined rather than
         undefined in C90, so do not convert signed left shift into
         multiplication.  */
      if (TREE_CODE (op1) == INTEGER_CST
          && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
          /* const_binop may not detect overflow correctly,
             so check for it explicitly here.  */
          && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
          && TREE_INT_CST_HIGH (op1) == 0
          && 0 != (t1 = fold_convert (ctype,
                                      const_binop (LSHIFT_EXPR,
                                                   size_one_node, op1)))
          && !TREE_OVERFLOW (t1))
        return extract_muldiv (build2 (tcode == LSHIFT_EXPR
                                       ? MULT_EXPR : FLOOR_DIV_EXPR,
                                       ctype,
                                       fold_convert (ctype, op0),
                                       t1),
                               c, code, wide_type, strict_overflow_p);
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
         can return a new PLUS or MINUS.  If we can't, the only remaining
         cases where we can do anything are if the second operand is a
         constant.  */
      sub_strict_overflow_p = false;
      t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
      t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
      if (t1 != 0 && t2 != 0
          && (code == MULT_EXPR
              /* If not multiplication, we can only do this if both operands
                 are divisible by c.  */
              || (multiple_of_p (ctype, op0, c)
                  && multiple_of_p (ctype, op1, c))))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
                              fold_convert (ctype, t2));
        }

      /* If this was a subtraction, negate OP1 and set it to be an addition.
         This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
        {
          tcode = PLUS_EXPR, op1 = negate_expr (op1);
          /* If OP1 was not easily negatable, the constant may be OP0.  */
          if (TREE_CODE (op0) == INTEGER_CST)
            {
              tree tem = op0;
              op0 = op1;
              op1 = tem;
              tem = t1;
              t1 = t2;
              t2 = tem;
            }
        }

      if (TREE_CODE (op1) != INTEGER_CST)
        break;

      /* If either OP1 or C are negative, this optimization is not safe for
         some of the division and remainder types while for others we need
         to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
        {
          if (code == CEIL_DIV_EXPR)
            code = FLOOR_DIV_EXPR;
          else if (code == FLOOR_DIV_EXPR)
            code = CEIL_DIV_EXPR;
          else if (code != MULT_EXPR
                   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
            break;
        }

      /* If it's a multiply or a division/modulus operation of a multiple
         of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
          || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
        {
          op1 = const_binop (code, fold_convert (ctype, op1),
                             fold_convert (ctype, c));
          /* We allow the constant to overflow with wrapping semantics.  */
          if (op1 == 0
              || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
            break;
        }
      else
        break;

      /* If we have an unsigned type that is not a sizetype, we cannot widen
         the operation since it will change the result if the original
         computation overflowed.  */
      if (TYPE_UNSIGNED (ctype)
          && ctype != type)
        break;

      /* If we were able to eliminate our operation from the first side,
         apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
        return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
         apply the distributive law to commute the multiply and addition
         if the multiplication of the constants doesn't overflow.  */
      if (code == MULT_EXPR)
        return fold_build2 (tcode, ctype,
                            fold_build2 (code, ctype,
                                         fold_convert (ctype, op0),
                                         fold_convert (ctype, c)),
                            op1);

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
         (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
           || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
          /* If the multiplication can overflow we cannot optimize this.  */
          && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
          && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
          && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
        {
          *strict_overflow_p = true;
          return omit_one_operand (type, integer_zero_node, op0);
        }

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
         new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
         do something only if the second operand is a constant.  */
      if (same_p
          && (t1 = extract_muldiv (op0, c, code, wide_type,
                                   strict_overflow_p)) != 0)
        return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
                            fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
               && (t1 = extract_muldiv (op1, c, code, wide_type,
                                        strict_overflow_p)) != 0)
        return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
                            fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
        return 0;

      /* If these are the same operation types, we can associate them
         assuming no overflow.  */
      if (tcode == code)
        {
          double_int mul;
          bool overflow_p;
          unsigned prec = TYPE_PRECISION (ctype);
          bool uns = TYPE_UNSIGNED (ctype);
          double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
          double_int dic = tree_to_double_int (c).ext (prec, uns);
          mul = diop1.mul_with_sign (dic, false, &overflow_p);
          overflow_p = ((!uns && overflow_p)
                        | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
          if (!double_int_fits_to_tree_p (ctype, mul)
              && ((uns && tcode != MULT_EXPR) || !uns))
            overflow_p = true;
          if (!overflow_p)
            return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
                                double_int_to_tree (ctype, mul));
        }

      /* If these operations "cancel" each other, we have the main
         optimizations of this pass, which occur when either constant is a
         multiple of the other, in which case we replace this with either an
         operation of CODE or TCODE.

         If we have an unsigned type, we cannot do this since it will change
         the result if the original computation overflowed.  */
      if (TYPE_OVERFLOW_UNDEFINED (ctype)
          && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
              || (tcode == MULT_EXPR
                  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
                  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
                  && code != MULT_EXPR)))
        {
          if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
            {
              if (TYPE_OVERFLOW_UNDEFINED (ctype))
                *strict_overflow_p = true;
              return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
                                  fold_convert (ctype,
                                                const_binop (TRUNC_DIV_EXPR,
                                                             op1, c)));
            }
          else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
            {
              if (TYPE_OVERFLOW_UNDEFINED (ctype))
                *strict_overflow_p = true;
              return fold_build2 (code, ctype, fold_convert (ctype, op0),
                                  fold_convert (ctype,
                                                const_binop (TRUNC_DIV_EXPR,
                                                             c, op1)));
            }
        }
      break;

    default:
      break;
    }

  return 0;
}
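
/* Worked example (illustrative): for T = `(x * 8) + (y * 16)', C = 4 and
   CODE = TRUNC_DIV_EXPR, the PLUS_EXPR case recurses into both
   multiplications, each of which is a multiple of 4, so the result is
   `(x * 2) + (y * 4)' -- the transformation described in the comment
   before extract_muldiv.  */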
/* Return a node which has the indicated constant VALUE (either 0 or
   1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
   and is of the indicated TYPE.  */

tree
constant_boolean_node (bool value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else if (TREE_CODE (type) == VECTOR_TYPE)
    return build_vector_from_val (type,
                                  build_int_cst (TREE_TYPE (type),
                                                 value ? -1 : 0));
  else
    return fold_convert (type, value ? integer_one_node : integer_zero_node);
}
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (location_t loc,
				     enum tree_code code,
				     tree type, tree op0, tree op1,
				     tree cond, tree arg, int cond_first_p)
{
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;

  if (TREE_CODE (cond) == COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an exception, then it does not make
	 sense to try to perform a logical or arithmetic operation
	 involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	rhs = false_value;
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  /* This transformation is only worthwhile if we don't have to wrap ARG
     in a SAVE_EXPR and the operation can be simplified on at least one
     of the branches once its pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg)
      && (TREE_SIDE_EFFECTS (arg)
	  || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
    return NULL_TREE;

  arg = fold_convert_loc (loc, arg_type, arg);
  if (lhs == 0)
    {
      true_value = fold_convert_loc (loc, cond_type, true_value);
      if (cond_first_p)
	lhs = fold_build2_loc (loc, code, type, true_value, arg);
      else
	lhs = fold_build2_loc (loc, code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert_loc (loc, cond_type, false_value);
      if (cond_first_p)
	rhs = fold_build2_loc (loc, code, type, false_value, arg);
      else
	rhs = fold_build2_loc (loc, code, type, arg, false_value);
    }

  /* Check that we have simplified at least one of the branches.  */
  if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
    return NULL_TREE;

  return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
}
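
/* Editor's note: a sketch of the transformation above, not from the
   original sources.  Given TYPE == int, CODE == PLUS_EXPR, ARG == a and
   COND == (b ? 1 : 0):

     a + (b ? 1 : 0)   =>   b ? a + 1 : a + 0   =>   b ? a + 1 : a

   The fold bails out rather than wrap ARG in a SAVE_EXPR, so it only
   fires when at least one branch actually simplifies.  */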
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

bool
fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (TYPE_MODE (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (TREE_CODE (addend) == REAL_CST
      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}
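
/* Editor's note: worked IEEE examples for the cases described above
   (illustrative, not from the original sources):

     X == -0.0:                   X + 0.0 == +0.0   (not X)
     round-toward-negative mode:  +0.0 - 0.0 == -0.0 (not X when X == +0.0)

   Hence, with signed zeros honored, only X - 0.0 (and only without
   sign-dependent rounding) can be folded to X.  */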
/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_mathfn_compare (location_t loc,
		     enum built_in_function fcode, enum tree_code code,
		     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      tree arg = CALL_EXPR_ARG (arg0, 0);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
	{
	  /* sqrt(x) < y is always false, if y is negative.  */
	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
	    return omit_one_operand_loc (loc, type, integer_zero_node, arg);

	  /* sqrt(x) > y is always true, if y is negative and we
	     don't care about NaNs, i.e. negative values of x.  */
	  if (code == NE_EXPR || !HONOR_NANS (mode))
	    return omit_one_operand_loc (loc, type, integer_one_node, arg);

	  /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
	  return fold_build2_loc (loc, GE_EXPR, type, arg,
				  build_real (TREE_TYPE (arg), dconst0));
	}
      else if (code == GT_EXPR || code == GE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) > y is x == +Inf, when y is very large.  */
	      if (HONOR_INFINITIES (mode))
		return fold_build2_loc (loc, EQ_EXPR, type, arg,
					build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) > y is always false, when y is very large
		 and we don't care about infinities.  */
	      return omit_one_operand_loc (loc, type, integer_zero_node, arg);
	    }

	  /* sqrt(x) > c is the same as x > c*c.  */
	  return fold_build2_loc (loc, code, type, arg,
				  build_real (TREE_TYPE (arg), c2));
	}
      else if (code == LT_EXPR || code == LE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) < y is always true, when y is a very large
		 value and we don't care about NaNs or Infinities.  */
	      if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
		return omit_one_operand_loc (loc, type, integer_one_node, arg);

	      /* sqrt(x) < y is x != +Inf when y is very large and we
		 don't care about NaNs.  */
	      if (! HONOR_NANS (mode))
		return fold_build2_loc (loc, NE_EXPR, type, arg,
					build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) < y is x >= 0 when y is very large and we
		 don't care about Infinities.  */
	      if (! HONOR_INFINITIES (mode))
		return fold_build2_loc (loc, GE_EXPR, type, arg,
					build_real (TREE_TYPE (arg), dconst0));

	      /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
	      arg = save_expr (arg);
	      return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				      fold_build2_loc (loc, GE_EXPR, type, arg,
						       build_real (TREE_TYPE (arg),
								   dconst0)),
				      fold_build2_loc (loc, NE_EXPR, type, arg,
						       build_real (TREE_TYPE (arg),
								   c2)));
	    }

	  /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
	  if (! HONOR_NANS (mode))
	    return fold_build2_loc (loc, code, type, arg,
				    build_real (TREE_TYPE (arg), c2));

	  /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
	  arg = save_expr (arg);
	  return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				  fold_build2_loc (loc, GE_EXPR, type, arg,
						   build_real (TREE_TYPE (arg),
							       dconst0)),
				  fold_build2_loc (loc, code, type, arg,
						   build_real (TREE_TYPE (arg),
							       c2)));
	}
    }

  return NULL_TREE;
}
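
/* Editor's note: a summary of the sqrt folds above on concrete inputs
   (illustrative, assuming NaNs and infinities need not be honored):

     sqrt(x) <  -1.0   =>  false
     sqrt(x) >  -1.0   =>  true      (or x >= 0.0 when NaNs matter)
     sqrt(x) >   2.0   =>  x > 4.0
     sqrt(x) <=  2.0   =>  x <= 4.0  (guarded by x >= 0.0 when NaNs matter)  */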
/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_inf_compare (location_t loc, enum tree_code code, tree type,
		  tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
	return NULL_TREE;
      return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
      arg0 = save_expr (arg0);
      return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
	return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
				arg0, build_real (TREE_TYPE (arg0), max));

      temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));
      return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);

    default:
      break;
    }

  return NULL_TREE;
}
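
/* Editor's note: concrete instances of the Inf folds above for 'double'
   (illustrative, not from the original sources):

     x >  +Inf   =>  false
     x <= +Inf   =>  x == x          (isfinite-style test when NaNs matter)
     x <  +Inf   =>  x <= DBL_MAX
     x == +Inf   =>  x > DBL_MAX
     x != +Inf   =>  !(x > DBL_MAX)  (when NaNs matter)  */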
/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_div_compare (location_t loc,
		  enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  double_int val;
  bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
  bool neg_overflow;
  int overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1);  */
  overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
				   TREE_INT_CST_HIGH (arg01),
				   TREE_INT_CST_LOW (arg1),
				   TREE_INT_CST_HIGH (arg1),
				   &val.low, &val.high, unsigned_p);
  prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
  neg_overflow = false;

  if (unsigned_p)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp).  */
      overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
				       TREE_INT_CST_HIGH (prod),
				       TREE_INT_CST_LOW (tmp),
				       TREE_INT_CST_HIGH (tmp),
				       &val.low, &val.high, unsigned_p);
      hi = force_fit_type_double (TREE_TYPE (arg00), val,
				  -1, overflow | TREE_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  neg_overflow = true;
	  lo = int_const_binop (MINUS_EXPR, prod, tmp);
	  hi = prod;
	  break;

	case 0:
	  lo = fold_negate_const (tmp, TREE_TYPE (arg0));
	  hi = tmp;
	  break;

	case 1:
	  hi = int_const_binop (PLUS_EXPR, prod, tmp);
	  lo = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  hi = int_const_binop (MINUS_EXPR, prod, tmp);
	  lo = prod;
	  break;

	case 0:
	  hi = fold_negate_const (tmp, TREE_TYPE (arg0));
	  lo = tmp;
	  break;

	case 1:
	  neg_overflow = true;
	  lo = int_const_binop (PLUS_EXPR, prod, tmp);
	  hi = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand_loc (loc, type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}
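
/* Editor's note: a worked example of fold_div_compare (illustrative):
   for signed x,

     x / 4 == 2   =>   8 <= x && x <= 11

   since truncating division maps exactly [8, 11] to 2; the overflow
   checks above handle products near the type's extremes.  */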
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of the test
   using a sign test.  Otherwise return NULL.  TYPE is the desired
   result type.  */

static tree
fold_single_bit_test_into_sign_test (location_t loc,
				     enum tree_code code, tree arg0, tree arg1,
				     tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      /* If we have (A & C) != 0 where C is the sign bit of A, convert
	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      if (arg00 != NULL_TREE
	  /* This is only a win if casting to a signed type is cheap,
	     i.e. when arg00's type is not a partial mode.  */
	  && TYPE_PRECISION (TREE_TYPE (arg00))
	     == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
	{
	  tree stype = signed_type_for (TREE_TYPE (arg00));
	  return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
				  result_type,
				  fold_convert_loc (loc, stype, arg00),
				  build_int_cst (stype, 0));
	}
    }

  return NULL_TREE;
}
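
/* Editor's note: a concrete instance of the sign-bit fold above
   (illustrative): for a 32-bit int x,

     (x & 0x80000000) != 0   =>   x < 0
     (x & 0x80000000) == 0   =>   x >= 0  */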
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */

tree
fold_single_bit_test (location_t loc, enum tree_code code,
		      tree arg0, tree arg1, tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem, one;

      /* First, see if we can fold the single bit test into a sign-bit
	 test.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
						 result_type);
      if (tem)
	return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
	 convert that into ((A >> C2) & 1).  Where C2 = log2(C).
	 Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
		      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert_loc (loc, intermediate_type, inner);

      if (bitnum != 0)
	inner = build2 (RSHIFT_EXPR, intermediate_type,
			inner, size_int (bitnum));

      one = build_int_cst (intermediate_type, 1);

      if (code == EQ_EXPR)
	inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);

      /* Make sure to return the proper type.  */
      inner = fold_convert_loc (loc, result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
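
/* Editor's note: a concrete instance of the shift form above
   (illustrative): for bit 3,

     (x & 8) != 0   =>   ((unsigned) x >> 3) & 1
     (x & 8) == 0   =>   (((unsigned) x >> 3) ^ 1) & 1

   with the AND emitted last so later folds can combine it.  */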
/* Check whether we are allowed to reorder operands arg0 and arg1,
   such that the evaluation of arg1 occurs before arg0.  */

static bool
reorder_operands_p (const_tree arg0, const_tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
	 && ! TREE_SIDE_EFFECTS (arg1);
}
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */

bool
tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
{
  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return 0;
  if (TREE_CODE (arg0) == INTEGER_CST)
    return 1;

  if (TREE_CODE (arg1) == REAL_CST)
    return 0;
  if (TREE_CODE (arg0) == REAL_CST)
    return 1;

  if (TREE_CODE (arg1) == FIXED_CST)
    return 0;
  if (TREE_CODE (arg0) == FIXED_CST)
    return 1;

  if (TREE_CODE (arg1) == COMPLEX_CST)
    return 0;
  if (TREE_CODE (arg0) == COMPLEX_CST)
    return 1;

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (optimize_function_for_size_p (cfun))
    return 0;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  /* It is preferable to swap two SSA_NAME to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return 1;

  /* Put SSA_NAMEs last.  */
  if (TREE_CODE (arg1) == SSA_NAME)
    return 0;
  if (TREE_CODE (arg0) == SSA_NAME)
    return 1;

  /* Put variables last.  */
  if (DECL_P (arg1))
    return 0;
  if (DECL_P (arg0))
    return 1;

  return 0;
}
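
/* Editor's note: illustrative outcomes of tree_swap_operands_p, which
   callers use to canonicalize commutative operands:

     tree_swap_operands_p (5, x, ...)   -> true   (constant moves second)
     tree_swap_operands_p (x, 5, ...)   -> false
     two SSA names swap when VERSION (arg0) > VERSION (arg1).  */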
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
   ARG0 is extended to a wider type.  */

static tree
fold_widened_comparison (location_t loc, enum tree_code code,
			 tree type, tree arg0, tree arg1)
{
  tree arg0_unw = get_unwidened (arg0, NULL_TREE);
  tree arg1_unw;
  tree shorter_type, outer_type;
  tree min, max;
  bool above, below;

  if (arg0_unw == arg0)
    return NULL_TREE;
  shorter_type = TREE_TYPE (arg0_unw);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (shorter_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
    return NULL_TREE;

  arg1_unw = get_unwidened (arg1, NULL_TREE);

  /* If possible, express the comparison in the shorter mode.  */
  if ((code == EQ_EXPR || code == NE_EXPR
       || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
      && (TREE_TYPE (arg1_unw) == shorter_type
	  || ((TYPE_PRECISION (shorter_type)
	       >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
	      && (TYPE_UNSIGNED (shorter_type)
		  == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
	  || (TREE_CODE (arg1_unw) == INTEGER_CST
	      && (TREE_CODE (shorter_type) == INTEGER_TYPE
		  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
	      && int_fits_type_p (arg1_unw, shorter_type))))
    return fold_build2_loc (loc, code, type, arg0_unw,
			    fold_convert_loc (loc, shorter_type, arg1_unw));

  if (TREE_CODE (arg1_unw) != INTEGER_CST
      || TREE_CODE (shorter_type) != INTEGER_TYPE
      || !int_fits_type_p (arg1_unw, shorter_type))
    return NULL_TREE;

  /* If we are comparing with the integer that does not fit into the range
     of the shorter type, the result is known.  */
  outer_type = TREE_TYPE (arg1_unw);
  min = lower_bound_in_type (outer_type, shorter_type);
  max = upper_bound_in_type (outer_type, shorter_type);

  above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   max, arg1_unw));
  below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   arg1_unw, min));

  switch (code)
    {
    case EQ_EXPR:
      if (above || below)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;

    case NE_EXPR:
      if (above || below)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;

    case LT_EXPR:
    case LE_EXPR:
      if (above)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      else if (below)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;

    case GT_EXPR:
    case GE_EXPR:
      if (above)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      else if (below)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
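
/* Editor's note: a worked example of the widened-comparison fold
   (illustrative): with 'unsigned short us',

     (int) us == 100000   =>   false   (100000 exceeds 65535)
     (int) us <  100000   =>   true

   while (int) us == 1234 is rewritten as us == (unsigned short) 1234.  */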
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
   ARG0 just the signedness is changed.  */

static tree
fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
			      tree arg0, tree arg1)
{
  tree arg0_inner;
  tree inner_type, outer_type;

  if (!CONVERT_EXPR_P (arg0))
    return NULL_TREE;

  outer_type = TREE_TYPE (arg0);
  arg0_inner = TREE_OPERAND (arg0, 0);
  inner_type = TREE_TYPE (arg0_inner);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (inner_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) != INTEGER_CST
      && !(CONVERT_EXPR_P (arg1)
	   && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
    return NULL_TREE;

  if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
       || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
      && code != NE_EXPR
      && code != EQ_EXPR)
    return NULL_TREE;

  if (TREE_CODE (arg1) == INTEGER_CST)
    arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
				  0, TREE_OVERFLOW (arg1));
  else
    arg1 = fold_convert_loc (loc, inner_type, arg1);

  return fold_build2_loc (loc, code, type, arg0_inner, arg1);
}
/* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
   step of the array.  Reconstructs s and delta in the case of s *
   delta being an integer constant (and thus already folded).  ADDR is
   the address.  MULT is the multiplicative expression.  If the
   function succeeds, the new address expression is returned.
   Otherwise NULL_TREE is returned.  LOC is the location of the
   resulting expression.  */

static tree
try_move_mult_to_index (location_t loc, tree addr, tree op1)
{
  tree s, delta, step;
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;
  bool mdim = false;

  /*  Strip the nops that might be added when converting op1 to sizetype. */
  STRIP_NOPS (op1);

  /* Canonicalize op1 into a possibly non-constant delta
     and an INTEGER_CST s.  */
  if (TREE_CODE (op1) == MULT_EXPR)
    {
      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  s = arg0;
	  delta = arg1;
	}
      else if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  s = arg1;
	  delta = arg0;
	}
      else
	return NULL_TREE;
    }
  else if (TREE_CODE (op1) == INTEGER_CST)
    {
      delta = op1;
      s = NULL_TREE;
    }
  else
    {
      /* Simulate we are delta * 1.  */
      delta = op1;
      s = integer_one_node;
    }

  /* Handle &x.array the same as we would handle &x.array[0].  */
  if (TREE_CODE (ref) == COMPONENT_REF
      && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
    {
      tree domain;

      /* Remember if this was a multi-dimensional array.  */
      if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
	mdim = true;

      domain = TYPE_DOMAIN (TREE_TYPE (ref));
      if (! domain)
	goto cont;
      itype = TREE_TYPE (domain);

      step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
      if (TREE_CODE (step) != INTEGER_CST)
	goto cont;

      if (s)
	{
	  if (! tree_int_cst_equal (step, s))
	    goto cont;
	}
      else
	{
	  /* Try if delta is a multiple of step.  */
	  tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
	  if (! tmp)
	    goto cont;
	  delta = tmp;
	}

      /* Only fold here if we can verify we do not overflow one
	 dimension of a multi-dimensional array.  */
      if (mdim)
	{
	  tree tmp;

	  if (!TYPE_MIN_VALUE (domain)
	      || !TYPE_MAX_VALUE (domain)
	      || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
	    goto cont;

	  tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
				 fold_convert_loc (loc, itype,
						   TYPE_MIN_VALUE (domain)),
				 fold_convert_loc (loc, itype, delta));
	  if (TREE_CODE (tmp) != INTEGER_CST
	      || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
	    goto cont;
	}

      /* We found a suitable component reference.  */

      pref = TREE_OPERAND (addr, 0);
      ret = copy_node (pref);
      SET_EXPR_LOCATION (ret, loc);

      ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
			fold_build2_loc
			  (loc, PLUS_EXPR, itype,
			   fold_convert_loc (loc, itype,
					     TYPE_MIN_VALUE
					       (TYPE_DOMAIN (TREE_TYPE (ref)))),
			   fold_convert_loc (loc, itype, delta)),
			NULL_TREE, NULL_TREE);
      return build_fold_addr_expr_loc (loc, ret);
    }

cont:

  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
	{
	  tree domain;

	  /* Remember if this was a multi-dimensional array.  */
	  if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
	    mdim = true;

	  domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
	  if (! domain)
	    continue;
	  itype = TREE_TYPE (domain);

	  step = array_ref_element_size (ref);
	  if (TREE_CODE (step) != INTEGER_CST)
	    continue;

	  if (s)
	    {
	      if (! tree_int_cst_equal (step, s))
		continue;
	    }
	  else
	    {
	      /* Try if delta is a multiple of step.  */
	      tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
	      if (! tmp)
		continue;
	      delta = tmp;
	    }

	  /* Only fold here if we can verify we do not overflow one
	     dimension of a multi-dimensional array.  */
	  if (mdim)
	    {
	      tree tmp;

	      if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
		  || !TYPE_MAX_VALUE (domain)
		  || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
		continue;

	      tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
				     fold_convert_loc (loc, itype,
						       TREE_OPERAND (ref, 1)),
				     fold_convert_loc (loc, itype, delta));
	      if (!tmp
		  || TREE_CODE (tmp) != INTEGER_CST
		  || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
		continue;
	    }

	  break;
	}
      else
	mdim = false;

      if (!handled_component_p (ref))
	return NULL_TREE;
    }

  /* We found the suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  SET_EXPR_LOCATION (ret, loc);
  pos = ret;

  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  TREE_OPERAND (pos, 1)
    = fold_build2_loc (loc, PLUS_EXPR, itype,
		       fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
		       fold_convert_loc (loc, itype, delta));
  return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
}
/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */

static tree
fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
{
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;

  if (TREE_CODE (bound) == LT_EXPR)
    a = TREE_OPERAND (bound, 0);
  else if (TREE_CODE (bound) == GT_EXPR)
    a = TREE_OPERAND (bound, 1);
  else
    return NULL_TREE;

  typea = TREE_TYPE (a);
  if (!INTEGRAL_TYPE_P (typea)
      && !POINTER_TYPE_P (typea))
    return NULL_TREE;

  if (TREE_CODE (ineq) == LT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 1);
      y = TREE_OPERAND (ineq, 0);
    }
  else if (TREE_CODE (ineq) == GT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 0);
      y = TREE_OPERAND (ineq, 1);
    }
  else
    return NULL_TREE;

  if (TREE_TYPE (a1) != typea)
    return NULL_TREE;

  if (POINTER_TYPE_P (typea))
    {
      /* Convert the pointer types into integer before taking the difference.  */
      tree ta = fold_convert_loc (loc, ssizetype, a);
      tree ta1 = fold_convert_loc (loc, ssizetype, a1);
      diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
    }
  else
    diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);

  if (!diff || !integer_onep (diff))
    return NULL_TREE;

  return fold_build2_loc (loc, GE_EXPR, type, a, y);
}
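
/* Editor's note: a concrete instance of the fold above (illustrative):

     a < x && a + 1 > y   =>   a < x && a >= y

   valid because the bound a < x rules out a == TYPE_MAX, the only value
   for which a + 1 > y and a >= y could disagree via overflow.  */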
/* Fold a sum or difference of at least one multiplication.
   Returns the folded tree or NULL if no simplification could be made.  */

static tree
fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
			  tree arg0, tree arg1)
{
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else if (TREE_CODE (arg0) == INTEGER_CST)
    {
      arg00 = build_one_cst (type);
      arg01 = arg0;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      arg00 = arg0;
      arg01 = build_one_cst (type);
    }
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else if (TREE_CODE (arg1) == INTEGER_CST)
    {
      arg10 = build_one_cst (type);
      /* As we canonicalize A - 2 to A + -2 get rid of that sign for
	 the purpose of this canonicalization.  */
      if (TREE_INT_CST_HIGH (arg1) == -1
	  && negate_expr_p (arg1)
	  && code == PLUS_EXPR)
	{
	  arg11 = negate_expr (arg1);
	  code = MINUS_EXPR;
	}
      else
	arg11 = arg1;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      arg10 = arg1;
      arg11 = build_one_cst (type);
    }
  same = NULL_TREE;

  if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
  else if (host_integerp (arg01, 0)
	   && host_integerp (arg11, 0))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = TREE_INT_CST_LOW (arg01);
      int11 = TREE_INT_CST_LOW (arg11);

      /* Move min of absolute values to int11.  */
      if (absu_hwi (int01) < absu_hwi (int11))
	{
	  tmp = int01, int01 = int11, int11 = tmp;
	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
	  maybe_same = arg01;
	  swap = true;
	}
      else
	maybe_same = arg11;

      if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
	  /* The remainder should not be a constant, otherwise we
	     end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
	     increased the number of multiplications necessary.  */
	  && TREE_CODE (arg10) != INTEGER_CST)
	{
	  alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
				  build_int_cst (TREE_TYPE (arg00),
						 int01 / int11));
	  alt1 = arg10;
	  same = maybe_same;
	  if (swap)
	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
	}
    }

  if (same)
    return fold_build2_loc (loc, MULT_EXPR, type,
			    fold_build2_loc (loc, code, type,
					     fold_convert_loc (loc, type, alt0),
					     fold_convert_loc (loc, type, alt1)),
			    fold_convert_loc (loc, type, same));

  return NULL_TREE;
}
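
/* Editor's note: illustrative instances of the folds above:

     a*c + b*c    =>  (a + b) * c
     a*c - a      =>  a * (c - 1)
     i*12 + j*4   =>  (i*3 + j) * 4   (common power-of-two factor)  */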
/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_int (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  if (total_bytes > len)
    return 0;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (bitpos < HOST_BITS_PER_WIDE_INT)
	value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
      else
	value = (unsigned char) (TREE_INT_CST_HIGH (expr)
				 >> (bitpos - HOST_BITS_PER_WIDE_INT));

      if (total_bytes > UNITS_PER_WORD)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      ptr[offset] = value;
    }
  return total_bytes;
}
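
/* Editor's note: an illustrative layout, not from the original sources:
   encoding the 32-bit INTEGER_CST 0x01020304 on a little-endian target
   fills PTR with { 0x04, 0x03, 0x02, 0x01 }, and on a big-endian target
   with { 0x01, 0x02, 0x03, 0x04 }; the word shuffling only matters when
   the value is wider than UNITS_PER_WORD.  */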
/* Subroutine of native_encode_expr.  Encode the REAL_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_real (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words, bitpos;
  unsigned char value;

  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  long tmp[6];

  if (total_bytes > len)
    return 0;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));

  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));

      if (UNITS_PER_WORD < 4)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
    }
  return total_bytes;
}
/* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_complex (const_tree expr, unsigned char *ptr, int len)
{
  int rsize, isize;
  tree part;

  part = TREE_REALPART (expr);
  rsize = native_encode_expr (part, ptr, len);
  if (rsize == 0)
    return 0;
  part = TREE_IMAGPART (expr);
  isize = native_encode_expr (part, ptr+rsize, len-rsize);
  if (isize != rsize)
    return 0;
  return rsize + isize;
}
/* Subroutine of native_encode_expr.  Encode the VECTOR_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_vector (const_tree expr, unsigned char *ptr, int len)
{
  unsigned i, count;
  int size, offset;
  tree itype, elem;

  offset = 0;
  count = VECTOR_CST_NELTS (expr);
  itype = TREE_TYPE (TREE_TYPE (expr));
  size = GET_MODE_SIZE (TYPE_MODE (itype));
  for (i = 0; i < count; i++)
    {
      elem = VECTOR_CST_ELT (expr, i);
      if (native_encode_expr (elem, ptr+offset, len-offset) != size)
	return 0;
      offset += size;
    }
  return offset;
}
/* Subroutine of native_encode_expr.  Encode the STRING_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_string (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  HOST_WIDE_INT total_bytes;

  if (TREE_CODE (type) != ARRAY_TYPE
      || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
      || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
      || !host_integerp (TYPE_SIZE_UNIT (type), 0))
    return 0;
  total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
  if (total_bytes > len)
    return 0;
  if (TREE_STRING_LENGTH (expr) < total_bytes)
    {
      memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
      memset (ptr + TREE_STRING_LENGTH (expr), 0,
	      total_bytes - TREE_STRING_LENGTH (expr));
    }
  else
    memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
  return total_bytes;
}
/* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
   REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
   buffer PTR of length LEN bytes.  Return the number of bytes
   placed in the buffer, or zero upon failure.  */

int
native_encode_expr (const_tree expr, unsigned char *ptr, int len)
{
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return native_encode_int (expr, ptr, len);

    case REAL_CST:
      return native_encode_real (expr, ptr, len);

    case COMPLEX_CST:
      return native_encode_complex (expr, ptr, len);

    case VECTOR_CST:
      return native_encode_vector (expr, ptr, len);

    case STRING_CST:
      return native_encode_string (expr, ptr, len);

    default:
      return 0;
    }
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_int (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;
  double_int result;

  if (total_bytes > len)
    return NULL_TREE;
  if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return NULL_TREE;

  result = double_int_zero;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (total_bytes > UNITS_PER_WORD)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      value = ptr[offset];

      if (bitpos < HOST_BITS_PER_WIDE_INT)
	result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
      else
	result.high |= (unsigned HOST_WIDE_INT) value
		       << (bitpos - HOST_BITS_PER_WIDE_INT);
    }

  return double_int_to_tree (type, result);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a REAL_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_real (tree type, const unsigned char *ptr, int len)
{
  enum machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  int byte, offset, word, words, bitpos;
  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  REAL_VALUE_TYPE r;
  long tmp[6];

  total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  if (total_bytes > len || total_bytes > 24)
    return NULL_TREE;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  memset (tmp, 0, sizeof (tmp));
  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      if (UNITS_PER_WORD < 4)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];

      tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
    }

  real_from_target (&r, tmp, mode);
  return build_real (type, r);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_complex (tree type, const unsigned char *ptr, int len)
{
  tree etype, rpart, ipart;
  int size;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  if (size * 2 > len)
    return NULL_TREE;
  rpart = native_interpret_expr (etype, ptr, size);
  if (!rpart)
    return NULL_TREE;
  ipart = native_interpret_expr (etype, ptr+size, size);
  if (!ipart)
    return NULL_TREE;
  return build_complex (type, rpart, ipart);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_vector (tree type, const unsigned char *ptr, int len)
{
  tree etype, elem;
  int i, size, count;
  tree *elements;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  count = TYPE_VECTOR_SUBPARTS (type);
  if (size * count > len)
    return NULL_TREE;

  elements = XALLOCAVEC (tree, count);
  for (i = count - 1; i >= 0; i--)
    {
      elem = native_interpret_expr (etype, ptr+(i*size), size);
      if (!elem)
	return NULL_TREE;
      elements[i] = elem;
    }
  return build_vector (type, elements);
}
/* Subroutine of fold_view_convert_expr.  Interpret the contents of
   the buffer PTR of length LEN as a constant of type TYPE.  For
   INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
   we return a REAL_CST, etc...  If the buffer cannot be interpreted,
   return NULL_TREE.  */

tree
native_interpret_expr (tree type, const unsigned char *ptr, int len)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      return native_interpret_int (type, ptr, len);

    case REAL_TYPE:
      return native_interpret_real (type, ptr, len);

    case COMPLEX_TYPE:
      return native_interpret_complex (type, ptr, len);

    case VECTOR_TYPE:
      return native_interpret_vector (type, ptr, len);

    default:
      return NULL_TREE;
    }
}
/* Returns true if we can interpret the contents of a native encoding
   as TYPE.  */

static bool
can_native_interpret_type_p (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case REAL_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
      return true;
    default:
      return false;
    }
}
/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
   TYPE at compile-time.  If we're unable to perform the conversion
   return NULL_TREE.  */

static tree
fold_view_convert_expr (tree type, tree expr)
{
  /* We support up to 512-bit values (for V8DFmode).  */
  unsigned char buffer[64];
  int len;

  /* Check that the host and target are sane.  */
  if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    return NULL_TREE;

  len = native_encode_expr (expr, buffer, sizeof (buffer));
  if (len == 0)
    return NULL_TREE;

  return native_interpret_expr (type, buffer, len);
}
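
/* Editor's note: an illustrative round trip through the two helpers
   above: folding VIEW_CONVERT_EXPR<int>(1.0f) encodes the REAL_CST as
   { 0x00, 0x00, 0x80, 0x3f } on a little-endian IEEE target and then
   interprets the bytes back as the INTEGER_CST 0x3f800000.  */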
/* Build an expression for the address of T.  Folds away INDIRECT_REF
   to avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  if (TREE_CODE (t) == INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);

      if (TREE_TYPE (t) != ptrtype)
	t = build1_loc (loc, NOP_EXPR, ptrtype, t);
    }
  else if (TREE_CODE (t) == MEM_REF
	   && integer_zerop (TREE_OPERAND (t, 1)))
    return TREE_OPERAND (t, 0);
  else if (TREE_CODE (t) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
    return fold_binary (POINTER_PLUS_EXPR, ptrtype,
			TREE_OPERAND (t, 0),
			convert_to_ptrofftype (TREE_OPERAND (t, 1)));
  else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
    {
      t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));

      if (TREE_TYPE (t) != ptrtype)
	t = fold_convert_loc (loc, ptrtype, t);
    }
  else
    t = build1_loc (loc, ADDR_EXPR, ptrtype, t);

  return t;
}
/* Build an expression for the address of T.  */

tree
build_fold_addr_expr_loc (location_t loc, tree t)
{
  tree ptrtype = build_pointer_type (TREE_TYPE (t));

  return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
}

static bool vec_cst_ctor_to_array (tree, tree *);
/* Fold a unary expression of code CODE and type TYPE with operand
   OP0.  Return the folded expression if folding is successful.
   Otherwise, return NULL_TREE.  */

tree
fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
{
  tree tem;
  tree arg0;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 1);

  arg0 = op0;
  if (arg0)
    {
      if (CONVERT_EXPR_CODE_P (code)
	  || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
	{
	  /* Don't use STRIP_NOPS, because signedness of argument type
	     matters.  */
	  STRIP_SIGN_NOPS (arg0);
	}
      else
	{
	  /* Strip any conversions that don't change the mode.  This
	     is safe for every expression, except for a comparison
	     expression because its signedness is derived from its
	     operands.

	     Note that this is done as an internal manipulation within
	     the constant folder, in order to find the simplest
	     representation of the arguments so that their form can be
	     studied.  In any cases, the appropriate type conversions
	     should be put back in the tree that will get out of the
	     constant folder.  */
	  STRIP_NOPS (arg0);
	}
    }

  if (TREE_CODE_CLASS (code) == tcc_unary)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
		       fold_build1_loc (loc, code, type,
					fold_convert_loc (loc, TREE_TYPE (op0),
							  TREE_OPERAND (arg0, 1))));
      else if (TREE_CODE (arg0) == COND_EXPR)
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg02 = TREE_OPERAND (arg0, 2);
	  if (! VOID_TYPE_P (TREE_TYPE (arg01)))
	    arg01 = fold_build1_loc (loc, code, type,
				     fold_convert_loc (loc,
						       TREE_TYPE (op0), arg01));
	  if (! VOID_TYPE_P (TREE_TYPE (arg02)))
	    arg02 = fold_build1_loc (loc, code, type,
				     fold_convert_loc (loc,
						       TREE_TYPE (op0), arg02));
	  tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
				 arg01, arg02);

	  /* If this was a conversion, and all we did was to move into
	     inside the COND_EXPR, bring it back out.  But leave it if
	     it is a conversion from integer to integer and the
	     result precision is no wider than a word since such a
	     conversion is cheap and may be optimized away by combine,
	     while it couldn't if it were outside the COND_EXPR.  Then return
	     so we don't get into an infinite recursion loop taking the
	     conversion out and then back in.  */

	  if ((CONVERT_EXPR_CODE_P (code)
	       || code == NON_LVALUE_EXPR)
	      && TREE_CODE (tem) == COND_EXPR
	      && TREE_CODE (TREE_OPERAND (tem, 1)) == code
	      && TREE_CODE (TREE_OPERAND (tem, 2)) == code
	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
	      && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
	      && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
		     && (INTEGRAL_TYPE_P
			 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
		     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
		  || flag_syntax_only))
	    tem = build1_loc (loc, code, type,
			      build3 (COND_EXPR,
				      TREE_TYPE (TREE_OPERAND
						 (TREE_OPERAND (tem, 1), 0)),
				      TREE_OPERAND (tem, 0),
				      TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
				      TREE_OPERAND (TREE_OPERAND (tem, 2),
						    0)));
	  return tem;
	}
    }

  switch (code)
    {
    case PAREN_EXPR:
      /* Re-association barriers around constants and other re-association
	 barriers can be removed.  */
      if (CONSTANT_CLASS_P (op0)
	  || TREE_CODE (op0) == PAREN_EXPR)
	return fold_convert_loc (loc, type, op0);
      return NULL_TREE;
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      if (TREE_TYPE (op0) == type)
	return op0;

      if (COMPARISON_CLASS_P (op0))
	{
	  /* If we have (type) (a CMP b) and type is an integral type, return
	     new expression involving the new type.  Canonicalize
	     (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
	     non-integral type.
	     Do not fold the result as that would not simplify further, also
	     folding again results in recursions.  */
	  if (TREE_CODE (type) == BOOLEAN_TYPE)
	    return build2_loc (loc, TREE_CODE (op0), type,
			       TREE_OPERAND (op0, 0),
			       TREE_OPERAND (op0, 1));
	  else if (!INTEGRAL_TYPE_P (type))
	    return build3_loc (loc, COND_EXPR, type, op0,
			       constant_boolean_node (true, type),
			       constant_boolean_node (false, type));
	}

      /* Handle cases of two conversions in a row.  */
      if (CONVERT_EXPR_P (op0))
	{
	  tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
	  tree inter_type = TREE_TYPE (op0);
	  int inside_int = INTEGRAL_TYPE_P (inside_type);
	  int inside_ptr = POINTER_TYPE_P (inside_type);
	  int inside_float = FLOAT_TYPE_P (inside_type);
	  int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
	  unsigned int inside_prec = TYPE_PRECISION (inside_type);
	  int inside_unsignedp = TYPE_UNSIGNED (inside_type);
	  int inter_int = INTEGRAL_TYPE_P (inter_type);
	  int inter_ptr = POINTER_TYPE_P (inter_type);
	  int inter_float = FLOAT_TYPE_P (inter_type);
	  int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
	  unsigned int inter_prec = TYPE_PRECISION (inter_type);
	  int inter_unsignedp = TYPE_UNSIGNED (inter_type);
	  int final_int = INTEGRAL_TYPE_P (type);
	  int final_ptr = POINTER_TYPE_P (type);
	  int final_float = FLOAT_TYPE_P (type);
	  int final_vec = TREE_CODE (type) == VECTOR_TYPE;
	  unsigned int final_prec = TYPE_PRECISION (type);
	  int final_unsignedp = TYPE_UNSIGNED (type);

	  /* In addition to the cases of two conversions in a row
	     handled below, if we are converting something to its own
	     type via an object of identical or wider precision, neither
	     conversion is needed.  */
	  if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
	      && (((inter_int || inter_ptr) && final_int)
		  || (inter_float && final_float))
	      && inter_prec >= final_prec)
	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));

	  /* Likewise, if the intermediate and initial types are either both
	     float or both integer, we don't need the middle conversion if the
	     former is wider than the latter and doesn't change the signedness
	     (for integers).  Avoid this if the final type is a pointer since
	     then we sometimes need the middle conversion.  Likewise if the
	     final type has a precision not equal to the size of its mode.  */
	  if (((inter_int && inside_int)
	       || (inter_float && inside_float)
	       || (inter_vec && inside_vec))
	      && inter_prec >= inside_prec
	      && (inter_float || inter_vec
		  || inter_unsignedp == inside_unsignedp)
	      && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
		    && TYPE_MODE (type) == TYPE_MODE (inter_type))
	      && ! final_ptr
	      && (! final_vec || inter_prec == inside_prec))
	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));

	  /* If we have a sign-extension of a zero-extended value, we can
	     replace that by a single zero-extension.  Likewise if the
	     final conversion does not change precision we can drop the
	     intermediate conversion.  */
	  if (inside_int && inter_int && final_int
	      && ((inside_prec < inter_prec && inter_prec < final_prec
		   && inside_unsignedp && !inter_unsignedp)
		  || final_prec == inter_prec))
	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));

	  /* Two conversions in a row are not needed unless:
	     - some conversion is floating-point (overstrict for now), or
	     - some conversion is a vector (overstrict for now), or
	     - the intermediate type is narrower than both initial and
	       final, or
	     - the intermediate type and innermost type differ in signedness,
	       and the outermost type is wider than the intermediate, or
	     - the initial type is a pointer type and the precisions of the
	       intermediate and final types differ, or
	     - the final type is a pointer type and the precisions of the
	       initial and intermediate types differ.  */
	  if (! inside_float && ! inter_float && ! final_float
	      && ! inside_vec && ! inter_vec && ! final_vec
	      && (inter_prec >= inside_prec || inter_prec >= final_prec)
	      && ! (inside_int && inter_int
		    && inter_unsignedp != inside_unsignedp
		    && inter_prec < final_prec)
	      && ((inter_unsignedp && inter_prec > inside_prec)
		  == (final_unsignedp && final_prec > inter_prec))
	      && ! (inside_ptr && inter_prec != final_prec)
	      && ! (final_ptr && inside_prec != inter_prec)
	      && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
		    && TYPE_MODE (type) == TYPE_MODE (inter_type)))
	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
	}
      /* Handle (T *)&A.B.C for A being of type T and B and C
	 living at offset zero.  This occurs frequently in
	 C++ upcasting and then accessing the base.  */
      if (TREE_CODE (op0) == ADDR_EXPR
	  && POINTER_TYPE_P (type)
	  && handled_component_p (TREE_OPERAND (op0, 0)))
	{
	  HOST_WIDE_INT bitsize, bitpos;
	  tree offset;
	  enum machine_mode mode;
	  int unsignedp, volatilep;
	  tree base = TREE_OPERAND (op0, 0);
	  base = get_inner_reference (base, &bitsize, &bitpos, &offset,
				      &mode, &unsignedp, &volatilep, false);
	  /* If the reference was to a (constant) zero offset, we can use
	     the address of the base if it has the same base type
	     as the result type and the pointer type is unqualified.  */
	  if (! offset && bitpos == 0
	      && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
		  == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
	      && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
	    return fold_convert_loc (loc, type,
				     build_fold_addr_expr_loc (loc, base));
	}

      if (TREE_CODE (op0) == MODIFY_EXPR
	  && TREE_CONSTANT (TREE_OPERAND (op0, 1))
	  /* Detect assigning a bitfield.  */
	  && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
	       && DECL_BIT_FIELD
		  (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
	{
	  /* Don't leave an assignment inside a conversion
	     unless assigning a bitfield.  */
	  tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
	  /* First do the assignment, then return converted constant.  */
	  tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
	  TREE_NO_WARNING (tem) = 1;
	  TREE_USED (tem) = 1;
	  return tem;
	}
      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
	 constant (if x has signed type, the sign bit cannot be set
	 in c).  This folds extension into the BIT_AND_EXPR.
	 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
	 very likely don't have maximal range for their precision and this
	 transformation effectively doesn't preserve non-maximal ranges.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TREE_CODE (op0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
	{
	  tree and_expr = op0;
	  tree and0 = TREE_OPERAND (and_expr, 0);
	  tree and1 = TREE_OPERAND (and_expr, 1);
	  int change = 0;

	  if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
	      || (TYPE_PRECISION (type)
		  <= TYPE_PRECISION (TREE_TYPE (and_expr))))
	    change = 1;
	  else if (TYPE_PRECISION (TREE_TYPE (and1))
		   <= HOST_BITS_PER_WIDE_INT
		   && host_integerp (and1, 1))
	    {
	      unsigned HOST_WIDE_INT cst;

	      cst = tree_low_cst (and1, 1);
	      cst &= (HOST_WIDE_INT) -1
		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
	      change = (cst == 0);
#ifdef LOAD_EXTEND_OP
	      if (change
		  && !flag_syntax_only
		  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
		      == ZERO_EXTEND))
		{
		  tree uns = unsigned_type_for (TREE_TYPE (and0));
		  and0 = fold_convert_loc (loc, uns, and0);
		  and1 = fold_convert_loc (loc, uns, and1);
		}
#endif
	    }
	  if (change)
	    {
	      tem = force_fit_type_double (type, tree_to_double_int (and1),
					   0, TREE_OVERFLOW (and1));
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_convert_loc (loc, type, and0), tem);
	    }
	}
      /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
	 when one of the new casts will fold away.  Conservatively we assume
	 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST.  */
      if (POINTER_TYPE_P (type)
	  && TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
	  && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
	      || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);

	  return fold_build_pointer_plus_loc
		   (loc, fold_convert_loc (loc, type, arg00), arg01);
	}

      /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
	 of the same precision, and X is an integer type not narrower than
	 types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (op0) == BIT_NOT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
	  && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
	{
	  tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
	      && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
	    return fold_build1_loc (loc, BIT_NOT_EXPR, type,
				    fold_convert_loc (loc, type, tem));
	}

      /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
	 type of X and Y (integer types only).  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (op0) == MULT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
	{
	  /* Be careful not to introduce new overflows.  */
	  tree mult_type;
	  if (TYPE_OVERFLOW_WRAPS (type))
	    mult_type = type;
	  else
	    mult_type = unsigned_type_for (type);

	  if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
	    {
	      tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
				     fold_convert_loc (loc, mult_type,
						       TREE_OPERAND (op0, 0)),
				     fold_convert_loc (loc, mult_type,
						       TREE_OPERAND (op0, 1)));
	      return fold_convert_loc (loc, type, tem);
	    }
	}

      tem = fold_convert_const (code, type, op0);
      return tem ? tem : NULL_TREE;
    case ADDR_SPACE_CONVERT_EXPR:
      if (integer_zerop (arg0))
	return fold_convert_const (code, type, arg0);
      return NULL_TREE;

    case FIXED_CONVERT_EXPR:
      tem = fold_convert_const (code, type, arg0);
      return tem ? tem : NULL_TREE;
    case VIEW_CONVERT_EXPR:
      if (TREE_TYPE (op0) == type)
	return op0;
      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
	return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
				type, TREE_OPERAND (op0, 0));
      if (TREE_CODE (op0) == MEM_REF)
	return fold_build2_loc (loc, MEM_REF, type,
				TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));

      /* For integral conversions with the same precision or pointer
	 conversions use a NOP_EXPR instead.  */
      if ((INTEGRAL_TYPE_P (type)
	   || POINTER_TYPE_P (type))
	  && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
	      || POINTER_TYPE_P (TREE_TYPE (op0)))
	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
	return fold_convert_loc (loc, type, op0);

      /* Strip inner integral conversions that do not change the
	 precision.  */
      if (CONVERT_EXPR_P (op0)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
	      || POINTER_TYPE_P (TREE_TYPE (op0)))
	  && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
	      || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
	  && (TYPE_PRECISION (TREE_TYPE (op0))
	      == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
	return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
				type, TREE_OPERAND (op0, 0));

      return fold_view_convert_expr (type, op0);
    case NEGATE_EXPR:
      tem = fold_negate_expr (loc, arg0);
      if (tem)
	return fold_convert_loc (loc, type, tem);
      return NULL_TREE;
    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
	       && TREE_CODE (type) == REAL_TYPE)
	{
	  tree targ0 = strip_float_extensions (arg0);
	  if (targ0 != arg0)
	    return fold_convert_loc (loc, type,
				     fold_build1_loc (loc, ABS_EXPR,
						      TREE_TYPE (targ0),
						      targ0));
	}
      /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
      else if (TREE_CODE (arg0) == ABS_EXPR)
	return arg0;
      else if (tree_expr_nonnegative_p (arg0))
	return arg0;

      /* Strip sign ops from argument.  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = fold_strip_sign_ops (arg0);
	  if (tem)
	    return fold_build1_loc (loc, ABS_EXPR, type,
				    fold_convert_loc (loc, type, tem));
	}
      return NULL_TREE;
    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	{
	  tree itype = TREE_TYPE (type);
	  tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
	  tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
				  negate_expr (ipart));
	}
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree itype = TREE_TYPE (type);
	  tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
	  tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
	  return build_complex (type, rpart, negate_expr (ipart));
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      return NULL_TREE;
    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      /* Convert ~ (-A) to A - 1.  */
      else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build2_loc (loc, MINUS_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)),
				build_int_cst (type, 1));
      /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
      else if (INTEGRAL_TYPE_P (type)
	       && ((TREE_CODE (arg0) == MINUS_EXPR
		    && integer_onep (TREE_OPERAND (arg0, 1)))
		   || (TREE_CODE (arg0) == PLUS_EXPR
		       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
	return fold_build1_loc (loc, NEGATE_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));
      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
					 fold_convert_loc (loc, type,
							   TREE_OPERAND (arg0, 0)))))
	return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
					 fold_convert_loc (loc, type,
							   TREE_OPERAND (arg0, 1)))))
	return fold_build2_loc (loc, BIT_XOR_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)), tem);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree *elements;
	  tree elem;
	  unsigned count = VECTOR_CST_NELTS (arg0), i;

	  elements = XALLOCAVEC (tree, count);
	  for (i = 0; i < count; i++)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
		break;
	      elements[i] = elem;
	    }
	  if (i == count)
	    return build_vector (type, elements);
	}
      return NULL_TREE;
    case TRUTH_NOT_EXPR:
      /* The argument to invert_truthvalue must have Boolean type.  */
      if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
	arg0 = fold_convert_loc (loc, boolean_type_node, arg0);

      /* Note that the operand of this must be an int
	 and its values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language,
	 but we don't handle values other than 1 correctly yet.)  */
      tem = fold_truth_not_expr (loc, arg0);
      if (!tem)
	return NULL_TREE;
      return fold_convert_loc (loc, type, tem);
    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
				     TREE_OPERAND (arg0, 1));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert_loc (loc, type, TREE_REALPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
				 fold_build1_loc (loc, REALPART_EXPR, itype,
						  TREE_OPERAND (arg0, 0)),
				 fold_build1_loc (loc, REALPART_EXPR, itype,
						  TREE_OPERAND (arg0, 1)));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1_loc (loc, REALPART_EXPR, itype,
				 TREE_OPERAND (arg0, 0));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
		fn = mathfn_built_in (type, BUILT_IN_COS);
		if (fn)
		  return build_call_expr_loc (loc, fn, 1,
					      CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      return NULL_TREE;
    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return build_zero_cst (type);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
				     TREE_OPERAND (arg0, 0));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
				 fold_build1_loc (loc, IMAGPART_EXPR, itype,
						  TREE_OPERAND (arg0, 0)),
				 fold_build1_loc (loc, IMAGPART_EXPR, itype,
						  TREE_OPERAND (arg0, 1)));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1_loc (loc, IMAGPART_EXPR, itype,
				 TREE_OPERAND (arg0, 0));
	  return fold_convert_loc (loc, type, negate_expr (tem));
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
		fn = mathfn_built_in (type, BUILT_IN_SIN);
		if (fn)
		  return build_call_expr_loc (loc, fn, 1,
					      CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      return NULL_TREE;
    case INDIRECT_REF:
      /* Fold *&X to X if X is an lvalue.  */
      if (TREE_CODE (op0) == ADDR_EXPR)
	{
	  tree op00 = TREE_OPERAND (op0, 0);
	  if ((TREE_CODE (op00) == VAR_DECL
	       || TREE_CODE (op00) == PARM_DECL
	       || TREE_CODE (op00) == RESULT_DECL)
	      && !TREE_READONLY (op00))
	    return op00;
	}
      return NULL_TREE;
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;
	enum tree_code subcode;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 2);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR))
	  elts += nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else
	  subcode = FLOAT_EXPR;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* If the operation was a conversion do _not_ mark a resulting constant
   with TREE_OVERFLOW if the original constant was not.  These conversions
   have implementation defined behavior and retaining the TREE_OVERFLOW
   flag here would confuse later passes such as VRP.  */

tree
fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
				tree type, tree op0)
{
  tree res = fold_unary_loc (loc, code, type, op0);
  if (res
      && TREE_CODE (res) == INTEGER_CST
      && TREE_CODE (op0) == INTEGER_CST
      && CONVERT_EXPR_CODE_P (code))
    TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);

  return res;
}
/* Fold a binary bitwise/truth expression of code CODE and type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting expression.
   ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_truth_andor (location_t loc, enum tree_code code, tree type,
		  tree arg0, tree arg1, tree op0, tree op1)
{
  tree tem;

  /* We only do these simplifications if we are optimizing.  */
  if (!optimize)
    return NULL_TREE;

  /* Check for things like (A || B) && (A || C).  We can convert this
     to A || (B && C).  Note that either operator can be any of the four
     truth and/or operations and the transformation will still be
     valid.   Also note that we only care about order for the
     ANDIF and ORIF operators.  If B contains side effects, this
     might change the truth-value of A.  */
  if (TREE_CODE (arg0) == TREE_CODE (arg1)
      && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
	  || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
	  || TREE_CODE (arg0) == TRUTH_AND_EXPR
	  || TREE_CODE (arg0) == TRUTH_OR_EXPR)
      && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
    {
      tree a00 = TREE_OPERAND (arg0, 0);
      tree a01 = TREE_OPERAND (arg0, 1);
      tree a10 = TREE_OPERAND (arg1, 0);
      tree a11 = TREE_OPERAND (arg1, 1);
      int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
			  || TREE_CODE (arg0) == TRUTH_AND_EXPR)
			 && (code == TRUTH_AND_EXPR
			     || code == TRUTH_OR_EXPR));

      if (operand_equal_p (a00, a10, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
				fold_build2_loc (loc, code, type, a01, a11));
      else if (commutative && operand_equal_p (a00, a11, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
				fold_build2_loc (loc, code, type, a01, a10));
      else if (commutative && operand_equal_p (a01, a10, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
				fold_build2_loc (loc, code, type, a00, a11));

      /* This case is tricky because we must either have commutative
	 operators or else A10 must not have side-effects.  */

      else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
	       && operand_equal_p (a01, a11, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type,
				fold_build2_loc (loc, code, type, a00, a10),
				a01);
    }

  /* See if we can build a range comparison.  */
  if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
    return tem;

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
      if (tem)
	return fold_build2_loc (loc, code, type, tem, arg1);
    }

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
      if (tem)
	return fold_build2_loc (loc, code, type, arg0, tem);
    }

  /* Check for the possibility of merging component references.  If our
     lhs is another similar operation, try to merge its rhs with our
     rhs.  Then try to merge our lhs and rhs.  */
  if (TREE_CODE (arg0) == code
      && 0 != (tem = fold_truth_andor_1 (loc, code, type,
					 TREE_OPERAND (arg0, 1), arg1)))
    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

  if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
    return tem;

  if ((BRANCH_COST (optimize_function_for_speed_p (cfun),
		    false) >= 2)
      && LOGICAL_OP_NON_SHORT_CIRCUIT
      && (code == TRUTH_AND_EXPR
	  || code == TRUTH_ANDIF_EXPR
	  || code == TRUTH_OR_EXPR
	  || code == TRUTH_ORIF_EXPR))
    {
      enum tree_code ncode, icode;

      ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
	      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
      icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;

      /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
	 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
	 We don't want to pack more than two leafs to a non-IF AND/OR
	 expression.
	 If tree-code of left-hand operand isn't an AND/OR-IF code and not
	 equal to IF-CODE, then we don't want to add right-hand operand.
	 If the inner right-hand side of left-hand operand has
	 side-effects, or isn't simple, then we can't add to it,
	 as otherwise we might destroy if-sequence.  */
      if (TREE_CODE (arg0) == icode
	  && simple_operand_p_2 (arg1)
	  /* Needed for sequence points to handle trappings, and
	     side-effects.  */
	  && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
	{
	  tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
				 arg1);
	  return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
				  tem);
	}
      /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
	 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
      else if (TREE_CODE (arg1) == icode
	       && simple_operand_p_2 (arg0)
	       /* Needed for sequence points to handle trappings, and
		  side-effects.  */
	       && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_build2_loc (loc, ncode, type,
				 arg0, TREE_OPERAND (arg1, 0));
	  return fold_build2_loc (loc, icode, type, tem,
				  TREE_OPERAND (arg1, 1));
	}
      /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
	 into (A OR B).
	 For sequence point consistency, we need to check for trapping,
	 and side-effects.  */
      else if (code == icode && simple_operand_p_2 (arg0)
	       && simple_operand_p_2 (arg1))
	return fold_build2_loc (loc, ncode, type, arg0, arg1);
    }

  return NULL_TREE;
}
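/* Illustrative sketch (not from the original sources): on targets where
   LOGICAL_OP_NON_SHORT_CIRCUIT holds, the tail of fold_truth_andor turns

     if (a && b) ...        i.e. (A AND-IF B)

   into the unconditional form (A AND B) when both operands are simple
   and can neither trap nor have side-effects, trading a branch for a
   straight-line logical operation.  */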
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code compl_code;

  if (code == MIN_EXPR)
    compl_code = MAX_EXPR;
  else if (code == MAX_EXPR)
    compl_code = MIN_EXPR;
  else
    gcc_unreachable ();

  /* MIN (MAX (a, b), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));

  /* MIN (MAX (b, a), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
      && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));

  /* MIN (a, MAX (a, b)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));

  /* MIN (a, MAX (b, a)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));

  return NULL_TREE;
}
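/* Illustrative sketch (not from the original sources): since
   MAX (a, b) >= b always holds, MIN (MAX (a, b), b) is just b, so

     int x = MIN (MAX (a, b), b);

   folds to  x = b;  while omit_one_operand keeps A around for its
   side-effects.  */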
/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
   valid if signed overflow is undefined.  */

static tree
maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
				 tree arg0, tree arg1,
				 bool *strict_overflow_p)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
  int sgn0;
  bool swap = false;

  /* Match A +- CST code arg1 and CST code arg1.  We can change the
     first form only if overflow is undefined.  */
  if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
	 /* In principle pointers also have undefined overflow behavior,
	    but that causes problems elsewhere.  */
	 && !POINTER_TYPE_P (TREE_TYPE (arg0))
	 && (code0 == MINUS_EXPR
	     || code0 == PLUS_EXPR)
	 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	|| code0 == INTEGER_CST))
    return NULL_TREE;

  /* Identify the constant in arg0 and its sign.  */
  if (code0 == INTEGER_CST)
    cst0 = arg0;
  else
    cst0 = TREE_OPERAND (arg0, 1);
  sgn0 = tree_int_cst_sgn (cst0);

  /* Overflowed constants and zero will cause problems.  */
  if (integer_zerop (cst0)
      || TREE_OVERFLOW (cst0))
    return NULL_TREE;

  /* See if we can reduce the magnitude of the constant in
     arg0 by changing the comparison code.  */
  if (code0 == INTEGER_CST)
    {
      /* CST <= arg1  ->  CST-1 < arg1.  */
      if (code == LE_EXPR && sgn0 == 1)
	code = LT_EXPR;
      /* -CST < arg1  ->  -CST-1 <= arg1.  */
      else if (code == LT_EXPR && sgn0 == -1)
	code = LE_EXPR;
      /* CST > arg1  ->  CST-1 >= arg1.  */
      else if (code == GT_EXPR && sgn0 == 1)
	code = GE_EXPR;
      /* -CST >= arg1  ->  -CST-1 > arg1.  */
      else if (code == GE_EXPR && sgn0 == -1)
	code = GT_EXPR;
      else
	return NULL_TREE;
      /* arg1 code' CST' might be more canonical.  */
      swap = true;
    }
  else
    {
      /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
      if (code == LT_EXPR
	  && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = LE_EXPR;
      /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
      else if (code == GT_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = GE_EXPR;
      /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
      else if (code == LE_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = LT_EXPR;
      /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
      else if (code == GE_EXPR
	       && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = GT_EXPR;
      else
	return NULL_TREE;
      *strict_overflow_p = true;
    }

  /* Now build the constant reduced in magnitude.  But not if that
     would produce one outside of its types range.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
      && ((sgn0 == 1
	   && TYPE_MIN_VALUE (TREE_TYPE (cst0))
	   && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
	  || (sgn0 == -1
	      && TYPE_MAX_VALUE (TREE_TYPE (cst0))
	      && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
    /* We cannot swap the comparison here as that would cause us to
       endlessly recurse.  */
    return NULL_TREE;

  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
		       cst0, build_int_cst (TREE_TYPE (cst0), 1));
  if (code0 != INTEGER_CST)
    t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
  t = fold_convert (TREE_TYPE (arg1), t);

  /* If swapping might yield a more canonical form, do so.  */
  if (swap)
    return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
  else
    return fold_build2_loc (loc, code, type, t, arg1);
}
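/* Illustrative sketch (not from the original sources): for signed x
   under -fstrict-overflow the helper above rewrites, e.g.,

     x + 2 > y    as    x + 1 >= y

   shrinking the constant, and canonicalizes a sole constant on the
   left,

     10 <= y      as    y > 9

   by reducing the constant and swapping the operands.  */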
/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
   overflow further.  Try to decrease the magnitude of constants involved
   by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
   and put sole constants at the second argument position.
   Returns the canonicalized tree if changed, otherwise NULL_TREE.  */

static tree
maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
			       tree arg0, tree arg1)
{
  tree t;
  bool strict_overflow_p;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when reducing constant in comparison");

  /* Try canonicalization by simplifying arg0.  */
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
				       &strict_overflow_p);
  if (t)
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
      return t;
    }

  /* Try canonicalization by simplifying arg1 using the swapped
     comparison.  */
  code = swap_tree_comparison (code);
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
				       &strict_overflow_p);
  if (t && strict_overflow_p)
    fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
  return t;
}
/* Return whether BASE + OFFSET + BITPOS may wrap around the address
   space.  This is used to avoid issuing overflow warnings for
   expressions like &p->x which can not wrap.  */

static bool
pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
{
  unsigned HOST_WIDE_INT offset_low, total_low;
  HOST_WIDE_INT size, offset_high, total_high;

  if (!POINTER_TYPE_P (TREE_TYPE (base)))
    return true;

  if (bitpos < 0)
    return true;

  if (offset == NULL_TREE)
    {
      offset_low = 0;
      offset_high = 0;
    }
  else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
    return true;
  else
    {
      offset_low = TREE_INT_CST_LOW (offset);
      offset_high = TREE_INT_CST_HIGH (offset);
    }

  if (add_double_with_sign (offset_low, offset_high,
			    bitpos / BITS_PER_UNIT, 0,
			    &total_low, &total_high,
			    true))
    return true;

  if (total_high != 0)
    return true;

  size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
  if (size <= 0)
    return true;

  /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
     array.  */
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      HOST_WIDE_INT base_size;

      base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
      if (base_size > 0 && size < base_size)
	size = base_size;
    }

  return total_low > (unsigned HOST_WIDE_INT) size;
}
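/* Illustrative sketch (not from the original sources): given

     struct S { int a; int b; } *p;

   the address &p->b adds a small constant offset to P, so the sum
   cannot wrap around the address space and comparisons against it need
   no overflow warning.  */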
/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary should call fold_binary.  Fold a comparison with
   tree code CODE and type TYPE with operands OP0 and OP1.  Return
   the folded comparison or NULL_TREE.  */

static tree
fold_comparison (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  tree arg0, arg1, tem;

  arg0 = op0;
  arg1 = op1;

  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  tem = fold_relational_const (code, type, arg0, arg1);
  if (tem != NULL_TREE)
    return tem;

  /* If one arg is a real or integer constant, put it last.  */
  if (tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);

  /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1.  */
  if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
      && (TREE_CODE (arg1) == INTEGER_CST
	  && !TREE_OVERFLOW (arg1)))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;
      tree variable = TREE_OPERAND (arg0, 0);
      tree lhs;
      int lhs_add;
      lhs_add = TREE_CODE (arg0) != PLUS_EXPR;

      lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
			     TREE_TYPE (arg1), const2, const1);

      /* If the constant operation overflowed this can be
	 simplified as a comparison against INT_MAX/INT_MIN.  */
      if (TREE_CODE (lhs) == INTEGER_CST
	  && TREE_OVERFLOW (lhs))
	{
	  int const1_sgn = tree_int_cst_sgn (const1);
	  enum tree_code code2 = code;

	  /* Get the sign of the constant on the lhs if the
	     operation were VARIABLE + CONST1.  */
	  if (TREE_CODE (arg0) == MINUS_EXPR)
	    const1_sgn = -const1_sgn;

	  /* The sign of the constant determines if we overflowed
	     INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
	     Canonicalize to the INT_MIN overflow by swapping the comparison
	     if necessary.  */
	  if (const1_sgn == -1)
	    code2 = swap_tree_comparison (code);

	  /* We now can look at the canonicalized case
	       VARIABLE + 1  CODE2  INT_MIN
	     and decide on the result.  */
	  if (code2 == LT_EXPR
	      || code2 == LE_EXPR
	      || code2 == EQ_EXPR)
	    return omit_one_operand_loc (loc, type, boolean_false_node, variable);
	  else if (code2 == NE_EXPR
		   || code2 == GE_EXPR
		   || code2 == GT_EXPR)
	    return omit_one_operand_loc (loc, type, boolean_true_node, variable);
	}

      if (TREE_CODE (lhs) == TREE_CODE (arg1)
	  && (TREE_CODE (lhs) != INTEGER_CST
	      || !TREE_OVERFLOW (lhs)))
	{
	  if (code != EQ_EXPR && code != NE_EXPR)
	    fold_overflow_warning ("assuming signed overflow does not occur "
				   "when changing X +- C1 cmp C2 to "
				   "X cmp C1 +- C2",
				   WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type, variable, lhs);
	}
    }

  /* For comparisons of pointers we can decompose it to a compile time
     comparison of the base objects and the offsets into the object.
     This requires at least one operand being an ADDR_EXPR or a
     POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == ADDR_EXPR
	  || TREE_CODE (arg1) == ADDR_EXPR
	  || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
    {
      tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
      HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
      enum machine_mode mode;
      int volatilep, unsignedp;
      bool indirect_base0 = false, indirect_base1 = false;

      /* Get base and offset for the access.  Strip ADDR_EXPR for
	 get_inner_reference, but put it back by stripping INDIRECT_REF
	 off the base object if possible.  indirect_baseN will be true
	 if baseN is not an address but refers to the object itself.  */
      base0 = arg0;
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
				       &bitsize, &bitpos0, &offset0, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base0) == INDIRECT_REF)
	    base0 = TREE_OPERAND (base0, 0);
	  else
	    indirect_base0 = true;
	}
      else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	{
	  base0 = TREE_OPERAND (arg0, 0);
	  STRIP_SIGN_NOPS (base0);
	  if (TREE_CODE (base0) == ADDR_EXPR)
	    {
	      base0 = TREE_OPERAND (base0, 0);
	      indirect_base0 = true;
	    }
	  offset0 = TREE_OPERAND (arg0, 1);
	  if (host_integerp (offset0, 0))
	    {
	      HOST_WIDE_INT off = size_low_cst (offset0);
	      if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
				   * BITS_PER_UNIT)
		  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
		{
		  bitpos0 = off * BITS_PER_UNIT;
		  offset0 = NULL_TREE;
		}
	    }
	}

      base1 = arg1;
      if (TREE_CODE (arg1) == ADDR_EXPR)
	{
	  base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
				       &bitsize, &bitpos1, &offset1, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base1) == INDIRECT_REF)
	    base1 = TREE_OPERAND (base1, 0);
	  else
	    indirect_base1 = true;
	}
      else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	{
	  base1 = TREE_OPERAND (arg1, 0);
	  STRIP_SIGN_NOPS (base1);
	  if (TREE_CODE (base1) == ADDR_EXPR)
	    {
	      base1 = TREE_OPERAND (base1, 0);
	      indirect_base1 = true;
	    }
	  offset1 = TREE_OPERAND (arg1, 1);
	  if (host_integerp (offset1, 0))
	    {
	      HOST_WIDE_INT off = size_low_cst (offset1);
	      if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
				   * BITS_PER_UNIT)
		  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
		{
		  bitpos1 = off * BITS_PER_UNIT;
		  offset1 = NULL_TREE;
		}
	    }
	}

      /* A local variable can never be pointed to by
	 the default SSA name of an incoming parameter.  */
      if ((TREE_CODE (arg0) == ADDR_EXPR
	   && indirect_base0
	   && TREE_CODE (base0) == VAR_DECL
	   && auto_var_in_fn_p (base0, current_function_decl)
	   && !indirect_base1
	   && TREE_CODE (base1) == SSA_NAME
	   && SSA_NAME_IS_DEFAULT_DEF (base1)
	   && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
	  || (TREE_CODE (arg1) == ADDR_EXPR
	      && indirect_base1
	      && TREE_CODE (base1) == VAR_DECL
	      && auto_var_in_fn_p (base1, current_function_decl)
	      && !indirect_base0
	      && TREE_CODE (base0) == SSA_NAME
	      && SSA_NAME_IS_DEFAULT_DEF (base0)
	      && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
	{
	  if (code == NE_EXPR)
	    return constant_boolean_node (1, type);
	  else if (code == EQ_EXPR)
	    return constant_boolean_node (0, type);
	}
      /* If we have equivalent bases we might be able to simplify.  */
      else if (indirect_base0 == indirect_base1
	       && operand_equal_p (base0, base1, 0))
	{
	  /* We can fold this expression to a constant if the non-constant
	     offset parts are equal.  */
	  if ((offset0 == offset1
	       || (offset0 && offset1
		   && operand_equal_p (offset0, offset1, 0)))
	      && (code == EQ_EXPR
		  || code == NE_EXPR
		  || (indirect_base0 && DECL_P (base0))
		  || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      if (code != EQ_EXPR
		  && code != NE_EXPR
		  && bitpos0 != bitpos1
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_CONDITIONAL);

	      switch (code)
		{
		case EQ_EXPR:
		  return constant_boolean_node (bitpos0 == bitpos1, type);
		case NE_EXPR:
		  return constant_boolean_node (bitpos0 != bitpos1, type);
		case LT_EXPR:
		  return constant_boolean_node (bitpos0 < bitpos1, type);
		case LE_EXPR:
		  return constant_boolean_node (bitpos0 <= bitpos1, type);
		case GE_EXPR:
		  return constant_boolean_node (bitpos0 >= bitpos1, type);
		case GT_EXPR:
		  return constant_boolean_node (bitpos0 > bitpos1, type);
		default:;
		}
	    }
	  /* We can simplify the comparison to a comparison of the variable
	     offset parts if the constant offset parts are equal.
	     Be careful to use signed size type here because otherwise we
	     mess with array offsets in the wrong way.  This is possible
	     because pointer arithmetic is restricted to remain within an
	     object and overflow on pointer differences is undefined as of
	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
	  else if (bitpos0 == bitpos1
		   && ((code == EQ_EXPR || code == NE_EXPR)
		       || (indirect_base0 && DECL_P (base0))
		       || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      /* By converting to signed size type we cover middle-end pointer
		 arithmetic which operates on unsigned pointer types of size
		 type size and ARRAY_REF offsets which are properly sign or
		 zero extended from their type in case it is narrower than
		 size type.  */
	      if (offset0 == NULL_TREE)
		offset0 = build_int_cst (ssizetype, 0);
	      else
		offset0 = fold_convert_loc (loc, ssizetype, offset0);
	      if (offset1 == NULL_TREE)
		offset1 = build_int_cst (ssizetype, 0);
	      else
		offset1 = fold_convert_loc (loc, ssizetype, offset1);

	      if (code != EQ_EXPR
		  && code != NE_EXPR
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_COMPARISON);

	      return fold_build2_loc (loc, code, type, offset0, offset1);
	    }
	}
      /* For non-equal bases we can simplify if they are addresses
	 of local binding decls or constants.  */
      else if (indirect_base0 && indirect_base1
	       /* We know that !operand_equal_p (base0, base1, 0)
		  because the if condition was false.  But make
		  sure two decls are not the same.  */
	       && base0 != base1
	       && TREE_CODE (arg0) == ADDR_EXPR
	       && TREE_CODE (arg1) == ADDR_EXPR
	       && (((TREE_CODE (base0) == VAR_DECL
		     || TREE_CODE (base0) == PARM_DECL)
		    && (targetm.binds_local_p (base0)
			|| CONSTANT_CLASS_P (base1)))
		   || CONSTANT_CLASS_P (base0))
	       && (((TREE_CODE (base1) == VAR_DECL
		     || TREE_CODE (base1) == PARM_DECL)
		    && (targetm.binds_local_p (base1)
			|| CONSTANT_CLASS_P (base0)))
		   || CONSTANT_CLASS_P (base1)))
	{
	  if (code == EQ_EXPR)
	    return omit_two_operands_loc (loc, type, boolean_false_node,
					  arg0, arg1);
	  else if (code == NE_EXPR)
	    return omit_two_operands_loc (loc, type, boolean_true_node,
					  arg0, arg1);
	}
      /* For equal offsets we can simplify to a comparison of the
	 base addresses.  */
      else if (bitpos0 == bitpos1
	       && (indirect_base0
		   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
	       && (indirect_base1
		   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
	       && ((offset0 == offset1)
		   || (offset0 && offset1
		       && operand_equal_p (offset0, offset1, 0))))
	{
	  if (indirect_base0)
	    base0 = build_fold_addr_expr_loc (loc, base0);
	  if (indirect_base1)
	    base1 = build_fold_addr_expr_loc (loc, base1);
	  return fold_build2_loc (loc, code, type, base0, base1);
	}
    }

  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = TREE_OPERAND (arg1, 1);
      tree variable1 = TREE_OPERAND (arg0, 0);
      tree variable2 = TREE_OPERAND (arg1, 0);
      tree cst;
      const char * const warnmsg = G_("assuming signed overflow does not "
				      "occur when combining constants around "
				      "a comparison");

      /* Put the constant on the side where it doesn't overflow and is
	 of lower absolute value than before.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const2, const1);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  variable1,
				  fold_build2_loc (loc,
						   TREE_CODE (arg1), TREE_TYPE (arg1),
						   variable2, cst));
	}

      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const1, const2);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
						   variable1, cst),
				  variable2);
	}
    }

  /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
     signed arithmetic case.  That form is created by the compiler
     often enough for folding it to be of value.  One example is in
     computing loop trip counts after Operator Strength Reduction.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && TREE_CODE (arg0) == MULT_EXPR
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && integer_zerop (arg1))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;			/* zero */
      tree variable1 = TREE_OPERAND (arg0, 0);
      enum tree_code cmp_code = code;

      /* Handle unfolded multiplication by zero.  */
      if (integer_zerop (const1))
	return fold_build2_loc (loc, cmp_code, type, const1, const2);

      fold_overflow_warning (("assuming signed overflow does not occur when "
			      "eliminating multiplication in comparison "
			      "with zero"),
			     WARN_STRICT_OVERFLOW_COMPARISON);

      /* If const1 is negative we swap the sense of the comparison.  */
      if (tree_int_cst_sgn (const1) < 0)
	cmp_code = swap_tree_comparison (cmp_code);

      return fold_build2_loc (loc, cmp_code, type, variable1, const2);
    }

  tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
  if (tem)
    return tem;

  if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
    {
      tree targ0 = strip_float_extensions (arg0);
      tree targ1 = strip_float_extensions (arg1);
      tree newtype = TREE_TYPE (targ0);

      if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	newtype = TREE_TYPE (targ1);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, newtype, targ0),
				fold_convert_loc (loc, newtype, targ1));

      /* (-a) CMP (-b) -> b CMP a  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
				TREE_OPERAND (arg0, 0));

      if (TREE_CODE (arg1) == REAL_CST)
	{
	  REAL_VALUE_TYPE cst;
	  cst = TREE_REAL_CST (arg1);

	  /* (-a) CMP CST -> a swap(CMP) (-CST)  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR)
	    return fold_build2_loc (loc, swap_tree_comparison (code), type,
				    TREE_OPERAND (arg0, 0),
				    build_real (TREE_TYPE (arg1),
						real_value_negate (&cst)));

	  /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
	  /* a CMP (-0) -> a CMP 0  */
	  if (REAL_VALUE_MINUS_ZERO (cst))
	    return fold_build2_loc (loc, code, type, arg0,
				    build_real (TREE_TYPE (arg1), dconst0));

	  /* x != NaN is always true, other ops are always false.  */
	  if (REAL_VALUE_ISNAN (cst)
	      && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
	      return omit_one_operand_loc (loc, type, tem, arg0);
	    }

	  /* Fold comparisons against infinity.  */
	  if (REAL_VALUE_ISINF (cst)
	      && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = fold_inf_compare (loc, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}

      /* If this is a comparison of a real constant with a PLUS_EXPR
	 or a MINUS_EXPR of a real constant, we can convert it into a
	 comparison with a revised real constant as long as no overflow
	 occurs when unsafe_math_optimizations are enabled.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == REAL_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      arg1, TREE_OPERAND (arg0, 1)))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Likewise, we can simplify a comparison of a real constant with
	 a MINUS_EXPR whose first operand is also a real constant, i.e.
	 (c1 - x) < c2 becomes x > c1-c2.  Reordering is allowed on
	 floating-point types only if -fassociative-math is set.  */
      if (flag_associative_math
	  && TREE_CODE (arg1) == REAL_CST
	  && TREE_CODE (arg0) == MINUS_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
	  && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
				      arg1))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, swap_tree_comparison (code), type,
				TREE_OPERAND (arg0, 1), tem);

      /* Fold comparisons against built-in math functions.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && flag_unsafe_math_optimizations
	  && ! flag_errno_math)
	{
	  enum built_in_function fcode = builtin_mathfn_code (arg0);

	  if (fcode != END_BUILTINS)
	    {
	      tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}
    }

  if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
      && CONVERT_EXPR_P (arg0))
    {
      /* If we are widening one operand of an integer comparison,
	 see if the other operand is similarly being widened.  Perhaps we
	 can do the comparison in the narrower type.  */
      tem = fold_widened_comparison (loc, code, type, arg0, arg1);
      if (tem)
	return tem;

      /* Or if we are changing signedness.  */
      tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
      if (tem)
	return tem;
    }

  /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
     constant, we can simplify it.  */
  if (TREE_CODE (arg1) == INTEGER_CST
      && (TREE_CODE (arg0) == MIN_EXPR
	  || TREE_CODE (arg0) == MAX_EXPR)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
    {
      tem = optimize_minmax_comparison (loc, code, type, op0, op1);
      if (tem)
	return tem;
    }

  /* Simplify comparison of something with itself.  (For IEEE
     floating-point, we can only do some of these simplifications.)  */
  if (operand_equal_p (arg0, arg1, 0))
    {
      switch (code)
	{
	case EQ_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  break;

	case GE_EXPR:
	case LE_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);

	case NE_EXPR:
	  /* For NE, we can only do this simplification if integer
	     or we don't honor IEEE floating point NaNs.  */
	  if (FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    break;
	  /* ... fall through ...  */
	case GT_EXPR:
	case LT_EXPR:
	  return constant_boolean_node (0, type);
	default:
	  gcc_unreachable ();
	}
    }

  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like  ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;
      int save_p = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
	  /* Don't handle degenerate cases here; they should already
	     have been handled anyway.  */
	  && cval1 != 0 && cval2 != 0
	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
	{
	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

	  /* We can't just pass T to eval_subst in case cval1 or cval2
	     was the same as ARG1.  */

	  tree high_result
		= fold_build2_loc (loc, code, type,
				   eval_subst (loc, arg0, cval1, maxval,
					       cval2, minval),
				   arg1);
	  tree equal_result
		= fold_build2_loc (loc, code, type,
				   eval_subst (loc, arg0, cval1, maxval,
					       cval2, maxval),
				   arg1);
	  tree low_result
		= fold_build2_loc (loc, code, type,
				   eval_subst (loc, arg0, cval1, minval,
					       cval2, maxval),
				   arg1);

	  /* All three of these results should be 0 or 1.  Confirm they are.
	     Then use those values to select the proper code to use.  */

	  if (TREE_CODE (high_result) == INTEGER_CST
	      && TREE_CODE (equal_result) == INTEGER_CST
	      && TREE_CODE (low_result) == INTEGER_CST)
	    {
	      /* Make a 3-bit mask with the high-order bit being the
		 value for `>', the next for '=', and the low for '<'.  */
	      switch ((integer_onep (high_result) * 4)
		      + (integer_onep (equal_result) * 2)
		      + integer_onep (low_result))
		{
		case 0:
		  /* Always false.  */
		  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
		case 1:
		  code = LT_EXPR;
		  break;
		case 2:
		  code = EQ_EXPR;
		  break;
		case 3:
		  code = LE_EXPR;
		  break;
		case 4:
		  code = GT_EXPR;
		  break;
		case 5:
		  code = NE_EXPR;
		  break;
		case 6:
		  code = GE_EXPR;
		  break;
		case 7:
		  /* Always true.  */
		  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
		}

	      if (save_p)
		{
		  tem = save_expr (build2 (code, type, cval1, cval2));
		  SET_EXPR_LOCATION (tem, loc);
		  return tem;
		}
	      return fold_build2_loc (loc, code, type, cval1, cval2);
	    }
	}
    }

  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
     into a single range test.  */
  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
      && TREE_CODE (arg1) == INTEGER_CST
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !integer_zerop (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (arg1))
    {
      tem = fold_div_compare (loc, code, type, arg0, arg1);
      if (tem != NULL_TREE)
	return tem;
    }

  /* Fold ~X op ~Y as Y op X.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == BIT_NOT_EXPR)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, code, type,
			      fold_convert_loc (loc, cmp_type,
						TREE_OPERAND (arg1, 0)),
			      TREE_OPERAND (arg0, 0));
    }

  /* Fold ~X op C as X op' ~C, where op' is the swapped comparison.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == INTEGER_CST)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, swap_tree_comparison (code), type,
			      TREE_OPERAND (arg0, 0),
			      fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
					       fold_convert_loc (loc, cmp_type, arg1)));
    }

  return NULL_TREE;
}
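/* Illustrative sketch (not from the original sources): the final folds
   in fold_comparison rewrite, e.g.,

     ~x == ~y    as    y == x
     ~x < 5      as    x > ~5

   which is valid because bitwise NOT reverses the integer order.  */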
/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */

static tree
fold_mult_zconjz (location_t loc, tree type, tree expr)
{
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      expr = save_expr (expr);
      rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
      ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
    }

  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2_loc (loc, PLUS_EXPR, itype,
			 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
			 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
  return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
			  build_zero_cst (itype));
}
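/* Illustrative sketch (not from the original sources): for z = a + b*i
   the helper above builds

     z * conj(z)  =  (a*a + b*b) + 0*i

   so a GNU C expression like  z * ~z  for _Complex double z folds to a
   complex value with a zero imaginary part.  */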
/* Subroutine of fold_binary.  If P is the value of EXPR, computes
   power-of-two M and (arbitrary) N such that M divides (P-N).  This condition
   guarantees that P and N have the same least significant log2(M) bits.
   N is not otherwise constrained.  In particular, N is not normalized to
   0 <= N < M as is common.  In general, the precise value of P is unknown.
   M is chosen as large as possible such that constant N can be determined.

   Returns M and sets *RESIDUE to N.

   If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
   account.  This is not always possible due to PR 35705.  */

static unsigned HOST_WIDE_INT
get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
				 bool allow_func_align)
{
  enum tree_code code;

  *residue = 0;

  code = TREE_CODE (expr);
  if (code == ADDR_EXPR)
    {
      unsigned int bitalign;
      get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
      *residue /= BITS_PER_UNIT;
      return bitalign / BITS_PER_UNIT;
    }
  else if (code == POINTER_PLUS_EXPR)
    {
      tree op0, op1;
      unsigned HOST_WIDE_INT modulus;
      enum tree_code inner_code;

      op0 = TREE_OPERAND (expr, 0);
      STRIP_NOPS (op0);
      modulus = get_pointer_modulus_and_residue (op0, residue,
						 allow_func_align);

      op1 = TREE_OPERAND (expr, 1);
      STRIP_NOPS (op1);
      inner_code = TREE_CODE (op1);
      if (inner_code == INTEGER_CST)
	{
	  *residue += TREE_INT_CST_LOW (op1);
	  return modulus;
	}
      else if (inner_code == MULT_EXPR)
	{
	  op1 = TREE_OPERAND (op1, 1);
	  if (TREE_CODE (op1) == INTEGER_CST)
	    {
	      unsigned HOST_WIDE_INT align;

	      /* Compute the greatest power-of-2 divisor of op1.  */
	      align = TREE_INT_CST_LOW (op1);
	      align &= -align;

	      /* If align is non-zero and less than *modulus, replace
		 *modulus with align.  If align is 0, then either op1 is 0
		 or the greatest power-of-2 divisor of op1 doesn't fit in an
		 unsigned HOST_WIDE_INT.  In either case, no additional
		 constraint is imposed.  */
	      if (align)
		modulus = MIN (modulus, align);

	      return modulus;
	    }
	}
    }

  /* If we get here, we were unable to determine anything useful about the
     expression.  */
  return 1;
}
/* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
   CONSTRUCTOR ARG into array ELTS and return true if successful.  */

static bool
vec_cst_ctor_to_array (tree arg, tree *elts)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;

  if (TREE_CODE (arg) == VECTOR_CST)
    {
      for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
	elts[i] = VECTOR_CST_ELT (arg, i);
    }
  else if (TREE_CODE (arg) == CONSTRUCTOR)
    {
      constructor_elt *elt;

      FOR_EACH_VEC_ELT (constructor_elt, CONSTRUCTOR_ELTS (arg), i, elt)
	if (i >= nelts)
	  return false;
	else
	  elts[i] = elt->value;
    }
  else
    return false;
  for (; i < nelts; i++)
    elts[i]
      = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
  return true;
}
/* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
   selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
   NULL_TREE otherwise.  */

static tree
fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
  tree *elts;
  bool need_ctor = false;

  gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
	      && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
  if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
      || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
    return NULL_TREE;

  elts = XALLOCAVEC (tree, nelts * 3);
  if (!vec_cst_ctor_to_array (arg0, elts)
      || !vec_cst_ctor_to_array (arg1, elts + nelts))
    return NULL_TREE;

  for (i = 0; i < nelts; i++)
    {
      if (!CONSTANT_CLASS_P (elts[sel[i]]))
	need_ctor = true;
      elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
    }

  if (need_ctor)
    {
      VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, nelts);
      for (i = 0; i < nelts; i++)
	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
      return build_constructor (type, v);
    }
  else
    return build_vector (type, &elts[2 * nelts]);
}
/* Try to fold a pointer difference of type TYPE between two address
   expressions of array references AREF0 and AREF1 using location LOC.
   Return a simplified expression for the difference or NULL_TREE.  */

static tree
fold_addr_of_array_ref_difference (location_t loc, tree type,
				   tree aref0, tree aref1)
{
  tree base0 = TREE_OPERAND (aref0, 0);
  tree base1 = TREE_OPERAND (aref1, 0);
  tree base_offset = build_int_cst (type, 0);

  /* If the bases are array references as well, recurse.  If the bases
     are pointer indirections compute the difference of the pointers.
     If the bases are equal, we are set.  */
  if ((TREE_CODE (base0) == ARRAY_REF
       && TREE_CODE (base1) == ARRAY_REF
       && (base_offset
	   = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
      || (INDIRECT_REF_P (base0)
	  && INDIRECT_REF_P (base1)
	  && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
					     TREE_OPERAND (base0, 0),
					     TREE_OPERAND (base1, 0))))
      || operand_equal_p (base0, base1, 0))
    {
      tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
      tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
      tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
      tree diff = build2 (MINUS_EXPR, type, op0, op1);
      return fold_build2_loc (loc, PLUS_EXPR, type,
			      base_offset,
			      fold_build2_loc (loc, MULT_EXPR, type,
					       diff, esz));
    }
  return NULL_TREE;
}
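/* Illustrative sketch (not from the original sources): for

     int a[N];
     ... &a[i] - &a[j] ...

   the helper above reduces the address difference to
   (i - j) * sizeof (int) plus the difference of the bases, which the
   caller then scales down to an element count.  */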
/* If the real or vector real constant CST of type TYPE has an exact
   inverse, return it, else return NULL.  */

static tree
exact_inverse (tree type, tree cst)
{
  REAL_VALUE_TYPE r;
  tree unit_type, *elts;
  enum machine_mode mode;
  unsigned vec_nelts, i;

  switch (TREE_CODE (cst))
    {
    case REAL_CST:
      r = TREE_REAL_CST (cst);

      if (exact_real_inverse (TYPE_MODE (type), &r))
	return build_real (type, r);

      return NULL_TREE;

    case VECTOR_CST:
      vec_nelts = VECTOR_CST_NELTS (cst);
      elts = XALLOCAVEC (tree, vec_nelts);
      unit_type = TREE_TYPE (type);
      mode = TYPE_MODE (unit_type);

      for (i = 0; i < vec_nelts; i++)
	{
	  r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
	  if (!exact_real_inverse (mode, &r))
	    return NULL_TREE;
	  elts[i] = build_real (unit_type, r);
	}

      return build_vector (type, elts);

    default:
      return NULL_TREE;
    }
}
9733 /* Fold a binary expression of code CODE and type TYPE with operands
9734 OP0 and OP1. LOC is the location of the resulting expression.
9735 Return the folded expression if folding is successful. Otherwise,
9736 return NULL_TREE. */
9739 fold_binary_loc (location_t loc
,
9740 enum tree_code code
, tree type
, tree op0
, tree op1
)
9742 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
9743 tree arg0
, arg1
, tem
;
9744 tree t1
= NULL_TREE
;
9745 bool strict_overflow_p
;
9747 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
9748 && TREE_CODE_LENGTH (code
) == 2
9750 && op1
!= NULL_TREE
);
9755 /* Strip any conversions that don't change the mode. This is
9756 safe for every expression, except for a comparison expression
9757 because its signedness is derived from its operands. So, in
9758 the latter case, only strip conversions that don't change the
9759 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9762 Note that this is done as an internal manipulation within the
9763 constant folder, in order to find the simplest representation
9764 of the arguments so that their form can be studied. In any
9765 cases, the appropriate type conversions should be put back in
9766 the tree that will get out of the constant folder. */
9768 if (kind
== tcc_comparison
|| code
== MIN_EXPR
|| code
== MAX_EXPR
)
9770 STRIP_SIGN_NOPS (arg0
);
9771 STRIP_SIGN_NOPS (arg1
);
9779 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9780 constant but we can't do arithmetic on them. */
9781 if ((TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
9782 || (TREE_CODE (arg0
) == REAL_CST
&& TREE_CODE (arg1
) == REAL_CST
)
9783 || (TREE_CODE (arg0
) == FIXED_CST
&& TREE_CODE (arg1
) == FIXED_CST
)
9784 || (TREE_CODE (arg0
) == FIXED_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
9785 || (TREE_CODE (arg0
) == COMPLEX_CST
&& TREE_CODE (arg1
) == COMPLEX_CST
)
9786 || (TREE_CODE (arg0
) == VECTOR_CST
&& TREE_CODE (arg1
) == VECTOR_CST
))
9788 if (kind
== tcc_binary
)
9790 /* Make sure type and arg0 have the same saturating flag. */
9791 gcc_assert (TYPE_SATURATING (type
)
9792 == TYPE_SATURATING (TREE_TYPE (arg0
)));
9793 tem
= const_binop (code
, arg0
, arg1
);
9795 else if (kind
== tcc_comparison
)
9796 tem
= fold_relational_const (code
, type
, arg0
, arg1
);
9800 if (tem
!= NULL_TREE
)
9802 if (TREE_TYPE (tem
) != type
)
9803 tem
= fold_convert_loc (loc
, type
, tem
);
9808 /* If this is a commutative operation, and ARG0 is a constant, move it
9809 to ARG1 to reduce the number of tests below. */
9810 if (commutative_tree_code (code
)
9811 && tree_swap_operands_p (arg0
, arg1
, true))
9812 return fold_build2_loc (loc
, code
, type
, op1
, op0
);
  /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     one of the operands is a comparison and the other is a comparison, a
     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
     code below would make the expression more complex.  Change it to a
     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && ((truth_value_p (TREE_CODE (arg0))
	   && (truth_value_p (TREE_CODE (arg1))
	       || (TREE_CODE (arg1) == BIT_AND_EXPR
		   && integer_onep (TREE_OPERAND (arg1, 1)))))
	  || (truth_value_p (TREE_CODE (arg1))
	      && (truth_value_p (TREE_CODE (arg0))
		  || (TREE_CODE (arg0) == BIT_AND_EXPR
		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
			     : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
			     : TRUTH_XOR_EXPR,
			     boolean_type_node,
			     fold_convert_loc (loc, boolean_type_node, arg0),
			     fold_convert_loc (loc, boolean_type_node, arg1));

      if (code == EQ_EXPR)
	tem = invert_truthvalue_loc (loc, tem);

      return fold_convert_loc (loc, type, tem);
    }
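
  /* Illustrative example (editor's sketch): (a < b) & (c < d), where
     both operands are comparisons, is rewritten above as
     TRUTH_AND_EXPR <a < b, c < d>, and (a < b) == (c < d) becomes the
     inversion of TRUTH_XOR_EXPR <a < b, c < d>; both are built in
     boolean_type_node and converted back to TYPE.  */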
  if (TREE_CODE_CLASS (code) == tcc_binary
      || TREE_CODE_CLASS (code) == tcc_comparison)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
	{
	  tem = fold_build2_loc (loc, code, type,
				 fold_convert_loc (loc, TREE_TYPE (op0),
						   TREE_OPERAND (arg0, 1)), op1);
	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
			     tem);
	}
      if (TREE_CODE (arg1) == COMPOUND_EXPR
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_build2_loc (loc, code, type, op0,
				 fold_convert_loc (loc, TREE_TYPE (op1),
						   TREE_OPERAND (arg1, 1)));
	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
			     tem);
	}

      if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
	{
	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
						     arg0, arg1,
						     /*cond_first_p=*/1);
	  if (tem != NULL_TREE)
	    return tem;
	}

      if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
	{
	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
						     arg1, arg0,
						     /*cond_first_p=*/0);
	  if (tem != NULL_TREE)
	    return tem;
	}
    }
  switch (code)
    {
    case MEM_REF:
      /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2].  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
	{
	  tree iref = TREE_OPERAND (arg0, 0);
	  return fold_build2 (MEM_REF, type,
			      TREE_OPERAND (iref, 0),
			      int_const_binop (PLUS_EXPR, arg1,
					       TREE_OPERAND (iref, 1)));
	}

      /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2].  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && handled_component_p (TREE_OPERAND (arg0, 0)))
	{
	  tree base;
	  HOST_WIDE_INT coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
						&coffset);
	  if (!base)
	    return NULL_TREE;
	  return fold_build2 (MEM_REF, type,
			      build_fold_addr_expr (base),
			      int_const_binop (PLUS_EXPR, arg1,
					       size_int (coffset)));
	}

      return NULL_TREE;
    case POINTER_PLUS_EXPR:
      /* 0 +p index -> (type)index */
      if (integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

      /* PTR +p 0 -> PTR */
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
	  && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
	return fold_convert_loc (loc, type,
				 fold_build2_loc (loc, PLUS_EXPR, sizetype,
						  fold_convert_loc (loc, sizetype,
								    arg1),
						  fold_convert_loc (loc, sizetype,
								    arg0)));

      /* (PTR +p B) +p A -> PTR +p (B + A) */
      if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	{
	  tree inner;
	  tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
				   arg01, fold_convert_loc (loc, sizetype, arg1));
	  return fold_convert_loc (loc, type,
				   fold_build_pointer_plus_loc (loc,
								arg00, inner));
	}

      /* PTR_CST +p CST -> CST1 */
      if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
				fold_convert_loc (loc, type, arg1));

      /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
	 of the array.  The loop optimizer sometimes produces this kind of
	 expression.  */
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  tem = try_move_mult_to_index (loc, arg0,
					fold_convert_loc (loc,
							  ssizetype, arg1));
	  if (tem)
	    return fold_convert_loc (loc, type, tem);
	}

      return NULL_TREE;
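
      /* Illustrative example (editor's sketch): for char *p, the tree
	 (p p+ 4) p+ 8 matches the (PTR +p B) +p A pattern above; the
	 two offsets are added in sizetype and the result is rebuilt as
	 p p+ 12.  */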
    case PLUS_EXPR:
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, MINUS_EXPR, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				fold_convert_loc (loc, type, arg1),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));

      if (INTEGRAL_TYPE_P (type))
	{
	  /* Convert ~A + 1 to -A.  */
	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
	      && integer_onep (arg1))
	    return fold_build1_loc (loc, NEGATE_EXPR, type,
				    fold_convert_loc (loc, type,
						      TREE_OPERAND (arg0, 0)));

	  /* ~X + X is -1.  */
	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
	      && !TYPE_OVERFLOW_TRAPS (type))
	    {
	      tree tem = TREE_OPERAND (arg0, 0);

	      STRIP_NOPS (tem);
	      if (operand_equal_p (tem, arg1, 0))
		{
		  t1 = build_int_cst_type (type, -1);
		  return omit_one_operand_loc (loc, type, t1, arg1);
		}
	    }

	  /* X + ~X is -1.  */
	  if (TREE_CODE (arg1) == BIT_NOT_EXPR
	      && !TYPE_OVERFLOW_TRAPS (type))
	    {
	      tree tem = TREE_OPERAND (arg1, 0);

	      STRIP_NOPS (tem);
	      if (operand_equal_p (arg0, tem, 0))
		{
		  t1 = build_int_cst_type (type, -1);
		  return omit_one_operand_loc (loc, type, t1, arg0);
		}
	    }

	  /* X + (X / CST) * -CST is X % CST.  */
	  if (TREE_CODE (arg1) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
	      && operand_equal_p (arg0,
				  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
	    {
	      tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
	      tree cst1 = TREE_OPERAND (arg1, 1);
	      tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
					  cst1, cst0);
	      if (sum && integer_zerop (sum))
		return fold_convert_loc (loc, type,
					 fold_build2_loc (loc, TRUNC_MOD_EXPR,
							  TREE_TYPE (arg0), arg0,
							  cst0));
	    }
	}
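
      /* Illustrative examples (editor's sketch) for the integral folds
	 above: ~a + 1 becomes -a, ~a + a becomes the constant -1, and
	 a + (a / 8) * -8 becomes a % 8, because the constant of the
	 division and the multiplier sum to zero.  */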
      /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
	 one.  Make sure the type is not saturating and has the signedness of
	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
	   || TREE_CODE (arg1) == MULT_EXPR)
	  && !TYPE_SATURATING (type)
	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
	{
	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
	  if (tem)
	    return tem;
	}
      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
	     with a constant, and the two constants have no bits in common,
	     we should treat this as a BIT_IOR_EXPR since this may produce more
	     simplifications.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	      && integer_zerop (const_binop (BIT_AND_EXPR,
					     TREE_OPERAND (arg0, 1),
					     TREE_OPERAND (arg1, 1))))
	    {
	      code = BIT_IOR_EXPR;
	      goto bit_ior;
	    }

	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
	     (plus (plus (mult) (mult)) (foo)) so that we can
	     take advantage of the factoring cases below.  */
	  if (TYPE_OVERFLOW_WRAPS (type)
	      && (((TREE_CODE (arg0) == PLUS_EXPR
		    || TREE_CODE (arg0) == MINUS_EXPR)
		   && TREE_CODE (arg1) == MULT_EXPR)
		  || ((TREE_CODE (arg1) == PLUS_EXPR
		       || TREE_CODE (arg1) == MINUS_EXPR)
		      && TREE_CODE (arg0) == MULT_EXPR)))
	    {
	      tree parg0, parg1, parg, marg;
	      enum tree_code pcode;

	      if (TREE_CODE (arg1) == MULT_EXPR)
		parg = arg0, marg = arg1;
	      else
		parg = arg1, marg = arg0;
	      pcode = TREE_CODE (parg);
	      parg0 = TREE_OPERAND (parg, 0);
	      parg1 = TREE_OPERAND (parg, 1);
	      STRIP_NOPS (parg0);
	      STRIP_NOPS (parg1);

	      if (TREE_CODE (parg0) == MULT_EXPR
		  && TREE_CODE (parg1) != MULT_EXPR)
		return fold_build2_loc (loc, pcode, type,
					fold_build2_loc (loc, PLUS_EXPR, type,
							 fold_convert_loc (loc, type,
									   parg0),
							 fold_convert_loc (loc, type,
									   marg)),
					fold_convert_loc (loc, type, parg1));
	      if (TREE_CODE (parg0) != MULT_EXPR
		  && TREE_CODE (parg1) == MULT_EXPR)
		return
		  fold_build2_loc (loc, PLUS_EXPR, type,
				   fold_convert_loc (loc, type, parg0),
				   fold_build2_loc (loc, pcode, type,
						    fold_convert_loc (loc, type, marg),
						    fold_convert_loc (loc, type,
								      parg1)));
	    }
	}
      else
	{
	  /* See if ARG1 is zero and X + ARG1 reduces to X.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* Likewise if the operands are reversed.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

	  /* Convert X + -C into X - C.  */
	  if (TREE_CODE (arg1) == REAL_CST
	      && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
	    {
	      tem = fold_negate_const (arg1, type);
	      if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
		return fold_build2_loc (loc, MINUS_EXPR, type,
					fold_convert_loc (loc, type, arg0),
					fold_convert_loc (loc, type, tem));
	    }

	  /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
	     to __complex__ ( x, y ).  This is not the same for SNaNs or
	     if signed zeros are involved.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	    {
	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	      tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
	      tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
	      bool arg0rz = false, arg0iz = false;
	      if ((arg0r && (arg0rz = real_zerop (arg0r)))
		  || (arg0i && (arg0iz = real_zerop (arg0i))))
		{
		  tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
		  tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
		  if (arg0rz && arg1i && real_zerop (arg1i))
		    {
		      tree rp = arg1r ? arg1r
			: build1 (REALPART_EXPR, rtype, arg1);
		      tree ip = arg0i ? arg0i
			: build1 (IMAGPART_EXPR, rtype, arg0);
		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		    }
		  else if (arg0iz && arg1r && real_zerop (arg1r))
		    {
		      tree rp = arg0r ? arg0r
			: build1 (REALPART_EXPR, rtype, arg0);
		      tree ip = arg1i ? arg1i
			: build1 (IMAGPART_EXPR, rtype, arg1);
		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		    }
		}
	    }
	  if (flag_unsafe_math_optimizations
	      && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
	      && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
	      && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
	    return tem;

	  /* Convert x+x into x*2.0.  */
	  if (operand_equal_p (arg0, arg1, 0)
	      && SCALAR_FLOAT_TYPE_P (type))
	    return fold_build2_loc (loc, MULT_EXPR, type, arg0,
				    build_real (type, dconst2));

	  /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
	     We associate floats only if the user has specified
	     -fassociative-math.  */
	  if (flag_associative_math
	      && TREE_CODE (arg1) == PLUS_EXPR
	      && TREE_CODE (arg0) != MULT_EXPR)
	    {
	      tree tree10 = TREE_OPERAND (arg1, 0);
	      tree tree11 = TREE_OPERAND (arg1, 1);
	      if (TREE_CODE (tree11) == MULT_EXPR
		  && TREE_CODE (tree10) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
		  return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
		}
	    }
	  /* Convert (b*c + d*e) + a into b*c + (d*e + a).
	     We associate floats only if the user has specified
	     -fassociative-math.  */
	  if (flag_associative_math
	      && TREE_CODE (arg0) == PLUS_EXPR
	      && TREE_CODE (arg1) != MULT_EXPR)
	    {
	      tree tree00 = TREE_OPERAND (arg0, 0);
	      tree tree01 = TREE_OPERAND (arg0, 1);
	      if (TREE_CODE (tree01) == MULT_EXPR
		  && TREE_CODE (tree00) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
		  return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
		}
	    }
	}
    bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
	 is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
	 is a rotate of A by B bits.  */
      {
	enum tree_code code0, code1;
	tree rtype;
	code0 = TREE_CODE (arg0);
	code1 = TREE_CODE (arg1);
	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
	    && operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 0), 0)
	    && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
		TYPE_UNSIGNED (rtype))
	    /* Only create rotates in complete modes.  Other cases are not
	       expanded properly.  */
	    && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
	  {
	    tree tree01, tree11;
	    enum tree_code code01, code11;

	    tree01 = TREE_OPERAND (arg0, 1);
	    tree11 = TREE_OPERAND (arg1, 1);
	    STRIP_NOPS (tree01);
	    STRIP_NOPS (tree11);
	    code01 = TREE_CODE (tree01);
	    code11 = TREE_CODE (tree11);
	    if (code01 == INTEGER_CST
		&& code11 == INTEGER_CST
		&& TREE_INT_CST_HIGH (tree01) == 0
		&& TREE_INT_CST_HIGH (tree11) == 0
		&& ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
		    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
	      {
		tem = build2_loc (loc, LROTATE_EXPR,
				  TREE_TYPE (TREE_OPERAND (arg0, 0)),
				  TREE_OPERAND (arg0, 0),
				  code0 == LSHIFT_EXPR ? tree01 : tree11);
		return fold_convert_loc (loc, type, tem);
	      }
	    else if (code11 == MINUS_EXPR)
	      {
		tree tree110, tree111;
		tree110 = TREE_OPERAND (tree11, 0);
		tree111 = TREE_OPERAND (tree11, 1);
		STRIP_NOPS (tree110);
		STRIP_NOPS (tree111);
		if (TREE_CODE (tree110) == INTEGER_CST
		    && 0 == compare_tree_int (tree110,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree01, tree111, 0))
		  return
		    fold_convert_loc (loc, type,
				      build2 ((code0 == LSHIFT_EXPR
					       ? LROTATE_EXPR
					       : RROTATE_EXPR),
					      TREE_TYPE (TREE_OPERAND (arg0, 0)),
					      TREE_OPERAND (arg0, 0), tree01));
	      }
	    else if (code01 == MINUS_EXPR)
	      {
		tree tree010, tree011;
		tree010 = TREE_OPERAND (tree01, 0);
		tree011 = TREE_OPERAND (tree01, 1);
		STRIP_NOPS (tree010);
		STRIP_NOPS (tree011);
		if (TREE_CODE (tree010) == INTEGER_CST
		    && 0 == compare_tree_int (tree010,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree11, tree011, 0))
		  return fold_convert_loc
		    (loc, type,
		     build2 ((code0 != LSHIFT_EXPR
			      ? LROTATE_EXPR
			      : RROTATE_EXPR),
			     TREE_TYPE (TREE_OPERAND (arg0, 0)),
			     TREE_OPERAND (arg0, 0), tree11));
	      }
	  }
      }
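
      /* Illustrative example (editor's sketch, assuming a 32-bit
	 unsigned int x): (x << 3) + (x >> 29) satisfies
	 C1 + C2 == TYPE_PRECISION and is folded above into
	 LROTATE_EXPR <x, 3>; the variable form
	 (x << n) + (x >> (32 - n)) is matched by the MINUS_EXPR
	 branches.  The same code is reached for BIT_IOR_EXPR and
	 BIT_XOR_EXPR through the bit_rotate label.  */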
    associate:
      /* In most languages, can't associate operations on floats through
	 parentheses.  Rather than remember where the parentheses were, we
	 don't associate floats at all, unless the user has specified
	 -fassociative-math.
	 And, we need to make sure type is not saturating.  */

      if ((! FLOAT_TYPE_P (type) || flag_associative_math)
	  && !TYPE_SATURATING (type))
	{
	  tree var0, con0, lit0, minus_lit0;
	  tree var1, con1, lit1, minus_lit1;
	  bool ok = true;

	  /* Split both trees into variables, constants, and literals.  Then
	     associate each group together, the constants with literals,
	     then the result with variables.  This increases the chances of
	     literals being recombined later and of generating relocatable
	     expressions for the sum of a constant and literal.  */
	  var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
	  var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
			     code == MINUS_EXPR);

	  /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
	  if (code == MINUS_EXPR)
	    code = PLUS_EXPR;

	  /* With undefined overflow we can only associate constants with one
	     variable, and constants whose association doesn't overflow.  */
	  if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
	      || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
	    {
	      if (var0 && var1)
		{
		  tree tmp0 = var0;
		  tree tmp1 = var1;

		  if (TREE_CODE (tmp0) == NEGATE_EXPR)
		    tmp0 = TREE_OPERAND (tmp0, 0);
		  if (CONVERT_EXPR_P (tmp0)
		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
			  <= TYPE_PRECISION (type)))
		    tmp0 = TREE_OPERAND (tmp0, 0);
		  if (TREE_CODE (tmp1) == NEGATE_EXPR)
		    tmp1 = TREE_OPERAND (tmp1, 0);
		  if (CONVERT_EXPR_P (tmp1)
		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
			  <= TYPE_PRECISION (type)))
		    tmp1 = TREE_OPERAND (tmp1, 0);
		  /* The only case we can still associate with two variables
		     is if they are the same, modulo negation and bit-pattern
		     preserving conversions.  */
		  if (!operand_equal_p (tmp0, tmp1, 0))
		    ok = false;
		}

	      if (ok && lit0 && lit1)
		{
		  tree tmp0 = fold_convert (type, lit0);
		  tree tmp1 = fold_convert (type, lit1);

		  if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
		      && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
		    ok = false;
		}
	    }

	  /* Only do something if we found more than two objects.  Otherwise,
	     nothing has changed and we risk infinite recursion.  */
	  if (ok
	      && (2 < ((var0 != 0) + (var1 != 0)
		       + (con0 != 0) + (con1 != 0)
		       + (lit0 != 0) + (lit1 != 0)
		       + (minus_lit0 != 0) + (minus_lit1 != 0))))
	    {
	      var0 = associate_trees (loc, var0, var1, code, type);
	      con0 = associate_trees (loc, con0, con1, code, type);
	      lit0 = associate_trees (loc, lit0, lit1, code, type);
	      minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
					    code, type);

	      /* Preserve the MINUS_EXPR if the negative part of the literal is
		 greater than the positive part.  Otherwise, the multiplicative
		 folding code (i.e. extract_muldiv) may be fooled in case
		 unsigned constants are subtracted, like in the following
		 example: ((X*2 + 4) - 8U)/2.  */
	      if (minus_lit0 && lit0)
		{
		  if (TREE_CODE (lit0) == INTEGER_CST
		      && TREE_CODE (minus_lit0) == INTEGER_CST
		      && tree_int_cst_lt (lit0, minus_lit0))
		    {
		      minus_lit0 = associate_trees (loc, minus_lit0, lit0,
						    MINUS_EXPR, type);
		      lit0 = 0;
		    }
		  else
		    {
		      lit0 = associate_trees (loc, lit0, minus_lit0,
					      MINUS_EXPR, type);
		      minus_lit0 = 0;
		    }
		}
	      if (minus_lit0)
		{
		  if (con0 == 0)
		    return
		      fold_convert_loc (loc, type,
					associate_trees (loc, var0, minus_lit0,
							 MINUS_EXPR, type));
		  else
		    {
		      con0 = associate_trees (loc, con0, minus_lit0,
					      MINUS_EXPR, type);
		      return
			fold_convert_loc (loc, type,
					  associate_trees (loc, var0, con0,
							   PLUS_EXPR, type));
		    }
		}

	      con0 = associate_trees (loc, con0, lit0, code, type);
	      return
		fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
							      code, type));
	    }
	}

      return NULL_TREE;
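
      /* Illustrative example (editor's sketch): for an unsigned
	 (wrapping) type, (x + 1) + (y + 2) splits into variables {x, y}
	 and literals {1, 2}; associate_trees then rebuilds (x + y) + 3,
	 combining the two literals even though they were not adjacent.
	 For signed types with undefined overflow, the two-variable check
	 above sets ok to false and the reassociation is suppressed.  */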
    case MINUS_EXPR:
      /* Pointer simplifications for subtraction, simple reassociations.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
	{
	  /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
	  if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
	      && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	    {
	      tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	      tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	      tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	      tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
	      return fold_build2_loc (loc, PLUS_EXPR, type,
				      fold_build2_loc (loc, MINUS_EXPR, type,
						       arg00, arg10),
				      fold_build2_loc (loc, MINUS_EXPR, type,
						       arg01, arg11));
	    }
	  /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1
	     simplifies.  */
	  else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	    {
	      tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	      tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	      tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
					  fold_convert_loc (loc, type, arg1));
	      if (tmp)
		return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
	    }
	}
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, PLUS_EXPR, type, op0,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && (FLOAT_TYPE_P (type)
	      || INTEGRAL_TYPE_P (type))
	  && negate_expr_p (arg1)
	  && reorder_operands_p (arg0, arg1))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				fold_convert_loc (loc, type,
						  negate_expr (arg1)),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));
      /* Convert -A - 1 to ~A.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (arg0) == NEGATE_EXPR
	  && integer_onep (arg1)
	  && !TYPE_OVERFLOW_TRAPS (type))
	return fold_build1_loc (loc, BIT_NOT_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));

      /* Convert -1 - A to ~A.  */
      if (INTEGRAL_TYPE_P (type)
	  && integer_all_onesp (arg0))
	return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);

      /* X - (X / CST) * CST is X % CST.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (arg1) == MULT_EXPR
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
	  && operand_equal_p (arg0,
			      TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
			      TREE_OPERAND (arg1, 1), 0))
	return
	  fold_convert_loc (loc, type,
			    fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
					     arg0, TREE_OPERAND (arg1, 1)));
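
      /* Illustrative example (editor's sketch): for integral x,
	 x - (x / 16) * 16 matches the pattern above and is rewritten as
	 x % 16 (a TRUNC_MOD_EXPR), since the divisor and the multiplier
	 are the same operand.  */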
      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg0))
	    return negate_expr (fold_convert_loc (loc, type, arg1));
	  if (integer_zerop (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* Fold A - (A & B) into ~B & A.  */
	  if (!TREE_SIDE_EFFECTS (arg0)
	      && TREE_CODE (arg1) == BIT_AND_EXPR)
	    {
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
		{
		  tree arg10 = fold_convert_loc (loc, type,
						 TREE_OPERAND (arg1, 0));
		  return fold_build2_loc (loc, BIT_AND_EXPR, type,
					  fold_build1_loc (loc, BIT_NOT_EXPR,
							   type, arg10),
					  fold_convert_loc (loc, type, arg0));
		}
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
		{
		  tree arg11 = fold_convert_loc (loc,
						 type, TREE_OPERAND (arg1, 1));
		  return fold_build2_loc (loc, BIT_AND_EXPR, type,
					  fold_build1_loc (loc, BIT_NOT_EXPR,
							   type, arg11),
					  fold_convert_loc (loc, type, arg0));
		}
	    }

	  /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
	     any power of 2 minus 1.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0))
	    {
	      tree mask0 = TREE_OPERAND (arg0, 1);
	      tree mask1 = TREE_OPERAND (arg1, 1);
	      tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);

	      if (operand_equal_p (tem, mask1, 0))
		{
		  tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
					 TREE_OPERAND (arg0, 0), mask1);
		  return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
		}
	    }
	}
      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
	 ARG0 is zero and X + ARG0 reduces to X, since that would mean
	 (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	return negate_expr (fold_convert_loc (loc, type, arg1));

      /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
	 __complex__ ( x, -y ).  This is not the same for SNaNs or if
	 signed zeros are involved.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	{
	  tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	  tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
	  tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
	  bool arg0rz = false, arg0iz = false;
	  if ((arg0r && (arg0rz = real_zerop (arg0r)))
	      || (arg0i && (arg0iz = real_zerop (arg0i))))
	    {
	      tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
	      tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
	      if (arg0rz && arg1i && real_zerop (arg1i))
		{
		  tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
					     arg1r ? arg1r
					     : build1 (REALPART_EXPR, rtype, arg1));
		  tree ip = arg0i ? arg0i
		    : build1 (IMAGPART_EXPR, rtype, arg0);
		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		}
	      else if (arg0iz && arg1r && real_zerop (arg1r))
		{
		  tree rp = arg0r ? arg0r
		    : build1 (REALPART_EXPR, rtype, arg0);
		  tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
					     arg1i ? arg1i
					     : build1 (IMAGPART_EXPR, rtype, arg1));
		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		}
	    }
	}
      /* Fold &x - &x.  This can happen from &x.foo - &x.
	 This is unsafe for certain floats even in non-IEEE formats.
	 In IEEE, it is unsafe because it does wrong for NaNs.
	 Also note that operand_equal_p is always false if an operand
	 is volatile.  */

      if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
	  && operand_equal_p (arg0, arg1, 0))
	return build_zero_cst (type);

      /* A - B -> A + (-B) if B is easily negatable.  */
      if (negate_expr_p (arg1)
	  && ((FLOAT_TYPE_P (type)
	       /* Avoid this transformation if B is a positive REAL_CST.  */
	       && (TREE_CODE (arg1) != REAL_CST
		   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
	      || INTEGRAL_TYPE_P (type)))
	return fold_build2_loc (loc, PLUS_EXPR, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type,
						  negate_expr (arg1)));

      /* Try folding difference of addresses.  */
      {
	HOST_WIDE_INT diff;

	if ((TREE_CODE (arg0) == ADDR_EXPR
	     || TREE_CODE (arg1) == ADDR_EXPR)
	    && ptr_difference_const (arg0, arg1, &diff))
	  return build_int_cst_type (type, diff);
      }
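
      /* Illustrative example (editor's sketch, assuming a 4-byte int
	 member): for struct S { int a; int b; } s;, the difference
	 (char *) &s.b - (char *) &s is folded by ptr_difference_const
	 above into the INTEGER_CST 4.  */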
      /* Fold &a[i] - &a[j] to i-j.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
	{
	  tree tem = fold_addr_of_array_ref_difference (loc, type,
							TREE_OPERAND (arg0, 0),
							TREE_OPERAND (arg1, 0));
	  if (tem)
	    return tem;
	}

      if (FLOAT_TYPE_P (type)
	  && flag_unsafe_math_optimizations
	  && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
	  && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
	  && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
	return tem;

      /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
	 one.  Make sure the type is not saturating and has the signedness of
	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
	   || TREE_CODE (arg1) == MULT_EXPR)
	  && !TYPE_SATURATING (type)
	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
	{
	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
	  if (tem)
	    return tem;
	}

      goto associate;
    case MULT_EXPR:
      /* (-A) * (-B) -> A * B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold_build2_loc (loc, MULT_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)),
				fold_convert_loc (loc, type,
						  negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold_build2_loc (loc, MULT_EXPR, type,
				fold_convert_loc (loc, type,
						  negate_expr (arg0)),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));

      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return omit_one_operand_loc (loc, type, arg1, arg0);
	  if (integer_onep (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
	  /* Transform x * -1 into -x.  Make sure to do the negation
	     on the original operand with conversions not stripped
	     because we can only strip non-sign-changing conversions.  */
	  if (integer_all_onesp (arg1))
	    return fold_convert_loc (loc, type, negate_expr (op0));
	  /* Transform x * -C into -x * C if x is easily negatable.  */
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && tree_int_cst_sgn (arg1) == -1
	      && negate_expr_p (arg0)
	      && (tem = negate_expr (arg1)) != arg1
	      && !TREE_OVERFLOW (tem))
	    return fold_build2_loc (loc, MULT_EXPR, type,
				    fold_convert_loc (loc, type,
						      negate_expr (arg0)),
				    tem);

	  /* (a * (1 << b)) is (a << b)  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg1, 0)))
	    return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
				    TREE_OPERAND (arg1, 1));
	  if (TREE_CODE (arg0) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg0, 0)))
	    return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
				    TREE_OPERAND (arg0, 1));

	  /* (A + A) * C -> A * 2 * C  */
	  if (TREE_CODE (arg0) == PLUS_EXPR
	      && TREE_CODE (arg1) == INTEGER_CST
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg0, 1), 0))
	    return fold_build2_loc (loc, MULT_EXPR, type,
				    omit_one_operand_loc (loc, type,
							  TREE_OPERAND (arg0, 0),
							  TREE_OPERAND (arg0, 1)),
				    fold_build2_loc (loc, MULT_EXPR, type,
						     build_int_cst (type, 2),
						     arg1));

	  strict_overflow_p = false;
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					     &strict_overflow_p)))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying "
					"multiplication"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_convert_loc (loc, type, tem);
	    }

	  /* Optimize z * conj(z) for integer complex numbers.  */
	  if (TREE_CODE (arg0) == CONJ_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    return fold_mult_zconjz (loc, type, arg1);
	  if (TREE_CODE (arg1) == CONJ_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    return fold_mult_zconjz (loc, type, arg0);
	}
      else
	{
	  /* Maybe fold x * 0 to 0.  The expressions aren't the same
	     when x is NaN, since x * 0 is also NaN.  Nor are they the
	     same in modes with signed zeros, since multiplying a
	     negative value by 0 gives -0, not +0.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_zerop (arg1))
	    return omit_one_operand_loc (loc, type, arg1, arg0);
	  /* In IEEE floating point, x*1 is not equivalent to x for snans.
	     Likewise for complex arithmetic with signed zeros.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
		  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	      && real_onep (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* Transform x * -1.0 into -x.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
		  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	      && real_minus_onep (arg1))
	    return fold_convert_loc (loc, type, negate_expr (arg0));

	  /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may change
	     the result for floating point types due to rounding, so it is
	     applied only if -fassociative-math was specified.  */
	  if (flag_associative_math
	      && TREE_CODE (arg0) == RDIV_EXPR
	      && TREE_CODE (arg1) == REAL_CST
	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
	    {
	      tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
				      arg1);
	      if (tem)
		return fold_build2_loc (loc, RDIV_EXPR, type, tem,
					TREE_OPERAND (arg0, 1));
	    }

	  /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
	  if (operand_equal_p (arg0, arg1, 0))
	    {
	      tree tem = fold_strip_sign_ops (arg0);
	      if (tem != NULL_TREE)
		{
		  tem = fold_convert_loc (loc, type, tem);
		  return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
		}
	    }
	  /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
	     This is not the same for NaNs or if signed zeros are
	     involved.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && TREE_CODE (arg1) == COMPLEX_CST
	      && real_zerop (TREE_REALPART (arg1)))
	    {
	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	      if (real_onep (TREE_IMAGPART (arg1)))
		return
		  fold_build2_loc (loc, COMPLEX_EXPR, type,
				   negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
								 rtype, arg0)),
				   fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
	      else if (real_minus_onep (TREE_IMAGPART (arg1)))
		return
		  fold_build2_loc (loc, COMPLEX_EXPR, type,
				   fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
				   negate_expr (fold_build1_loc (loc, REALPART_EXPR,
								 rtype, arg0)));
	    }

	  /* Optimize z * conj(z) for floating point complex numbers.
	     Guarded by flag_unsafe_math_optimizations as non-finite
	     imaginary components don't produce scalar results.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == CONJ_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    return fold_mult_zconjz (loc, type, arg1);
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == CONJ_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    return fold_mult_zconjz (loc, type, arg0);
	  if (flag_unsafe_math_optimizations)
	    {
	      enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	      enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	      /* Optimizations of root(...)*root(...).  */
	      if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
		{
		  tree rootfn, arg;
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg10 = CALL_EXPR_ARG (arg1, 0);

		  /* Optimize sqrt(x)*sqrt(x) as x.  */
		  if (BUILTIN_SQRT_P (fcode0)
		      && operand_equal_p (arg00, arg10, 0)
		      && ! HONOR_SNANS (TYPE_MODE (type)))
		    return arg00;

		  /* Optimize root(x)*root(y) as root(x*y).  */
		  rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		  arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
		  return build_call_expr_loc (loc, rootfn, 1, arg);
		}

	      /* Optimize expN(x)*expN(y) as expN(x+y).  */
	      if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
		{
		  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		  tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
					      CALL_EXPR_ARG (arg0, 0),
					      CALL_EXPR_ARG (arg1, 0));
		  return build_call_expr_loc (loc, expfn, 1, arg);
		}

	      /* Optimizations of pow(...)*pow(...).  */
	      if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
		  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
		  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
		{
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg01 = CALL_EXPR_ARG (arg0, 1);
		  tree arg10 = CALL_EXPR_ARG (arg1, 0);
		  tree arg11 = CALL_EXPR_ARG (arg1, 1);

		  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
		  if (operand_equal_p (arg01, arg11, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		      tree arg = fold_build2_loc (loc, MULT_EXPR, type,
						  arg00, arg10);
		      return build_call_expr_loc (loc, powfn, 2, arg, arg01);
		    }

		  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
		  if (operand_equal_p (arg00, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		      tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
						  arg01, arg11);
		      return build_call_expr_loc (loc, powfn, 2, arg00, arg);
		    }
		}
10922 if (((fcode0
== BUILT_IN_TAN
&& fcode1
== BUILT_IN_COS
)
10923 || (fcode0
== BUILT_IN_TANF
&& fcode1
== BUILT_IN_COSF
)
10924 || (fcode0
== BUILT_IN_TANL
&& fcode1
== BUILT_IN_COSL
)
10925 || (fcode0
== BUILT_IN_COS
&& fcode1
== BUILT_IN_TAN
)
10926 || (fcode0
== BUILT_IN_COSF
&& fcode1
== BUILT_IN_TANF
)
10927 || (fcode0
== BUILT_IN_COSL
&& fcode1
== BUILT_IN_TANL
))
10928 && operand_equal_p (CALL_EXPR_ARG (arg0
, 0),
10929 CALL_EXPR_ARG (arg1
, 0), 0))
10931 tree sinfn
= mathfn_built_in (type
, BUILT_IN_SIN
);
10933 if (sinfn
!= NULL_TREE
)
10934 return build_call_expr_loc (loc
, sinfn
, 1,
10935 CALL_EXPR_ARG (arg0
, 0));
10938 /* Optimize x*pow(x,c) as pow(x,c+1). */
10939 if (fcode1
== BUILT_IN_POW
10940 || fcode1
== BUILT_IN_POWF
10941 || fcode1
== BUILT_IN_POWL
)
10943 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
10944 tree arg11
= CALL_EXPR_ARG (arg1
, 1);
10945 if (TREE_CODE (arg11
) == REAL_CST
10946 && !TREE_OVERFLOW (arg11
)
10947 && operand_equal_p (arg0
, arg10
, 0))
10949 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
10953 c
= TREE_REAL_CST (arg11
);
10954 real_arithmetic (&c
, PLUS_EXPR
, &c
, &dconst1
);
10955 arg
= build_real (type
, c
);
10956 return build_call_expr_loc (loc
, powfn
, 2, arg0
, arg
);
10960 /* Optimize pow(x,c)*x as pow(x,c+1). */
10961 if (fcode0
== BUILT_IN_POW
10962 || fcode0
== BUILT_IN_POWF
10963 || fcode0
== BUILT_IN_POWL
)
10965 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
10966 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
10967 if (TREE_CODE (arg01
) == REAL_CST
10968 && !TREE_OVERFLOW (arg01
)
10969 && operand_equal_p (arg1
, arg00
, 0))
10971 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10975 c
= TREE_REAL_CST (arg01
);
10976 real_arithmetic (&c
, PLUS_EXPR
, &c
, &dconst1
);
10977 arg
= build_real (type
, c
);
10978 return build_call_expr_loc (loc
, powfn
, 2, arg1
, arg
);
10982 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10983 if (!in_gimple_form
10985 && operand_equal_p (arg0
, arg1
, 0))
10987 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
10991 tree arg
= build_real (type
, dconst2
);
10992 return build_call_expr_loc (loc
, powfn
, 2, arg0
, arg
);
      if (integer_all_onesp (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* ~X | X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  t1 = build_zero_cst (type);
	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
	  return omit_one_operand_loc (loc, type, t1, arg1);
	}

      /* X | ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  t1 = build_zero_cst (type);
	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
	  return omit_one_operand_loc (loc, type, t1, arg0);
	}
      /* Canonicalize (X & C1) | C2.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  double_int c1, c2, c3, msk;
	  int width = TYPE_PRECISION (type), w;
	  c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
	  c2 = tree_to_double_int (arg1);

	  /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
	  if ((c1 & c2) == c1)
	    return omit_one_operand_loc (loc, type, arg1,
					 TREE_OPERAND (arg0, 0));

	  msk = double_int::mask (width);

	  /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
	  if (msk.and_not (c1 | c2).is_zero ())
	    return fold_build2_loc (loc, BIT_IOR_EXPR, type,
				    TREE_OPERAND (arg0, 0), arg1);

	  /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
	     unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
	     mode which allows further optimizations.  */
	  c1 &= msk;
	  c2 &= msk;
	  c3 = c1.and_not (c2);
	  for (w = BITS_PER_UNIT;
	       w <= width && w <= HOST_BITS_PER_WIDE_INT;
	       w <<= 1)
	    {
	      unsigned HOST_WIDE_INT mask
		= (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
	      if (((c1.low | c2.low) & mask) == mask
		  && (c1.low & ~mask) == 0 && c1.high == 0)
		{
		  c3 = double_int::from_uhwi (mask);
		  break;
		}
	    }

	  if (c3 != c1)
	    return fold_build2_loc (loc, BIT_IOR_EXPR, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, type,
						     TREE_OPERAND (arg0, 0),
						     double_int_to_tree (type,
									 c3)),
				    arg1);
	}
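
      /* Illustrative examples (editor's sketch, assuming a 32-bit int):
	 (x & 0x0f) | 0xff folds to the constant 0xff (keeping any side
	 effects of x) because (C1 & C2) == C1, and
	 (x & 0xffff00ff) | 0x0000ff00 folds to x | 0x0000ff00 because
	 C1 | C2 covers every bit of the precision.  */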
      /* (X & Y) | Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
      /* (X & Y) | X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
      /* X | (X & Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
      /* X | (Y & X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));

      /* (X & ~Y) | (~X & Y) is X ^ Y */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR)
	{
	  tree a0, a1, l0, l1, n0, n1;

	  a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));

	  l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));

	  n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
	  n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);

	  if ((operand_equal_p (n0, a0, 0)
	       && operand_equal_p (n1, a1, 0))
	      || (operand_equal_p (n0, a1, 0)
		  && operand_equal_p (n1, a0, 0)))
	    return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
	}
      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;

      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

	 This results in more efficient code for machines without a NAND
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NAND instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	return
	  fold_build1_loc (loc, BIT_NOT_EXPR, type,
			   build2 (BIT_AND_EXPR, type,
				   fold_convert_loc (loc, type,
						     TREE_OPERAND (arg0, 0)),
				   fold_convert_loc (loc, type,
						     TREE_OPERAND (arg1, 0))));
      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_all_onesp (arg1))
	return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* ~X ^ X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  t1 = build_zero_cst (type);
	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
	  return omit_one_operand_loc (loc, type, t1, arg1);
	}

      /* X ^ ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  t1 = build_zero_cst (type);
	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
	  return omit_one_operand_loc (loc, type, t1, arg0);
	}
      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
	 with a constant, and the two constants have no bits in common,
	 we should treat this as a BIT_IOR_EXPR since this may produce more
	 simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (BIT_AND_EXPR,
					 TREE_OPERAND (arg0, 1),
					 TREE_OPERAND (arg1, 1))))
	{
	  code = BIT_IOR_EXPR;
	  goto bit_ior;
	}

      /* (X | Y) ^ X -> Y & ~X.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  tree t2 = TREE_OPERAND (arg0, 1);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
				arg1);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				fold_convert_loc (loc, type, t2),
				fold_convert_loc (loc, type, t1));
	  return t1;
	}

      /* (Y | X) ^ X -> Y & ~X.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tree t2 = TREE_OPERAND (arg0, 0);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
				arg1);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				fold_convert_loc (loc, type, t2),
				fold_convert_loc (loc, type, t1));
	  return t1;
	}

      /* X ^ (X | Y) -> Y & ~X.  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
	{
	  tree t2 = TREE_OPERAND (arg1, 1);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
				arg0);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				fold_convert_loc (loc, type, t2),
				fold_convert_loc (loc, type, t1));
	  return t1;
	}

      /* X ^ (Y | X) -> Y & ~X.  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
	{
	  tree t2 = TREE_OPERAND (arg1, 0);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
				arg0);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				fold_convert_loc (loc, type, t2),
				fold_convert_loc (loc, type, t1));
	  return t1;
	}
      /* Convert ~X ^ ~Y to X ^ Y.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));

      /* Convert ~X ^ C to X ^ ~C.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)),
				fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));

      /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_onep (arg1))
	return fold_build2_loc (loc, EQ_EXPR, type, arg0,
				build_zero_cst (TREE_TYPE (arg0)));

      /* Fold (X & Y) ^ Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg1));
	}
      /* Fold (X & Y) ^ X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg1));
	}
      /* Fold X ^ (X & Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_convert_loc (loc, type, arg0),
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
	}
      /* Fold X ^ (Y & X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg0));
	}

      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;
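
      /* Illustrative examples (editor's sketch) for the BIT_XOR folds
	 above: (x | y) ^ x becomes y & ~x, ~x ^ ~y becomes x ^ y, and
	 for a constant C, ~x ^ C becomes x ^ ~C so the BIT_NOT is
	 absorbed into the constant.  */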
    case BIT_AND_EXPR:
      if (integer_all_onesp (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      if (operand_equal_p (arg0, arg1, 0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* ~X & X, (X == 0) & X, and !X & X are always zero.  */
      if ((TREE_CODE (arg0) == BIT_NOT_EXPR
	   || TREE_CODE (arg0) == TRUTH_NOT_EXPR
	   || (TREE_CODE (arg0) == EQ_EXPR
	       && integer_zerop (TREE_OPERAND (arg0, 1))))
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

      /* X & ~X, X & (X == 0), and X & !X are always zero.  */
      if ((TREE_CODE (arg1) == BIT_NOT_EXPR
	   || TREE_CODE (arg1) == TRUTH_NOT_EXPR
	   || (TREE_CODE (arg1) == EQ_EXPR
	       && integer_zerop (TREE_OPERAND (arg1, 1))))
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree tmp1 = fold_convert_loc (loc, type, arg1);
	  tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	  tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
	  tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
	  return
	    fold_convert_loc (loc, type,
			      fold_build2_loc (loc, BIT_IOR_EXPR,
					       type, tmp2, tmp3));
	}

      /* (X | Y) & Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
      /* (X | Y) & X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
      /* X & (X | Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
      /* X & (Y | X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
      /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_onep (arg1))
	{
	  tree tem2;
	  tem = TREE_OPERAND (arg0, 0);
	  tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
	  tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
				  tem, tem2);
	  return fold_build2_loc (loc, EQ_EXPR, type, tem2,
				  build_zero_cst (TREE_TYPE (tem)));
	}
      /* Fold ~X & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && integer_onep (arg1))
	{
	  tree tem2;
	  tem = TREE_OPERAND (arg0, 0);
	  tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
	  tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
				  tem, tem2);
	  return fold_build2_loc (loc, EQ_EXPR, type, tem2,
				  build_zero_cst (TREE_TYPE (tem)));
	}
      /* Fold !X & 1 as X == 0.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && integer_onep (arg1))
	{
	  tem = TREE_OPERAND (arg0, 0);
	  return fold_build2_loc (loc, EQ_EXPR, type, tem,
				  build_zero_cst (TREE_TYPE (tem)));
	}
      /* Fold (X ^ Y) & Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg1));
	}
      /* Fold (X ^ Y) & X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg1));
	}
      /* Fold X & (X ^ Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_convert_loc (loc, type, arg0),
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
	}
      /* Fold X & (Y ^ X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg0));
	}
11438 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11439 multiple of 1 << CST. */
11440 if (TREE_CODE (arg1
) == INTEGER_CST
)
11442 double_int cst1
= tree_to_double_int (arg1
);
11443 double_int ncst1
= (-cst1
).ext(TYPE_PRECISION (TREE_TYPE (arg1
)),
11444 TYPE_UNSIGNED (TREE_TYPE (arg1
)));
11445 if ((cst1
& ncst1
) == ncst1
11446 && multiple_of_p (type
, arg0
,
11447 double_int_to_tree (TREE_TYPE (arg1
), ncst1
)))
11448 return fold_convert_loc (loc
, type
, arg0
);
	  /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
	     bits from CST2.  */
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg0) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	    {
	      int arg1tz
		= tree_to_double_int (TREE_OPERAND (arg0, 1)).trailing_zeros ();
	      if (arg1tz > 0)
		{
		  double_int arg1mask, masked;
		  arg1mask = ~double_int::mask (arg1tz);
		  arg1mask = arg1mask.ext (TYPE_PRECISION (type),
					   TYPE_UNSIGNED (type));
		  masked = arg1mask & tree_to_double_int (arg1);
		  if (masked.is_zero ())
		    return omit_two_operands_loc (loc, type, build_zero_cst (type),
						  arg0, arg1);
		  else if (masked != tree_to_double_int (arg1))
		    return fold_build2_loc (loc, code, type, op0,
					    double_int_to_tree (type, masked));
		}
	    }
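	  /* Illustrative example: "(x * 4) & 1" folds to 0 because the
	     product has two trailing zero bits, and "(x * 4) & 7" drops the
	     known-zero bits to become "(x * 4) & 4".  */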
	  /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
	     ((A & N) + B) & M -> (A + B) & M
	     Similarly if (N & M) == 0,
	     ((A | N) + B) & M -> (A + B) & M
	     and for - instead of + (or unary - instead of +)
	     and/or ^ instead of |.
	     If B is constant and (B & M) == 0, fold into A & M.  */
	  if (host_integerp (arg1, 1))
	    {
	      unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
	      if (~cst1 && (cst1 & (cst1 + 1)) == 0
		  && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
		  && (TREE_CODE (arg0) == PLUS_EXPR
		      || TREE_CODE (arg0) == MINUS_EXPR
		      || TREE_CODE (arg0) == NEGATE_EXPR)
		  && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
		      || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
		{
		  tree pmop[2];
		  int which = 0;
		  unsigned HOST_WIDE_INT cst0;

		  /* Now we know that arg0 is (C + D) or (C - D) or
		     -C and arg1 (M) is == (1LL << cst) - 1.
		     Store C into PMOP[0] and D into PMOP[1].  */
		  pmop[0] = TREE_OPERAND (arg0, 0);
		  pmop[1] = NULL;
		  if (TREE_CODE (arg0) != NEGATE_EXPR)
		    {
		      pmop[1] = TREE_OPERAND (arg0, 1);
		      which = 1;
		    }

		  if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
		      || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
			  & cst1) != cst1)
		    which = -1;

		  for (; which >= 0; which--)
		    switch (TREE_CODE (pmop[which]))
		      {
		      case BIT_AND_EXPR:
		      case BIT_IOR_EXPR:
		      case BIT_XOR_EXPR:
			if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
			    != INTEGER_CST)
			  break;
			/* tree_low_cst not used, because we don't care about
			   the upper bits.  */
			cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
			cst0 &= cst1;
			if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
			  {
			    if (cst0 != cst1)
			      break;
			  }
			else if (cst0 != 0)
			  break;
			/* If C or D is of the form (A & N) where
			   (N & M) == M, or of the form (A | N) or
			   (A ^ N) where (N & M) == 0, replace it with A.  */
			pmop[which] = TREE_OPERAND (pmop[which], 0);
			break;
		      case INTEGER_CST:
			/* If C or D is a N where (N & M) == 0, it can be
			   omitted (assumed 0).  */
			if ((TREE_CODE (arg0) == PLUS_EXPR
			     || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
			    && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
			  pmop[which] = NULL;
			break;
		      default:
			break;
		      }

		  /* Only build anything new if we optimized one or both
		     arguments above.  */
		  if (pmop[0] != TREE_OPERAND (arg0, 0)
		      || (TREE_CODE (arg0) != NEGATE_EXPR
			  && pmop[1] != TREE_OPERAND (arg0, 1)))
		    {
		      tree utype = TREE_TYPE (arg0);
		      if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
			{
			  /* Perform the operations in a type that has defined
			     overflow behavior.  */
			  utype = unsigned_type_for (TREE_TYPE (arg0));
			  if (pmop[0] != NULL)
			    pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
			  if (pmop[1] != NULL)
			    pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
			}

		      if (TREE_CODE (arg0) == NEGATE_EXPR)
			tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
		      else if (TREE_CODE (arg0) == PLUS_EXPR)
			{
			  if (pmop[0] != NULL && pmop[1] != NULL)
			    tem = fold_build2_loc (loc, PLUS_EXPR, utype,
						   pmop[0], pmop[1]);
			  else if (pmop[0] != NULL)
			    tem = pmop[0];
			  else if (pmop[1] != NULL)
			    tem = pmop[1];
			  else
			    return build_int_cst (type, 0);
			}
		      else if (pmop[0] == NULL)
			tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
		      else
			tem = fold_build2_loc (loc, MINUS_EXPR, utype,
					       pmop[0], pmop[1]);
		      /* TEM is now the new binary +, - or unary - replacement.  */
		      tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
					     fold_convert_loc (loc, utype, arg1));
		      return fold_convert_loc (loc, type, tem);
		    }
		}
	    }
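	  /* Illustrative example: with M == 0xff, "((a & 0x1ff) + b) & 0xff"
	     folds to "(a + b) & 0xff" because N == 0x1ff covers every bit of
	     M, and "((a | 0x100) + b) & 0xff" folds the same way because
	     N == 0x100 shares no bit with M.  */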
	  t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
	  if (t1 != NULL_TREE)
	    return t1;
	  /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
	  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
	      && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	    {
	      unsigned int prec
		= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

	      if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
		  && (~TREE_INT_CST_LOW (arg1)
		      & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
		return
		  fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	    }
	  /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

	     This results in more efficient code for machines without a NOR
	     instruction.  Combine will canonicalize to the first form
	     which will allow use of NOR instructions provided by the
	     backend if they exist.  */
	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
	      && TREE_CODE (arg1) == BIT_NOT_EXPR)
	    {
	      return fold_build1_loc (loc, BIT_NOT_EXPR, type,
				      build2 (BIT_IOR_EXPR, type,
					      fold_convert_loc (loc, type,
								TREE_OPERAND (arg0, 0)),
					      fold_convert_loc (loc, type,
								TREE_OPERAND (arg1, 0))));
	    }
	  /* If arg0 is derived from the address of an object or function, we may
	     be able to fold this expression using the object or function's
	     alignment.  */
	  if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
	    {
	      unsigned HOST_WIDE_INT modulus, residue;
	      unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);

	      modulus = get_pointer_modulus_and_residue (arg0, &residue,
							 integer_onep (arg1));

	      /* This works because modulus is a power of 2.  If this weren't the
		 case, we'd have to replace it by its greatest power-of-2
		 divisor: modulus & -modulus.  */
	      if (low < modulus)
		return build_int_cst (type, residue & low);
	    }
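	  /* Illustrative example: if arg0 is the address of an object with
	     known 16-byte alignment, "addr & 15" folds to the constant
	     residue (0 for a fully aligned object), because the mask is
	     smaller than the alignment modulus.  */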
	  /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
		  (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
	     if the new mask might be further optimized.  */
	  if ((TREE_CODE (arg0) == LSHIFT_EXPR
	       || TREE_CODE (arg0) == RSHIFT_EXPR)
	      && host_integerp (TREE_OPERAND (arg0, 1), 1)
	      && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
	      && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
		 < TYPE_PRECISION (TREE_TYPE (arg0))
	      && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
	      && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
	    {
	      unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
	      unsigned HOST_WIDE_INT mask
		= tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
	      unsigned HOST_WIDE_INT newmask, zerobits = 0;
	      tree shift_type = TREE_TYPE (arg0);

	      if (TREE_CODE (arg0) == LSHIFT_EXPR)
		zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
	      else if (TREE_CODE (arg0) == RSHIFT_EXPR
		       && TYPE_PRECISION (TREE_TYPE (arg0))
			  == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
		{
		  unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
		  tree arg00 = TREE_OPERAND (arg0, 0);
		  /* See if more bits can be proven as zero because of
		     zero extension.  */
		  if (TREE_CODE (arg00) == NOP_EXPR
		      && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
		    {
		      tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
		      if (TYPE_PRECISION (inner_type)
			  == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
			  && TYPE_PRECISION (inner_type) < prec)
			{
			  prec = TYPE_PRECISION (inner_type);
			  /* See if we can shorten the right shift.  */
			  if (shiftc < prec)
			    shift_type = inner_type;
			}
		    }
		  zerobits = ~(unsigned HOST_WIDE_INT) 0;
		  zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
		  zerobits <<= prec - shiftc;
		  /* For arithmetic shift if sign bit could be set, zerobits
		     can contain actually sign bits, so no transformation is
		     possible, unless MASK masks them all away.  In that
		     case the shift needs to be converted into logical shift.  */
		  if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
		      && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
		    {
		      if ((mask & zerobits) == 0)
			shift_type = unsigned_type_for (TREE_TYPE (arg0));
		      else
			zerobits = 0;
		    }
		}

	      /* ((X << 16) & 0xff00) is (X, 0).  */
	      if ((mask & zerobits) == mask)
		return omit_one_operand_loc (loc, type,
					     build_int_cst (type, 0), arg0);

	      newmask = mask | zerobits;
	      if (newmask != mask && (newmask & (newmask + 1)) == 0)
		{
		  unsigned int prec;

		  /* Only do the transformation if NEWMASK is some integer
		     mode's mask.  */
		  for (prec = BITS_PER_UNIT;
		       prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
		    if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
		      break;
		  if (prec < HOST_BITS_PER_WIDE_INT
		      || newmask == ~(unsigned HOST_WIDE_INT) 0)
		    {
		      tree newmaskt;

		      if (shift_type != TREE_TYPE (arg0))
			{
			  tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
						 fold_convert_loc (loc, shift_type,
								   TREE_OPERAND (arg0, 0)),
						 TREE_OPERAND (arg0, 1));
			  tem = fold_convert_loc (loc, type, tem);
			}
		      else
			tem = op0;
		      newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
		      if (!tree_int_cst_equal (newmaskt, arg1))
			return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
		    }
		}
	    }

	  goto associate;
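	  /* Illustrative example for the shift-and-mask fold above: for a
	     32-bit unsigned x, "(x << 4) & 0xf0" is rewritten with the wider
	     mask as "(x << 4) & 0xff", since the low four bits are known to
	     be zero and 0xff is a machine-mode mask that may combine with
	     surrounding operations.  */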
	case RDIV_EXPR:
	  /* Don't touch a floating-point divide by zero unless the mode
	     of the constant can represent infinity.  */
	  if (TREE_CODE (arg1) == REAL_CST
	      && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
	      && real_zerop (arg1))
	    return NULL_TREE;

	  /* Optimize A / A to 1.0 if we don't care about
	     NaNs or Infinities.  Skip the transformation
	     for non-real operands.  */
	  if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
	      && operand_equal_p (arg0, arg1, 0))
	    {
	      tree r = build_real (TREE_TYPE (arg0), dconst1);

	      return omit_two_operands_loc (loc, type, r, arg0, arg1);
	    }

	  /* The complex version of the above A / A optimization.  */
	  if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && operand_equal_p (arg0, arg1, 0))
	    {
	      tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
	      if (! HONOR_NANS (TYPE_MODE (elem_type))
		  && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
		{
		  tree r = build_real (elem_type, dconst1);
		  /* omit_two_operands will call fold_convert for us.  */
		  return omit_two_operands_loc (loc, type, r, arg0, arg1);
		}
	    }
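	  /* Illustrative example: when neither NaNs nor infinities are
	     honored (e.g. under -ffinite-math-only), "x / x" for a scalar
	     double x folds to 1.0 while preserving any side effects of x.  */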
	  /* (-A) / (-B) -> A / B  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	    return fold_build2_loc (loc, RDIV_EXPR, type,
				    TREE_OPERAND (arg0, 0),
				    negate_expr (arg1));
	  if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	    return fold_build2_loc (loc, RDIV_EXPR, type,
				    negate_expr (arg0),
				    TREE_OPERAND (arg1, 0));

	  /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_onep (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_minus_onep (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type,
							  negate_expr (arg0)));
	  /* If ARG1 is a constant, we can convert this to a multiply by the
	     reciprocal.  This does not have the same rounding properties,
	     so only do this if -freciprocal-math.  We can actually
	     always safely do it if ARG1 is a power of two, but it's hard to
	     tell if it is or not in a portable manner.  */
	  if (optimize
	      && (TREE_CODE (arg1) == REAL_CST
		  || (TREE_CODE (arg1) == COMPLEX_CST
		      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
		  || (TREE_CODE (arg1) == VECTOR_CST
		      && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
	    {
	      if (flag_reciprocal_math
		  && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
		return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
	      /* Find the reciprocal if optimizing and the result is exact.
		 TODO: Complex reciprocal not implemented.  */
	      if (TREE_CODE (arg1) != COMPLEX_CST)
		{
		  tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);

		  if (inverse)
		    return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
		}
	    }
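	  /* Illustrative example: under -freciprocal-math, "x / 5.0" becomes
	     "x * 0.2"; "x / 4.0" has an exactly representable reciprocal, so
	     it becomes "x * 0.25" via the exact-inverse path.  */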
	  /* Convert A/B/C to A/(B*C).  */
	  if (flag_reciprocal_math
	      && TREE_CODE (arg0) == RDIV_EXPR)
	    return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
				    fold_build2_loc (loc, MULT_EXPR, type,
						     TREE_OPERAND (arg0, 1), arg1));

	  /* Convert A/(B/C) to (A/B)*C.  */
	  if (flag_reciprocal_math
	      && TREE_CODE (arg1) == RDIV_EXPR)
	    return fold_build2_loc (loc, MULT_EXPR, type,
				    fold_build2_loc (loc, RDIV_EXPR, type, arg0,
						     TREE_OPERAND (arg1, 0)),
				    TREE_OPERAND (arg1, 1));

	  /* Convert C1/(X*C2) into (C1/C2)/X.  */
	  if (flag_reciprocal_math
	      && TREE_CODE (arg1) == MULT_EXPR
	      && TREE_CODE (arg0) == REAL_CST
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
	    {
	      tree tem = const_binop (RDIV_EXPR, arg0,
				      TREE_OPERAND (arg1, 1));
	      if (tem)
		return fold_build2_loc (loc, RDIV_EXPR, type, tem,
					TREE_OPERAND (arg1, 0));
	    }
	  if (flag_unsafe_math_optimizations)
	    {
	      enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	      enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	      /* Optimize sin(x)/cos(x) as tan(x).  */
	      if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
		   || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
		   || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
		  && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				      CALL_EXPR_ARG (arg1, 0), 0))
		{
		  tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

		  if (tanfn != NULL_TREE)
		    return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
		}

	      /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
	      if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
		   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
		   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
		  && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				      CALL_EXPR_ARG (arg1, 0), 0))
		{
		  tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

		  if (tanfn != NULL_TREE)
		    {
		      tree tmp = build_call_expr_loc (loc, tanfn, 1,
						      CALL_EXPR_ARG (arg0, 0));
		      return fold_build2_loc (loc, RDIV_EXPR, type,
					      build_real (type, dconst1), tmp);
		    }
		}

	      /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
		 NaNs or Infinities.  */
	      if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
		   || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
		   || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
		{
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg01 = CALL_EXPR_ARG (arg1, 0);

		  if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
		      && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
		      && operand_equal_p (arg00, arg01, 0))
		    {
		      tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

		      if (cosfn != NULL_TREE)
			return build_call_expr_loc (loc, cosfn, 1, arg00);
		    }
		}

	      /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
		 NaNs or Infinities.  */
	      if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
		   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
		   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
		{
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg01 = CALL_EXPR_ARG (arg1, 0);

		  if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
		      && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
		      && operand_equal_p (arg00, arg01, 0))
		    {
		      tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

		      if (cosfn != NULL_TREE)
			{
			  tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
			  return fold_build2_loc (loc, RDIV_EXPR, type,
						  build_real (type, dconst1),
						  tmp);
			}
		    }
		}

	      /* Optimize pow(x,c)/x as pow(x,c-1).  */
	      if (fcode0 == BUILT_IN_POW
		  || fcode0 == BUILT_IN_POWF
		  || fcode0 == BUILT_IN_POWL)
		{
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg01 = CALL_EXPR_ARG (arg0, 1);
		  if (TREE_CODE (arg01) == REAL_CST
		      && !TREE_OVERFLOW (arg01)
		      && operand_equal_p (arg1, arg00, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		      REAL_VALUE_TYPE c;
		      tree arg;

		      c = TREE_REAL_CST (arg01);
		      real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      return build_call_expr_loc (loc, powfn, 2, arg1, arg);
		    }
		}

	      /* Optimize a/root(b/c) into a*root(c/b).  */
	      if (BUILTIN_ROOT_P (fcode1))
		{
		  tree rootarg = CALL_EXPR_ARG (arg1, 0);

		  if (TREE_CODE (rootarg) == RDIV_EXPR)
		    {
		      tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
		      tree b = TREE_OPERAND (rootarg, 0);
		      tree c = TREE_OPERAND (rootarg, 1);

		      tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);

		      tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
		      return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
		    }
		}

	      /* Optimize x/expN(y) into x*expN(-y).  */
	      if (BUILTIN_EXPONENT_P (fcode1))
		{
		  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
		  tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
		  arg1 = build_call_expr_loc (loc,
					      expfn, 1,
					      fold_convert_loc (loc, type, arg));
		  return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
		}

	      /* Optimize x/pow(y,z) into x*pow(y,-z).  */
	      if (fcode1 == BUILT_IN_POW
		  || fcode1 == BUILT_IN_POWF
		  || fcode1 == BUILT_IN_POWL)
		{
		  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
		  tree arg10 = CALL_EXPR_ARG (arg1, 0);
		  tree arg11 = CALL_EXPR_ARG (arg1, 1);
		  tree neg11 = fold_convert_loc (loc, type,
						 negate_expr (arg11));
		  arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
		  return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
		}
	    }
	  return NULL_TREE;
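	  /* Illustrative examples for the unsafe-math folds above, all under
	     -funsafe-math-optimizations: "sin (x) / cos (x)" becomes
	     "tan (x)", "pow (x, 3.0) / x" becomes "pow (x, 2.0)", and
	     "a / exp (y)" becomes "a * exp (-y)".  */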
	case TRUNC_DIV_EXPR:
	  /* Optimize (X & (-A)) / A where A is a power of 2,
	     to X >> log2(A) */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
	      && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
	    {
	      tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
					  arg1, TREE_OPERAND (arg0, 1));
	      if (sum && integer_zerop (sum)) {
		unsigned long pow2;

		if (TREE_INT_CST_LOW (arg1))
		  pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
		else
		  pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
			 + HOST_BITS_PER_WIDE_INT;

		return fold_build2_loc (loc, RSHIFT_EXPR, type,
					TREE_OPERAND (arg0, 0),
					build_int_cst (integer_type_node, pow2));
	      }
	    }

	  /* Fall through */
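	  /* Illustrative example: for signed x, "(x & -16) / 16" folds to
	     "x >> 4", since the mask guarantees the dividend is a multiple
	     of 16.  */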
	case FLOOR_DIV_EXPR:
	  /* Simplify A / (B << N) where A and B are positive and B is
	     a power of 2, to A >> (N + log2(B)).  */
	  strict_overflow_p = false;
	  if (TREE_CODE (arg1) == LSHIFT_EXPR
	      && (TYPE_UNSIGNED (type)
		  || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
	    {
	      tree sval = TREE_OPERAND (arg1, 0);
	      if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
		{
		  tree sh_cnt = TREE_OPERAND (arg1, 1);
		  unsigned long pow2;

		  if (TREE_INT_CST_LOW (sval))
		    pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
		  else
		    pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
			   + HOST_BITS_PER_WIDE_INT;

		  if (strict_overflow_p)
		    fold_overflow_warning (("assuming signed overflow does not "
					    "occur when simplifying A / (B << N)"),
					   WARN_STRICT_OVERFLOW_MISC);

		  sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
					    sh_cnt,
					    build_int_cst (TREE_TYPE (sh_cnt),
							   pow2));
		  return fold_build2_loc (loc, RSHIFT_EXPR, type,
					  fold_convert_loc (loc, type, arg0), sh_cnt);
		}
	    }

	  /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
	     TRUNC_DIV_EXPR.  Rewrite into the latter in this case.  */
	  if (INTEGRAL_TYPE_P (type)
	      && TYPE_UNSIGNED (type)
	      && code == FLOOR_DIV_EXPR)
	    return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);

	  /* Fall through */
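	  /* Illustrative example: for unsigned a, "a / (4 << n)" folds to
	     "a >> (n + 2)", folding the power-of-two factor into the shift
	     count.  */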
	case ROUND_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case EXACT_DIV_EXPR:
	  if (integer_onep (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
	  if (integer_zerop (arg1))
	    return NULL_TREE;
	  /* X / -1 is -X.  */
	  if (!TYPE_UNSIGNED (type)
	      && TREE_CODE (arg1) == INTEGER_CST
	      && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
	      && TREE_INT_CST_HIGH (arg1) == -1)
	    return fold_convert_loc (loc, type, negate_expr (arg0));

	  /* Convert -A / -B to A / B when the type is signed and overflow is
	     undefined.  */
	  if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	      && TREE_CODE (arg0) == NEGATE_EXPR
	      && negate_expr_p (arg1))
	    {
	      if (INTEGRAL_TYPE_P (type))
		fold_overflow_warning (("assuming signed overflow does not occur "
					"when distributing negation across "
					"division"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, code, type,
				      fold_convert_loc (loc, type,
							TREE_OPERAND (arg0, 0)),
				      fold_convert_loc (loc, type,
							negate_expr (arg1)));
	    }
	  if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	      && TREE_CODE (arg1) == NEGATE_EXPR
	      && negate_expr_p (arg0))
	    {
	      if (INTEGRAL_TYPE_P (type))
		fold_overflow_warning (("assuming signed overflow does not occur "
					"when distributing negation across "
					"division"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, code, type,
				      fold_convert_loc (loc, type,
							negate_expr (arg0)),
				      fold_convert_loc (loc, type,
							TREE_OPERAND (arg1, 0)));
	    }

	  /* If arg0 is a multiple of arg1, then rewrite to the fastest div
	     operation, EXACT_DIV_EXPR.

	     Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
	     At one time others generated faster code, it's not clear if they do
	     after the last round to changes to the DIV code in expmed.c.  */
	  if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
	      && multiple_of_p (type, arg0, arg1))
	    return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);

	  strict_overflow_p = false;
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					     &strict_overflow_p)))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not occur "
					"when simplifying division"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_convert_loc (loc, type, tem);
	    }

	  return NULL_TREE;
	case CEIL_MOD_EXPR:
	case FLOOR_MOD_EXPR:
	case ROUND_MOD_EXPR:
	case TRUNC_MOD_EXPR:
	  /* X % 1 is always zero, but be sure to preserve any side
	     effects in X.  */
	  if (integer_onep (arg1))
	    return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

	  /* X % 0, return X % 0 unchanged so that we can get the
	     proper warnings and errors.  */
	  if (integer_zerop (arg1))
	    return NULL_TREE;

	  /* 0 % X is always zero, but be sure to preserve any side
	     effects in X.  Place this after checking for X == 0.  */
	  if (integer_zerop (arg0))
	    return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

	  /* X % -1 is zero.  */
	  if (!TYPE_UNSIGNED (type)
	      && TREE_CODE (arg1) == INTEGER_CST
	      && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
	      && TREE_INT_CST_HIGH (arg1) == -1)
	    return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

	  /* X % -C is the same as X % C.  */
	  if (code == TRUNC_MOD_EXPR
	      && !TYPE_UNSIGNED (type)
	      && TREE_CODE (arg1) == INTEGER_CST
	      && !TREE_OVERFLOW (arg1)
	      && TREE_INT_CST_HIGH (arg1) < 0
	      && !TYPE_OVERFLOW_TRAPS (type)
	      /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
	      && !sign_bit_p (arg1, arg1))
	    return fold_build2_loc (loc, code, type,
				    fold_convert_loc (loc, type, arg0),
				    fold_convert_loc (loc, type,
						      negate_expr (arg1)));

	  /* X % -Y is the same as X % Y.  */
	  if (code == TRUNC_MOD_EXPR
	      && !TYPE_UNSIGNED (type)
	      && TREE_CODE (arg1) == NEGATE_EXPR
	      && !TYPE_OVERFLOW_TRAPS (type))
	    return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
				    fold_convert_loc (loc, type,
						      TREE_OPERAND (arg1, 0)));

	  strict_overflow_p = false;
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					     &strict_overflow_p)))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not occur "
					"when simplifying modulus"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_convert_loc (loc, type, tem);
	    }
	  /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
	     i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
	  if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
	      && (TYPE_UNSIGNED (type)
		  || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
	    {
	      tree c = arg1;
	      /* Also optimize A % (C << N) where C is a power of 2,
		 to A & ((C << N) - 1).  */
	      if (TREE_CODE (arg1) == LSHIFT_EXPR)
		c = TREE_OPERAND (arg1, 0);

	      if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
		{
		  tree mask
		    = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
				       build_int_cst (TREE_TYPE (arg1), 1));
		  if (strict_overflow_p)
		    fold_overflow_warning (("assuming signed overflow does not "
					    "occur when simplifying "
					    "X % (power of two)"),
					   WARN_STRICT_OVERFLOW_MISC);
		  return fold_build2_loc (loc, BIT_AND_EXPR, type,
					  fold_convert_loc (loc, type, arg0),
					  fold_convert_loc (loc, type, mask));
		}
	    }

	  return NULL_TREE;
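	  /* Illustrative example for the fold above: for unsigned x, "x % 8"
	     folds to "x & 7", and "x % (2 << n)" folds to
	     "x & ((2 << n) - 1)".  */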
	case LROTATE_EXPR:
	case RROTATE_EXPR:
	  if (integer_all_onesp (arg0))
	    return omit_one_operand_loc (loc, type, arg0, arg1);
	  goto shift;

	case RSHIFT_EXPR:
	  /* Optimize -1 >> x for arithmetic right shifts.  */
	  if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
	      && tree_expr_nonnegative_p (arg1))
	    return omit_one_operand_loc (loc, type, arg0, arg1);
	  /* ... fall through ...  */

	case LSHIFT_EXPR:
	shift:
	  if (integer_zerop (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
	  if (integer_zerop (arg0))
	    return omit_one_operand_loc (loc, type, arg0, arg1);
	  /* Since negative shift count is not well-defined,
	     don't try to compute it in the compiler.  */
	  if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
	    return NULL_TREE;

	  /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
	  if (TREE_CODE (op0) == code && host_integerp (arg1, false)
	      && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
	      && host_integerp (TREE_OPERAND (arg0, 1), false)
	      && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
	    {
	      HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
				   + TREE_INT_CST_LOW (arg1));

	      /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
		 being well defined.  */
	      if (low >= TYPE_PRECISION (type))
		{
		  if (code == LROTATE_EXPR || code == RROTATE_EXPR)
		    low = low % TYPE_PRECISION (type);
		  else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
		    return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
						 TREE_OPERAND (arg0, 0));
		  else
		    low = TYPE_PRECISION (type) - 1;
		}

	      return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				      build_int_cst (type, low));
	    }

	  /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
	     into x & ((unsigned)-1 >> c) for unsigned types.  */
	  if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
	       || (TYPE_UNSIGNED (type)
		   && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
	      && host_integerp (arg1, false)
	      && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
	      && host_integerp (TREE_OPERAND (arg0, 1), false)
	      && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
	    {
	      HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
	      HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
	      tree lshift;
	      tree arg00;

	      if (low0 == low1)
		{
		  arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));

		  lshift = build_int_cst (type, -1);
		  lshift = int_const_binop (code, lshift, arg1);

		  return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
		}
	    }
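	  /* Illustrative example: "(x >> 3) << 3" folds to "x & -8",
	     masking off the three low bits instead of shifting twice.  */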
	  /* Rewrite an LROTATE_EXPR by a constant into an
	     RROTATE_EXPR by a new constant.  */
	  if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
	    {
	      tree tem = build_int_cst (TREE_TYPE (arg1),
					TYPE_PRECISION (type));
	      tem = const_binop (MINUS_EXPR, tem, arg1);
	      return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
	    }

	  /* If we have a rotate of a bit operation with the rotate count and
	     the second operand of the bit operation both constant,
	     permute the two operations.  */
	  if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	      && (TREE_CODE (arg0) == BIT_AND_EXPR
		  || TREE_CODE (arg0) == BIT_IOR_EXPR
		  || TREE_CODE (arg0) == BIT_XOR_EXPR)
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	    return fold_build2_loc (loc, TREE_CODE (arg0), type,
				    fold_build2_loc (loc, code, type,
						     TREE_OPERAND (arg0, 0), arg1),
				    fold_build2_loc (loc, code, type,
						     TREE_OPERAND (arg0, 1), arg1));

	  /* Two consecutive rotates adding up to the precision of the
	     type can be ignored.  */
	  if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg0) == RROTATE_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      && TREE_INT_CST_HIGH (arg1) == 0
	      && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
	      && ((TREE_INT_CST_LOW (arg1)
		   + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
		  == (unsigned int) TYPE_PRECISION (type)))
	    return TREE_OPERAND (arg0, 0);

	  /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
		  (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
	     if the latter can be further optimized.  */
	  if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
	      && TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	    {
	      tree mask = fold_build2_loc (loc, code, type,
					   fold_convert_loc (loc, type,
							     TREE_OPERAND (arg0, 1)),
					   arg1);
	      tree shift = fold_build2_loc (loc, code, type,
					    fold_convert_loc (loc, type,
							      TREE_OPERAND (arg0, 0)),
					    arg1);
	      tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
	      if (tem)
		return tem;
	    }

	  return NULL_TREE;
	case MIN_EXPR:
	  if (operand_equal_p (arg0, arg1, 0))
	    return omit_one_operand_loc (loc, type, arg0, arg1);
	  if (INTEGRAL_TYPE_P (type)
	      && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
	    return omit_one_operand_loc (loc, type, arg1, arg0);
	  tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
	  if (tem)
	    return tem;
	  goto associate;

	case MAX_EXPR:
	  if (operand_equal_p (arg0, arg1, 0))
	    return omit_one_operand_loc (loc, type, arg0, arg1);
	  if (INTEGRAL_TYPE_P (type)
	      && TYPE_MAX_VALUE (type)
	      && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
	    return omit_one_operand_loc (loc, type, arg1, arg0);
	  tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
	  if (tem)
	    return tem;
	  goto associate;
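	  /* Illustrative example: "MIN (x, x)" folds to x, and for int y,
	     "MAX (y, INT_MAX)" folds to INT_MAX while still evaluating y for
	     its side effects.  */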
	case TRUTH_ANDIF_EXPR:
	  /* Note that the operands of this must be ints
	     and their values must be 0 or 1.
	     ("true" is a fixed value perhaps depending on the language.)  */
	  /* If first arg is constant zero, return it.  */
	  if (integer_zerop (arg0))
	    return fold_convert_loc (loc, type, arg0);
	case TRUTH_AND_EXPR:
	  /* If either arg is constant true, drop it.  */
	  if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
	  if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
	      /* Preserve sequence points.  */
	      && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
	  /* If second arg is constant zero, result is zero, but first arg
	     must be evaluated.  */
	  if (integer_zerop (arg1))
	    return omit_one_operand_loc (loc, type, arg1, arg0);
	  /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
	     case will be handled here.  */
	  if (integer_zerop (arg0))
	    return omit_one_operand_loc (loc, type, arg0, arg1);

	  /* !X && X is always false.  */
	  if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
	  /* X && !X is always false.  */
	  if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

	  /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
	     means A >= Y && A != MAX, but in this case we know that
	     A < X <= MAX.  */

	  if (!TREE_SIDE_EFFECTS (arg0)
	      && !TREE_SIDE_EFFECTS (arg1))
	    {
	      tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
	      if (tem && !operand_equal_p (tem, arg0, 0))
		return fold_build2_loc (loc, code, type, tem, arg1);

	      tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
	      if (tem && !operand_equal_p (tem, arg1, 0))
		return fold_build2_loc (loc, code, type, arg0, tem);
	    }

	  if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
	      != NULL_TREE)
	    return tem;

	  return NULL_TREE;
	case TRUTH_ORIF_EXPR:
	  /* Note that the operands of this must be ints
	     and their values must be 0 or true.
	     ("true" is a fixed value perhaps depending on the language.)  */
	  /* If first arg is constant true, return it.  */
	  if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	    return fold_convert_loc (loc, type, arg0);
	case TRUTH_OR_EXPR:
	  /* If either arg is constant zero, drop it.  */
	  if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
	  if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
	      /* Preserve sequence points.  */
	      && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
	  /* If second arg is constant true, result is true, but we must
	     evaluate first arg.  */
	  if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
	    return omit_one_operand_loc (loc, type, arg1, arg0);
	  /* Likewise for first arg, but note this only occurs here for
	     TRUTH_OR_EXPR.  */
	  if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	    return omit_one_operand_loc (loc, type, arg0, arg1);

	  /* !X || X is always true.  */
	  if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    return omit_one_operand_loc (loc, type, integer_one_node, arg1);
	  /* X || !X is always true.  */
	  if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    return omit_one_operand_loc (loc, type, integer_one_node, arg0);

	  /* (X && !Y) || (!X && Y) is X ^ Y */
	  if (TREE_CODE (arg0) == TRUTH_AND_EXPR
	      && TREE_CODE (arg1) == TRUTH_AND_EXPR)
	    {
	      tree a0, a1, l0, l1, n0, n1;

	      a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	      a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));

	      l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	      l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));

	      n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
	      n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);

	      if ((operand_equal_p (n0, a0, 0)
		   && operand_equal_p (n1, a1, 0))
		  || (operand_equal_p (n0, a1, 0)
		      && operand_equal_p (n1, a0, 0)))
		return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
	    }

	  if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
	      != NULL_TREE)
	    return tem;

	  return NULL_TREE;
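	  /* Illustrative example: "(x && !y) || (!x && y)" folds to the
	     single truth operation "x ^ y" via the (X && !Y) || (!X && Y)
	     pattern above.  */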
	case TRUTH_XOR_EXPR:
	  /* If the second arg is constant zero, drop it.  */
	  if (integer_zerop (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
	  /* If the second arg is constant true, this is a logical inversion.  */
	  if (integer_onep (arg1))
	    {
	      /* Only call invert_truthvalue if operand is a truth value.  */
	      if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
		tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
	      else
		tem = invert_truthvalue_loc (loc, arg0);
	      return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	    }
	  /* Identical arguments cancel to zero.  */
	  if (operand_equal_p (arg0, arg1, 0))
	    return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

	  /* !X ^ X is always true.  */
	  if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    return omit_one_operand_loc (loc, type, integer_one_node, arg1);

	  /* X ^ !X is always true.  */
	  if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    return omit_one_operand_loc (loc, type, integer_one_node, arg0);

	  return NULL_TREE;
	case EQ_EXPR:
	case NE_EXPR:
	  STRIP_NOPS (arg0);
	  STRIP_NOPS (arg1);

	  tem = fold_comparison (loc, code, type, op0, op1);
	  if (tem != NULL_TREE)
	    return tem;

	  /* bool_var != 0 becomes bool_var. */
	  if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	      && code == NE_EXPR)
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* bool_var == 1 becomes bool_var. */
	  if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	      && code == EQ_EXPR)
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* bool_var != 1 becomes !bool_var. */
	  if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	      && code == NE_EXPR)
	    return fold_convert_loc (loc, type,
				     fold_build1_loc (loc, TRUTH_NOT_EXPR,
						      TREE_TYPE (arg0), arg0));

	  /* bool_var == 0 becomes !bool_var. */
	  if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	      && code == EQ_EXPR)
	    return fold_convert_loc (loc, type,
				     fold_build1_loc (loc, TRUTH_NOT_EXPR,
						      TREE_TYPE (arg0), arg0));

	  /* !exp != 0 becomes !exp */
	  if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
	      && code == NE_EXPR)
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
	  /* If this is an equality comparison of the address of two non-weak,
	     unaliased symbols neither of which are extern (since we do not
	     have access to attributes for externs), then we know the result.  */
	  if (TREE_CODE (arg0) == ADDR_EXPR
	      && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
	      && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
	      && ! lookup_attribute ("alias",
				     DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
	      && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
	      && TREE_CODE (arg1) == ADDR_EXPR
	      && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
	      && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
	      && ! lookup_attribute ("alias",
				     DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
	      && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
	    {
	      /* We know that we're looking at the address of two
		 non-weak, unaliased, static _DECL nodes.

		 It is both wasteful and incorrect to call operand_equal_p
		 to compare the two ADDR_EXPR nodes.  It is wasteful in that
		 all we need to do is test pointer equality for the arguments
		 to the two ADDR_EXPR nodes.  It is incorrect to use
		 operand_equal_p as that function is NOT equivalent to a
		 C equality test.  It can in fact return false for two
		 objects which would test as equal using the C equality
		 operator.  */
	      bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
	      return constant_boolean_node (equal
					    ? code == EQ_EXPR : code != EQ_EXPR,
					    type);
	    }
	  /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
	     a MINUS_EXPR of a constant, we can convert it into a comparison with
	     a revised constant as long as no overflow occurs.  */
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && (TREE_CODE (arg0) == PLUS_EXPR
		  || TREE_CODE (arg0) == MINUS_EXPR)
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
					  ? MINUS_EXPR : PLUS_EXPR,
					  fold_convert_loc (loc, TREE_TYPE (arg0),
							    arg1),
					  TREE_OPERAND (arg0, 1)))
	      && !TREE_OVERFLOW (tem))
	    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

	  /* Similarly for a NEGATE_EXPR.  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR
	      && TREE_CODE (arg1) == INTEGER_CST
	      && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
							    arg1)))
	      && TREE_CODE (tem) == INTEGER_CST
	      && !TREE_OVERFLOW (tem))
	    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

	  /* Similarly for a BIT_XOR_EXPR;  X ^ C1 == C2 is X == (C1 ^ C2).  */
	  if (TREE_CODE (arg0) == BIT_XOR_EXPR
	      && TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				    fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
						     fold_convert_loc (loc,
								       TREE_TYPE (arg0),
								       arg1),
						     TREE_OPERAND (arg0, 1)));
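	  /* Illustrative examples: "x + 10 == 15" folds to "x == 5" when no
	     overflow can occur, and "(x ^ 5) == 3" folds to "x == 6", since
	     C1 ^ C2 == 5 ^ 3 == 6.  */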
	  /* Transform comparisons of the form X +- Y CMP X to Y CMP 0.  */
	  if ((TREE_CODE (arg0) == PLUS_EXPR
	       || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	       || TREE_CODE (arg0) == MINUS_EXPR)
	      && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
									    0)),
				  arg1, 0)
	      && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
		  || POINTER_TYPE_P (TREE_TYPE (arg0))))
	    {
	      tree val = TREE_OPERAND (arg0, 1);
	      return omit_two_operands_loc (loc, type,
					    fold_build2_loc (loc, code, type,
							     val,
							     build_int_cst (TREE_TYPE (val),
									    0)),
					    TREE_OPERAND (arg0, 0), arg1);
	    }

	  /* Transform comparisons of the form C - X CMP X if C % 2 == 1.  */
	  if (TREE_CODE (arg0) == MINUS_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
	      && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
									    1)),
				  arg1, 0)
	      && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
	    return omit_two_operands_loc (loc, type,
					  code == NE_EXPR
					  ? boolean_true_node : boolean_false_node,
					  TREE_OPERAND (arg0, 1), arg1);

	  /* If we have X - Y == 0, we can convert that to X == Y and similarly
	     for !=.  Don't do this for ordered comparisons due to overflow.  */
	  if (TREE_CODE (arg0) == MINUS_EXPR
	      && integer_zerop (arg1))
	    return fold_build2_loc (loc, code, type,
				    TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

	  /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
	  if (TREE_CODE (arg0) == ABS_EXPR
	      && (integer_zerop (arg1) || real_zerop (arg1)))
	    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
	  /* If this is an EQ or NE comparison with zero and ARG0 is
	     (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
	     two operations, but the latter can be done in one less insn
	     on machines that have only two-operand insns or on which a
	     constant cannot be the first operand.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && integer_zerop (arg1))
	    {
	      tree arg00 = TREE_OPERAND (arg0, 0);
	      tree arg01 = TREE_OPERAND (arg0, 1);
	      if (TREE_CODE (arg00) == LSHIFT_EXPR
		  && integer_onep (TREE_OPERAND (arg00, 0)))
		{
		  tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
					      arg01, TREE_OPERAND (arg00, 1));
		  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
					 build_int_cst (TREE_TYPE (arg0), 1));
		  return fold_build2_loc (loc, code, type,
					  fold_convert_loc (loc, TREE_TYPE (arg1), tem),
					  arg1);
		}
	      else if (TREE_CODE (arg01) == LSHIFT_EXPR
		       && integer_onep (TREE_OPERAND (arg01, 0)))
		{
		  tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
					      arg00, TREE_OPERAND (arg01, 1));
		  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
					 build_int_cst (TREE_TYPE (arg0), 1));
		  return fold_build2_loc (loc, code, type,
					  fold_convert_loc (loc, TREE_TYPE (arg1), tem),
					  arg1);
		}
	    }

	  /* If this is an NE or EQ comparison of zero against the result of a
	     signed MOD operation whose second operand is a power of 2, make
	     the MOD operation unsigned since it is simpler and equivalent.  */
	  if (integer_zerop (arg1)
	      && !TYPE_UNSIGNED (TREE_TYPE (arg0))
	      && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
		  || TREE_CODE (arg0) == CEIL_MOD_EXPR
		  || TREE_CODE (arg0) == FLOOR_MOD_EXPR
		  || TREE_CODE (arg0) == ROUND_MOD_EXPR)
	      && integer_pow2p (TREE_OPERAND (arg0, 1)))
	    {
	      tree newtype = unsigned_type_for (TREE_TYPE (arg0));
	      tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
					     fold_convert_loc (loc, newtype,
							       TREE_OPERAND (arg0, 0)),
					     fold_convert_loc (loc, newtype,
							       TREE_OPERAND (arg0, 1)));

	      return fold_build2_loc (loc, code, type, newmod,
				      fold_convert_loc (loc, newtype, arg1));
	    }
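	  /* Illustrative example: for signed x, "x % 4 == 0" folds to
	     "(unsigned) x % 4U == 0", which tests the same low bits but is
	     simpler to expand.  */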
	  /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
	     C1 is a valid shift constant, and C2 is a power of two, i.e.
	     a single bit.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
	      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
		 == INTEGER_CST
	      && integer_pow2p (TREE_OPERAND (arg0, 1))
	      && integer_zerop (arg1))
	    {
	      tree itype = TREE_TYPE (arg0);
	      unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
	      tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);

	      /* Check for a valid shift count.  */
	      if (TREE_INT_CST_HIGH (arg001) == 0
		  && TREE_INT_CST_LOW (arg001) < prec)
		{
		  tree arg01 = TREE_OPERAND (arg0, 1);
		  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		  unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
		  /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
		     can be rewritten as (X & (C2 << C1)) != 0.  */
		  if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
		    {
		      tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
		      tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
		      return fold_build2_loc (loc, code, type, tem,
					      fold_convert_loc (loc, itype, arg1));
		    }
		  /* Otherwise, for signed (arithmetic) shifts,
		     ((X >> C1) & C2) != 0 is rewritten as X < 0, and
		     ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
		  else if (!TYPE_UNSIGNED (itype))
		    return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
					    arg000, build_int_cst (itype, 0));
		  /* Otherwise, of unsigned (logical) shifts,
		     ((X >> C1) & C2) != 0 is rewritten as (X,false), and
		     ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
		  else
		    return omit_one_operand_loc (loc, type,
						 code == EQ_EXPR ? integer_one_node
								 : integer_zero_node,
						 arg000);
		}
	    }
	  /* If we have (A & C) == C where C is a power of 2, convert this into
	     (A & C) != 0.  Similarly for NE_EXPR.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && integer_pow2p (TREE_OPERAND (arg0, 1))
	      && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	    return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
				    arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
							    integer_zero_node));

	  /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
	     bit, then fold the expression into A < 0 or A >= 0.  */
	  tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
	  if (tem)
	    return tem;

	  /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
	     Similarly for NE_EXPR.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	    {
	      tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
					   TREE_TYPE (TREE_OPERAND (arg0, 1)),
					   TREE_OPERAND (arg0, 1));
	      tree dandnotc
		= fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
				   fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
				   notc);
	      tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	      if (integer_nonzerop (dandnotc))
		return omit_one_operand_loc (loc, type, rslt, arg0);
	    }

	  /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
	     Similarly for NE_EXPR.  */
	  if (TREE_CODE (arg0) == BIT_IOR_EXPR
	      && TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	    {
	      tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
	      tree candnotd
		= fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
				   TREE_OPERAND (arg0, 1),
				   fold_convert_loc (loc, TREE_TYPE (arg0), notd));
	      tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	      if (integer_nonzerop (candnotd))
		return omit_one_operand_loc (loc, type, rslt, arg0);
	    }
	  /* If this is a comparison of a field, we may be able to simplify it.  */
	  if ((TREE_CODE (arg0) == COMPONENT_REF
	       || TREE_CODE (arg0) == BIT_FIELD_REF)
	      /* Handle the constant case even without -O
		 to make sure the warnings are given.  */
	      && (optimize || TREE_CODE (arg1) == INTEGER_CST))
	    {
	      t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
	      if (t1)
		return t1;
	    }

	  /* Optimize comparisons of strlen vs zero to a compare of the
	     first character of the string vs zero.  To wit,
		strlen(ptr) == 0   =>  *ptr == 0
		strlen(ptr) != 0   =>  *ptr != 0
	     Other cases should reduce to one of these two (or a constant)
	     due to the return value of strlen being unsigned.  */
	  if (TREE_CODE (arg0) == CALL_EXPR
	      && integer_zerop (arg1))
	    {
	      tree fndecl = get_callee_fndecl (arg0);

	      if (fndecl
		  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
		  && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
		  && call_expr_nargs (arg0) == 1
		  && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
		{
		  tree iref = build_fold_indirect_ref_loc (loc,
							   CALL_EXPR_ARG (arg0, 0));
		  return fold_build2_loc (loc, code, type, iref,
					  build_int_cst (TREE_TYPE (iref), 0));
		}
	    }
	  /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
	     of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
	  if (TREE_CODE (arg0) == RSHIFT_EXPR
	      && integer_zerop (arg1)
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	    {
	      tree arg00 = TREE_OPERAND (arg0, 0);
	      tree arg01 = TREE_OPERAND (arg0, 1);
	      tree itype = TREE_TYPE (arg00);
	      if (TREE_INT_CST_HIGH (arg01) == 0
		  && TREE_INT_CST_LOW (arg01)
		     == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
		{
		  if (TYPE_UNSIGNED (itype))
		    {
		      itype = signed_type_for (itype);
		      arg00 = fold_convert_loc (loc, itype, arg00);
		    }
		  return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
					  type, arg00, build_int_cst (itype, 0));
		}
	    }
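	  /* Illustrative example: for a 32-bit int x, "(x >> 31) != 0" folds
	     to "x < 0", and "(x >> 31) == 0" folds to "x >= 0".  */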
	  /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
	  if (integer_zerop (arg1)
	      && TREE_CODE (arg0) == BIT_XOR_EXPR)
	    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				    TREE_OPERAND (arg0, 1));

	  /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
	  if (TREE_CODE (arg0) == BIT_XOR_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				    build_zero_cst (TREE_TYPE (arg0)));
	  /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
	  if (TREE_CODE (arg0) == BIT_XOR_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	      && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
				    build_zero_cst (TREE_TYPE (arg0)));

	  /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
	  if (TREE_CODE (arg0) == BIT_XOR_EXPR
	      && TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				    fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
						     TREE_OPERAND (arg0, 1), arg1));
	  /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
	     (X & C) == 0 when C is a single bit.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
	      && integer_zerop (arg1)
	      && integer_pow2p (TREE_OPERAND (arg0, 1)))
	    {
	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
				     TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
				     TREE_OPERAND (arg0, 1));
	      return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				      type, tem,
				      fold_convert_loc (loc, TREE_TYPE (arg0),
							arg1));
	    }

	  /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
	     constant C is a power of two, i.e. a single bit.  */
	  if (TREE_CODE (arg0) == BIT_XOR_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	      && integer_zerop (arg1)
	      && integer_pow2p (TREE_OPERAND (arg0, 1))
	      && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
				  TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	    {
	      tree arg00 = TREE_OPERAND (arg0, 0);
	      return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
				      arg00, build_int_cst (TREE_TYPE (arg00), 0));
	    }

	  /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
	     when is C is a power of two, i.e. a single bit.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
	      && integer_zerop (arg1)
	      && integer_pow2p (TREE_OPERAND (arg0, 1))
	      && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
				  TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	    {
	      tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
				     arg000, TREE_OPERAND (arg0, 1));
	      return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
				      tem, build_int_cst (TREE_TYPE (tem), 0));
	    }
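	  /* Illustrative example: "(~x & 4) == 0" folds to "(x & 4) != 0",
	     and "((x & 4) ^ 4) != 0" folds to "(x & 4) == 0", both testing a
	     single bit.  */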
	  if (integer_zerop (arg1)
	      && tree_expr_nonzero_p (arg0))
	    {
	      tree res = constant_boolean_node (code == NE_EXPR, type);
	      return omit_one_operand_loc (loc, type, res, arg0);
	    }

	  /* Fold -X op -Y as X op Y, where op is eq/ne.  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR
	      && TREE_CODE (arg1) == NEGATE_EXPR)
	    return fold_build2_loc (loc, code, type,
				    TREE_OPERAND (arg0, 0),
				    fold_convert_loc (loc, TREE_TYPE (arg0),
						      TREE_OPERAND (arg1, 0)));
	  /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0, and symmetries.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR)
	    {
	      tree arg00 = TREE_OPERAND (arg0, 0);
	      tree arg01 = TREE_OPERAND (arg0, 1);
	      tree arg10 = TREE_OPERAND (arg1, 0);
	      tree arg11 = TREE_OPERAND (arg1, 1);
	      tree itype = TREE_TYPE (arg0);

	      if (operand_equal_p (arg01, arg11, 0))
		return fold_build2_loc (loc, code, type,
					fold_build2_loc (loc, BIT_AND_EXPR, itype,
							 fold_build2_loc (loc,
									  BIT_XOR_EXPR, itype,
									  arg00, arg10),
							 arg01),
					build_zero_cst (itype));

	      if (operand_equal_p (arg01, arg10, 0))
		return fold_build2_loc (loc, code, type,
					fold_build2_loc (loc, BIT_AND_EXPR, itype,
							 fold_build2_loc (loc,
									  BIT_XOR_EXPR, itype,
									  arg00, arg11),
							 arg01),
					build_zero_cst (itype));

	      if (operand_equal_p (arg00, arg11, 0))
		return fold_build2_loc (loc, code, type,
					fold_build2_loc (loc, BIT_AND_EXPR, itype,
							 fold_build2_loc (loc,
									  BIT_XOR_EXPR, itype,
									  arg01, arg10),
							 arg00),
					build_zero_cst (itype));

	      if (operand_equal_p (arg00, arg10, 0))
		return fold_build2_loc (loc, code, type,
					fold_build2_loc (loc, BIT_AND_EXPR, itype,
							 fold_build2_loc (loc,
									  BIT_XOR_EXPR, itype,
									  arg01, arg11),
							 arg00),
					build_zero_cst (itype));
	    }
	  if (TREE_CODE (arg0) == BIT_XOR_EXPR
	      && TREE_CODE (arg1) == BIT_XOR_EXPR)
	    {
	      tree arg00 = TREE_OPERAND (arg0, 0);
	      tree arg01 = TREE_OPERAND (arg0, 1);
	      tree arg10 = TREE_OPERAND (arg1, 0);
	      tree arg11 = TREE_OPERAND (arg1, 1);
	      tree itype = TREE_TYPE (arg0);

	      /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
		 operand_equal_p guarantees no side-effects so we don't need
		 to use omit_one_operand on Z.  */
	      if (operand_equal_p (arg01, arg11, 0))
		return fold_build2_loc (loc, code, type, arg00,
					fold_convert_loc (loc, TREE_TYPE (arg00),
							  arg10));
	      if (operand_equal_p (arg01, arg10, 0))
		return fold_build2_loc (loc, code, type, arg00,
					fold_convert_loc (loc, TREE_TYPE (arg00),
							  arg11));
	      if (operand_equal_p (arg00, arg11, 0))
		return fold_build2_loc (loc, code, type, arg01,
					fold_convert_loc (loc, TREE_TYPE (arg01),
							  arg10));
	      if (operand_equal_p (arg00, arg10, 0))
		return fold_build2_loc (loc, code, type, arg01,
					fold_convert_loc (loc, TREE_TYPE (arg01),
							  arg11));

	      /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TREE_CODE (arg11) == INTEGER_CST)
		{
		  tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
					 fold_convert_loc (loc, itype, arg11));
		  tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
		  return fold_build2_loc (loc, code, type, tem,
					  fold_convert_loc (loc, itype, arg10));
		}
	    }
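
	  /* Illustrative sketch: a common Z (or a pair of constants
	     C1/C2) cancels out of an XOR on both sides of eq/ne, since
	     XOR is an involution.  For any ints x, y, z:

	       ((x ^ z) == (y ^ z))  ==  (x == y)
	       ((x ^ 5) == (y ^ 3))  ==  ((x ^ (5 ^ 3)) == y)

	     operand_equal_p has already ruled out side effects in Z.  */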
	  /* Attempt to simplify equality/inequality comparisons of complex
	     values.  Only lower the comparison if the result is known or
	     can be simplified to a single scalar comparison.  */
	  if ((TREE_CODE (arg0) == COMPLEX_EXPR
	       || TREE_CODE (arg0) == COMPLEX_CST)
	      && (TREE_CODE (arg1) == COMPLEX_EXPR
		  || TREE_CODE (arg1) == COMPLEX_CST))
	    {
	      tree real0, imag0, real1, imag1;
	      tree rcond, icond;

	      if (TREE_CODE (arg0) == COMPLEX_EXPR)
		{
		  real0 = TREE_OPERAND (arg0, 0);
		  imag0 = TREE_OPERAND (arg0, 1);
		}
	      else
		{
		  real0 = TREE_REALPART (arg0);
		  imag0 = TREE_IMAGPART (arg0);
		}

	      if (TREE_CODE (arg1) == COMPLEX_EXPR)
		{
		  real1 = TREE_OPERAND (arg1, 0);
		  imag1 = TREE_OPERAND (arg1, 1);
		}
	      else
		{
		  real1 = TREE_REALPART (arg1);
		  imag1 = TREE_IMAGPART (arg1);
		}

	      rcond = fold_binary_loc (loc, code, type, real0, real1);
	      if (rcond && TREE_CODE (rcond) == INTEGER_CST)
		{
		  if (integer_zerop (rcond))
		    {
		      if (code == EQ_EXPR)
			return omit_two_operands_loc (loc, type,
						      boolean_false_node,
						      imag0, imag1);
		      return fold_build2_loc (loc, NE_EXPR, type,
					      imag0, imag1);
		    }
		  else
		    {
		      if (code == NE_EXPR)
			return omit_two_operands_loc (loc, type,
						      boolean_true_node,
						      imag0, imag1);
		      return fold_build2_loc (loc, EQ_EXPR, type,
					      imag0, imag1);
		    }
		}

	      icond = fold_binary_loc (loc, code, type, imag0, imag1);
	      if (icond && TREE_CODE (icond) == INTEGER_CST)
		{
		  if (integer_zerop (icond))
		    {
		      if (code == EQ_EXPR)
			return omit_two_operands_loc (loc, type,
						      boolean_false_node,
						      real0, real1);
		      return fold_build2_loc (loc, NE_EXPR, type,
					      real0, real1);
		    }
		  else
		    {
		      if (code == NE_EXPR)
			return omit_two_operands_loc (loc, type,
						      boolean_true_node,
						      real0, real1);
		      return fold_build2_loc (loc, EQ_EXPR, type,
					      real0, real1);
		    }
		}
	    }

	  break;
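
	  /* Illustrative note on the complex lowering above: complex
	     equality compares both parts, so once one part comparison
	     folds to a constant the whole test collapses.  E.g. with
	     _Complex int a = x + 2i:

	       a == (y + 2i)   lowers to   x == y    (imag parts fold equal)
	       a == (y + 3i)   folds  to   false     (imag parts fold unequal)
	   */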
	case LT_EXPR:
	case GT_EXPR:
	case LE_EXPR:
	case GE_EXPR:
	  tem = fold_comparison (loc, code, type, op0, op1);
	  if (tem != NULL_TREE)
	    return tem;
	  /* Transform comparisons of the form X +- C CMP X.  */
	  if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	      && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
		   && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
		  || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
		      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
	    {
	      tree arg01 = TREE_OPERAND (arg0, 1);
	      enum tree_code code0 = TREE_CODE (arg0);
	      int is_positive;

	      if (TREE_CODE (arg01) == REAL_CST)
		is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
	      else
		is_positive = tree_int_cst_sgn (arg01);

	      /* (X - c) > X becomes false.  */
	      if (code == GT_EXPR
		  && ((code0 == MINUS_EXPR && is_positive >= 0)
		      || (code0 == PLUS_EXPR && is_positive <= 0)))
		{
		  if (TREE_CODE (arg01) == INTEGER_CST
		      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does not "
					    "occur when assuming that (X - c) > X "
					    "is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}

	      /* Likewise (X + c) < X becomes false.  */
	      if (code == LT_EXPR
		  && ((code0 == PLUS_EXPR && is_positive >= 0)
		      || (code0 == MINUS_EXPR && is_positive <= 0)))
		{
		  if (TREE_CODE (arg01) == INTEGER_CST
		      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does not "
					    "occur when assuming that "
					    "(X + c) < X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}

	      /* Convert (X - c) <= X to true.  */
	      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
		  && code == LE_EXPR
		  && ((code0 == MINUS_EXPR && is_positive >= 0)
		      || (code0 == PLUS_EXPR && is_positive <= 0)))
		{
		  if (TREE_CODE (arg01) == INTEGER_CST
		      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does not "
					    "occur when assuming that "
					    "(X - c) <= X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      /* Convert (X + c) >= X to true.  */
	      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
		  && code == GE_EXPR
		  && ((code0 == PLUS_EXPR && is_positive >= 0)
		      || (code0 == MINUS_EXPR && is_positive <= 0)))
		{
		  if (TREE_CODE (arg01) == INTEGER_CST
		      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does not "
					    "occur when assuming that "
					    "(X + c) >= X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      if (TREE_CODE (arg01) == INTEGER_CST)
		{
		  /* Convert X + c > X and X - c < X to true for integers.  */
		  if (code == GT_EXPR
		      && ((code0 == PLUS_EXPR && is_positive > 0)
			  || (code0 == MINUS_EXPR && is_positive < 0)))
		    {
		      if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
			fold_overflow_warning (("assuming signed overflow does "
						"not occur when assuming that "
						"(X + c) > X is always true"),
					       WARN_STRICT_OVERFLOW_ALL);
		      return constant_boolean_node (1, type);
		    }

		  if (code == LT_EXPR
		      && ((code0 == MINUS_EXPR && is_positive > 0)
			  || (code0 == PLUS_EXPR && is_positive < 0)))
		    {
		      if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
			fold_overflow_warning (("assuming signed overflow does "
						"not occur when assuming that "
						"(X - c) < X is always true"),
					       WARN_STRICT_OVERFLOW_ALL);
		      return constant_boolean_node (1, type);
		    }

		  /* Convert X + c <= X and X - c >= X to false for integers.  */
		  if (code == LE_EXPR
		      && ((code0 == PLUS_EXPR && is_positive > 0)
			  || (code0 == MINUS_EXPR && is_positive < 0)))
		    {
		      if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
			fold_overflow_warning (("assuming signed overflow does "
						"not occur when assuming that "
						"(X + c) <= X is always false"),
					       WARN_STRICT_OVERFLOW_ALL);
		      return constant_boolean_node (0, type);
		    }

		  if (code == GE_EXPR
		      && ((code0 == MINUS_EXPR && is_positive > 0)
			  || (code0 == PLUS_EXPR && is_positive < 0)))
		    {
		      if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
			fold_overflow_warning (("assuming signed overflow does "
						"not occur when assuming that "
						"(X - c) >= X is always false"),
					       WARN_STRICT_OVERFLOW_ALL);
		      return constant_boolean_node (0, type);
		    }
		}
	    }
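
	  /* Illustrative sketch: the X +- C CMP X folds above are only
	     sound when wrap-around cannot happen, which is why each one
	     is fenced by TYPE_OVERFLOW_UNDEFINED and emits a
	     -Wstrict-overflow note.  A counterexample under wrapping
	     (unsigned) semantics:

	       unsigned x = 0;
	       int b = (x - 1) > x;    b is 1: x - 1 wraps to UINT_MAX

	     For signed x with undefined overflow, fold may instead treat
	     (x - 1) > x as always false.  */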
	  /* Comparisons with the highest or lowest possible integer of
	     the specified precision will have known values.  */
	  {
	    tree arg1_type = TREE_TYPE (arg1);
	    unsigned int width = TYPE_PRECISION (arg1_type);

	    if (TREE_CODE (arg1) == INTEGER_CST
		&& width <= HOST_BITS_PER_DOUBLE_INT
		&& (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
	      {
		HOST_WIDE_INT signed_max_hi;
		unsigned HOST_WIDE_INT signed_max_lo;
		unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;

		if (width <= HOST_BITS_PER_WIDE_INT)
		  {
		    signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				    - 1;
		    signed_max_hi = 0;
		    max_hi = 0;

		    if (TYPE_UNSIGNED (arg1_type))
		      {
			max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
			min_lo = 0;
			min_hi = 0;
		      }
		    else
		      {
			max_lo = signed_max_lo;
			min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
			min_hi = -1;
		      }
		  }
		else
		  {
		    width -= HOST_BITS_PER_WIDE_INT;
		    signed_max_lo = -1;
		    signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				    - 1;
		    max_lo = -1;
		    min_lo = 0;

		    if (TYPE_UNSIGNED (arg1_type))
		      {
			max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
			min_hi = 0;
		      }
		    else
		      {
			max_hi = signed_max_hi;
			min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
		      }
		  }

		if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
		    && TREE_INT_CST_LOW (arg1) == max_lo)
		  switch (code)
		    {
		    case GT_EXPR:
		      return omit_one_operand_loc (loc, type,
						   integer_zero_node, arg0);

		    case GE_EXPR:
		      return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

		    case LE_EXPR:
		      return omit_one_operand_loc (loc, type,
						   integer_one_node, arg0);

		    case LT_EXPR:
		      return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

		    /* The GE_EXPR and LT_EXPR cases above are not normally
		       reached because of previous transformations.  */

		    default:
		      break;
		    }

		else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
			 == max_hi
			 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
		  switch (code)
		    {
		    case GT_EXPR:
		      arg1 = const_binop (PLUS_EXPR, arg1,
					  build_int_cst (TREE_TYPE (arg1), 1));
		      return fold_build2_loc (loc, EQ_EXPR, type,
					      fold_convert_loc (loc,
								TREE_TYPE (arg1),
								arg0),
					      arg1);
		    case LE_EXPR:
		      arg1 = const_binop (PLUS_EXPR, arg1,
					  build_int_cst (TREE_TYPE (arg1), 1));
		      return fold_build2_loc (loc, NE_EXPR, type,
					      fold_convert_loc (loc,
								TREE_TYPE (arg1),
								arg0),
					      arg1);
		    default:
		      break;
		    }

		else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
			 == min_hi
			 && TREE_INT_CST_LOW (arg1) == min_lo)
		  switch (code)
		    {
		    case LT_EXPR:
		      return omit_one_operand_loc (loc, type,
						   integer_zero_node, arg0);

		    case LE_EXPR:
		      return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

		    case GE_EXPR:
		      return omit_one_operand_loc (loc, type,
						   integer_one_node, arg0);

		    case GT_EXPR:
		      return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

		    default:
		      break;
		    }

		else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
			 == min_hi
			 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
		  switch (code)
		    {
		    case GE_EXPR:
		      arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
		      return fold_build2_loc (loc, NE_EXPR, type,
					      fold_convert_loc (loc,
								TREE_TYPE (arg1),
								arg0),
					      arg1);
		    case LT_EXPR:
		      arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
		      return fold_build2_loc (loc, EQ_EXPR, type,
					      fold_convert_loc (loc,
								TREE_TYPE (arg1),
								arg0),
					      arg1);
		    default:
		      break;
		    }

		else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
			 && TREE_INT_CST_LOW (arg1) == signed_max_lo
			 && TYPE_UNSIGNED (arg1_type)
			 /* We will flip the signedness of the comparison
			    operator associated with the mode of arg1, so the
			    sign bit is specified by this mode.  Check that
			    arg1 is the signed max associated with this sign
			    bit.  */
			 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
			 /* signed_type does not work on pointer types.  */
			 && INTEGRAL_TYPE_P (arg1_type))
		  {
		    /* The following case also applies to X < signed_max+1
		       and X >= signed_max+1 because of previous
		       transformations.  */
		    if (code == LE_EXPR || code == GT_EXPR)
		      {
			tree st = signed_type_for (TREE_TYPE (arg1));
			return fold_build2_loc (loc,
						code == LE_EXPR
						? GE_EXPR : LT_EXPR,
						type,
						fold_convert_loc (loc, st, arg0),
						build_int_cst (st, 0));
		      }
		  }
	      }
	  }
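
	  /* Illustrative sketch of the extreme-value folds above, for a
	     uint8-like type (width 8, max 255):

	       x >  255   becomes   false
	       x >= 255   becomes   x == 255
	       x <= 254   becomes   x != 255
	       x <= 127   becomes   (signed char) x >= 0
				    (signedness flip at the signed max)
	   */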
	  /* If we are comparing an ABS_EXPR with a constant, we can
	     convert all the cases into explicit comparisons, but they may
	     well not be faster than doing the ABS and one comparison.
	     But ABS (X) <= C is a range comparison, which becomes a subtraction
	     and a comparison, and is probably faster.  */
	  if (code == LE_EXPR
	      && TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg0) == ABS_EXPR
	      && ! TREE_SIDE_EFFECTS (arg0)
	      && (0 != (tem = negate_expr (arg1)))
	      && TREE_CODE (tem) == INTEGER_CST
	      && !TREE_OVERFLOW (tem))
	    return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				    build2 (GE_EXPR, type,
					    TREE_OPERAND (arg0, 0), tem),
				    build2 (LE_EXPR, type,
					    TREE_OPERAND (arg0, 0), arg1));
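
	  /* Illustrative sketch: the ABS fold above turns one abs plus
	     one compare into a two-sided range test, e.g.

	       abs (x) <= 5   becomes   x >= -5 && x <= 5

	     which later range-test machinery can typically collapse to a
	     single unsigned compare such as (unsigned) x + 5 <= 10.  */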
	  /* Convert ABS_EXPR<x> >= 0 to true.  */
	  strict_overflow_p = false;
	  if (code == GE_EXPR
	      && (integer_zerop (arg1)
		  || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
		      && real_zerop (arg1)))
	      && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not occur "
					"when simplifying comparison of "
					"absolute value and zero"),
				       WARN_STRICT_OVERFLOW_CONDITIONAL);
	      return omit_one_operand_loc (loc, type, integer_one_node, arg0);
	    }

	  /* Convert ABS_EXPR<x> < 0 to false.  */
	  strict_overflow_p = false;
	  if (code == LT_EXPR
	      && (integer_zerop (arg1) || real_zerop (arg1))
	      && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not occur "
					"when simplifying comparison of "
					"absolute value and zero"),
				       WARN_STRICT_OVERFLOW_CONDITIONAL);
	      return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
	    }
	  /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
	     and similarly for >= into !=.  */
	  if ((code == LT_EXPR || code == GE_EXPR)
	      && TYPE_UNSIGNED (TREE_TYPE (arg0))
	      && TREE_CODE (arg1) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg1, 0)))
	    return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
			       build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
				       TREE_OPERAND (arg1, 1)),
			       build_zero_cst (TREE_TYPE (arg0)));

	  if ((code == LT_EXPR || code == GE_EXPR)
	      && TYPE_UNSIGNED (TREE_TYPE (arg0))
	      && CONVERT_EXPR_P (arg1)
	      && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
	    {
	      tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
			    TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
	      return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
				 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
				 build_zero_cst (TREE_TYPE (arg0)));
	    }

	  break;
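
	  /* Illustrative sketch: for unsigned x the folds above rewrite a
	     compare against a variable power of two as a shift test:

	       x <  (1u << y)   becomes   (x >> y) == 0
	       x >= (1u << y)   becomes   (x >> y) != 0

	     since x < 2**y exactly when no bit at position y or above is
	     set.  */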
	case UNORDERED_EXPR:
	case ORDERED_EXPR:
	case UNLT_EXPR:
	case UNLE_EXPR:
	case UNGT_EXPR:
	case UNGE_EXPR:
	case UNEQ_EXPR:
	case LTGT_EXPR:
	  if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
	    {
	      t1 = fold_relational_const (code, type, arg0, arg1);
	      if (t1 != NULL_TREE)
		return t1;
	    }

	  /* If the first operand is NaN, the result is constant.  */
	  if (TREE_CODE (arg0) == REAL_CST
	      && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
	      && (code != LTGT_EXPR || ! flag_trapping_math))
	    {
	      t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
		   ? integer_zero_node
		   : integer_one_node;
	      return omit_one_operand_loc (loc, type, t1, arg1);
	    }

	  /* If the second operand is NaN, the result is constant.  */
	  if (TREE_CODE (arg1) == REAL_CST
	      && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	      && (code != LTGT_EXPR || ! flag_trapping_math))
	    {
	      t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
		   ? integer_zero_node
		   : integer_one_node;
	      return omit_one_operand_loc (loc, type, t1, arg0);
	    }

	  /* Simplify unordered comparison of something with itself.  */
	  if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
	      && operand_equal_p (arg0, arg1, 0))
	    return constant_boolean_node (1, type);

	  if (code == LTGT_EXPR
	      && !flag_trapping_math
	      && operand_equal_p (arg0, arg1, 0))
	    return constant_boolean_node (0, type);

	  /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
	  {
	    tree targ0 = strip_float_extensions (arg0);
	    tree targ1 = strip_float_extensions (arg1);
	    tree newtype = TREE_TYPE (targ0);

	    if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	      newtype = TREE_TYPE (targ1);

	    if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	      return fold_build2_loc (loc, code, type,
				      fold_convert_loc (loc, newtype, targ0),
				      fold_convert_loc (loc, newtype, targ1));
	  }

	  break;
	case COMPOUND_EXPR:
	  /* When pedantic, a compound expression can be neither an lvalue
	     nor an integer constant expression.  */
	  if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
	    return NULL_TREE;
	  /* Don't let (0, 0) be null pointer constant.  */
	  tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
				     : fold_convert_loc (loc, type, arg1);
	  return pedantic_non_lvalue_loc (loc, tem);
	case COMPLEX_EXPR:
	  if ((TREE_CODE (arg0) == REAL_CST
	       && TREE_CODE (arg1) == REAL_CST)
	      || (TREE_CODE (arg0) == INTEGER_CST
		  && TREE_CODE (arg1) == INTEGER_CST))
	    return build_complex (type, arg0, arg1);
	  if (TREE_CODE (arg0) == REALPART_EXPR
	      && TREE_CODE (arg1) == IMAGPART_EXPR
	      && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0))
	    return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
					 TREE_OPERAND (arg1, 0));
	  return NULL_TREE;

	case ASSERT_EXPR:
	  /* An ASSERT_EXPR should never be passed to fold_binary.  */
	  gcc_unreachable ();
	case VEC_PACK_TRUNC_EXPR:
	case VEC_PACK_FIX_TRUNC_EXPR:
	  {
	    unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	    tree *elts;

	    gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
			&& TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
	    if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
	      return NULL_TREE;

	    elts = XALLOCAVEC (tree, nelts);
	    if (!vec_cst_ctor_to_array (arg0, elts)
		|| !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
	      return NULL_TREE;

	    for (i = 0; i < nelts; i++)
	      {
		elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
					      ? NOP_EXPR : FIX_TRUNC_EXPR,
					      TREE_TYPE (type), elts[i]);
		if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
		  return NULL_TREE;
	      }

	    return build_vector (type, elts);
	  }
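
	  /* Illustrative sketch: VEC_PACK_TRUNC narrows and concatenates
	     the two input vectors.  E.g. packing two 2-element int
	     vectors into one 4-element short vector:

	       { 1, 2 } PACK { 3, 4 }   yields   { 1, 2, 3, 4 }

	     with each element truncated to the narrow element type.  */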
	case VEC_WIDEN_MULT_LO_EXPR:
	case VEC_WIDEN_MULT_HI_EXPR:
	case VEC_WIDEN_MULT_EVEN_EXPR:
	case VEC_WIDEN_MULT_ODD_EXPR:
	  {
	    unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
	    unsigned int out, ofs, scale;
	    tree *elts;

	    gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
			&& TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
	    if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
	      return NULL_TREE;

	    elts = XALLOCAVEC (tree, nelts * 4);
	    if (!vec_cst_ctor_to_array (arg0, elts)
		|| !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
	      return NULL_TREE;

	    if (code == VEC_WIDEN_MULT_LO_EXPR)
	      scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
	    else if (code == VEC_WIDEN_MULT_HI_EXPR)
	      scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
	    else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	      scale = 1, ofs = 0;
	    else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	      scale = 1, ofs = 1;

	    for (out = 0; out < nelts; out++)
	      {
		unsigned int in1 = (out << scale) + ofs;
		unsigned int in2 = in1 + nelts * 2;
		tree t1, t2;

		t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
		t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

		if (t1 == NULL_TREE || t2 == NULL_TREE)
		  return NULL_TREE;
		elts[out] = const_binop (MULT_EXPR, t1, t2);
		if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
		  return NULL_TREE;
	      }

	    return build_vector (type, elts);
	  }

	default:
	  return NULL_TREE;
	} /* switch (code) */
}
/* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
   a LABEL_EXPR; otherwise return NULL_TREE.  Do not check the subtrees
   of GOTO_EXPR.  */

static tree
contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      return *tp;

    case GOTO_EXPR:
      *walk_subtrees = 0;

      /* ... fall through ...  */

    default:
      return NULL_TREE;
    }
}

/* Return whether the sub-tree ST contains a label which is accessible from
   outside the sub-tree.  */

static bool
contains_label_p (tree st)
{
  return
   (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
}
/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_ternary_loc (location_t loc, enum tree_code code, tree type,
		  tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 3);

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any case,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */

  if (op0)
    {
      arg0 = op0;
      STRIP_NOPS (arg0);
    }

  if (op1)
    {
      arg1 = op1;
      STRIP_NOPS (arg1);
    }

  if (op2)
    {
      arg2 = op2;
      STRIP_NOPS (arg2);
    }

  switch (code)
    {
    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
	{
	  unsigned HOST_WIDE_INT idx;
	  tree field, value;
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
	    if (field == arg1)
	      return value;
	}
      return NULL_TREE;

    case COND_EXPR:
    case VEC_COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
	 so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
	  tem = integer_zerop (arg0) ? op2 : op1;
	  /* Only optimize constant conditions when the selected branch
	     has the same type as the COND_EXPR.  This avoids optimizing
	     away "c ? x : throw", where the throw has a void type.
	     Avoid throwing away the operand which contains a label.  */
	  if ((!TREE_SIDE_EFFECTS (unused_op)
	       || !contains_label_p (unused_op))
	      && (! VOID_TYPE_P (TREE_TYPE (tem))
		  || VOID_TYPE_P (type)))
	    return pedantic_non_lvalue_loc (loc, tem);
	  return NULL_TREE;
	}

      if (operand_equal_p (arg1, op2, 0))
	return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
      /* If we have A op B ? A : C, we may be able to convert this to a
	 simpler expression, depending on the operation and the values
	 of B and C.  Signed zeros prevent all of these transformations,
	 for reasons given above each one.

	 Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     arg1, TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
	{
	  tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
	  if (tem)
	    return tem;
	}

      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     op2,
					     TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  tem = fold_truth_not_expr (loc0, arg0);
	  if (tem && COMPARISON_CLASS_P (tem))
	    {
	      tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
	      if (tem)
		return tem;
	    }
	}

      /* If the second operand is simpler than the third, swap them
	 since that produces better jump optimization results.  */
      if (truth_value_p (TREE_CODE (arg0))
	  && tree_swap_operands_p (op1, op2, false))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* See if this can be inverted.  If it can't, possibly because
	     it was a floating-point inequality comparison, don't do
	     anything.  */
	  tem = fold_truth_not_expr (loc0, arg0);
	  if (tem)
	    return fold_build3_loc (loc, code, type, tem, op2, op1);
	}
      /* Convert A ? 1 : 0 to simply A.  */
      if (integer_onep (op1)
	  && integer_zerop (op2)
	  /* If we try to convert OP0 to our type, the
	     call to fold will try to move the conversion inside
	     a COND, which will recurse.  In that case, the COND_EXPR
	     is probably the best choice, so leave it alone.  */
	  && type == TREE_TYPE (arg0))
	return pedantic_non_lvalue_loc (loc, arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
	 over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
	  && integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0)))
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
							  invert_truthvalue_loc (loc,
										 arg0)));
      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
      if (TREE_CODE (arg0) == LT_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
	{
	  /* sign_bit_p only checks ARG1 bits within A's precision.
	     If <sign bit of A> has wider type than A, bits outside
	     of A's precision in <sign bit of A> need to be checked.
	     If they are all 0, this optimization needs to be done
	     in unsigned A's type, if they are all 1 in signed A's type,
	     otherwise this can't be done.  */
	  if (TYPE_PRECISION (TREE_TYPE (tem))
	      < TYPE_PRECISION (TREE_TYPE (arg1))
	      && TYPE_PRECISION (TREE_TYPE (tem))
		 < TYPE_PRECISION (type))
	    {
	      unsigned HOST_WIDE_INT mask_lo;
	      HOST_WIDE_INT mask_hi;
	      int inner_width, outer_width;
	      tree tem_type;

	      inner_width = TYPE_PRECISION (TREE_TYPE (tem));
	      outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
	      if (outer_width > TYPE_PRECISION (type))
		outer_width = TYPE_PRECISION (type);

	      if (outer_width > HOST_BITS_PER_WIDE_INT)
		{
		  mask_hi = ((unsigned HOST_WIDE_INT) -1
			     >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
		  mask_lo = -1;
		}
	      else
		{
		  mask_hi = 0;
		  mask_lo = ((unsigned HOST_WIDE_INT) -1
			     >> (HOST_BITS_PER_WIDE_INT - outer_width));
		}
	      if (inner_width > HOST_BITS_PER_WIDE_INT)
		{
		  mask_hi &= ~((unsigned HOST_WIDE_INT) -1
			       >> (HOST_BITS_PER_WIDE_INT - inner_width));
		  mask_lo = 0;
		}
	      else
		mask_lo &= ~((unsigned HOST_WIDE_INT) -1
			     >> (HOST_BITS_PER_WIDE_INT - inner_width));

	      if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
		  && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
		{
		  tem_type = signed_type_for (TREE_TYPE (tem));
		  tem = fold_convert_loc (loc, tem_type, tem);
		}
	      else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
		       && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
		{
		  tem_type = unsigned_type_for (TREE_TYPE (tem));
		  tem = fold_convert_loc (loc, tem_type, tem);
		}
	      else
		tem = NULL_TREE;
	    }

	  if (tem)
	    return
	      fold_convert_loc (loc, type,
				fold_build2_loc (loc, BIT_AND_EXPR,
						 TREE_TYPE (tem), tem,
						 fold_convert_loc (loc,
								   TREE_TYPE (tem),
								   arg1)));
	}
      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
	 already handled above.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && integer_pow2p (arg1))
	{
	  tree tem = TREE_OPERAND (arg0, 0);
	  STRIP_NOPS (tem);
	  if (TREE_CODE (tem) == RSHIFT_EXPR
	      && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
	      && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
		 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
	    return fold_build2_loc (loc, BIT_AND_EXPR, type,
				    TREE_OPERAND (tem, 0), arg1);
	}

      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
	 is probably obsolete because the first operand should be a
	 truth value (that's why we have the two cases above), but let's
	 leave it in until we can confirm this for all front-ends.  */
      if (integer_zerop (op2)
	  && TREE_CODE (arg0) == NE_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_pow2p (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      arg1, OEP_ONLY_CONST))
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
							  TREE_OPERAND (arg0, 0)));
      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				fold_convert_loc (loc, type, arg0),
				arg1);

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_truth_not_expr (loc0, arg0);
	  if (tem)
	    return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
				    fold_convert_loc (loc, type, tem),
				    arg1);
	}

      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
      if (integer_zerop (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2)))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_truth_not_expr (loc0, arg0);
	  if (tem)
	    return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				    fold_convert_loc (loc, type, tem),
				    op2);
	}

      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if (integer_onep (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2)))
	return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
				fold_convert_loc (loc, type, arg0),
				op2);

      return NULL_TREE;
    case CALL_EXPR:
      /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
	 of fold_ternary on them.  */
      gcc_unreachable ();
    case BIT_FIELD_REF:
      if ((TREE_CODE (arg0) == VECTOR_CST
	   || (TREE_CODE (arg0) == CONSTRUCTOR
	       && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
	  && (type == TREE_TYPE (TREE_TYPE (arg0))
	      || (TREE_CODE (type) == VECTOR_TYPE
		  && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
	{
	  tree eltype = TREE_TYPE (TREE_TYPE (arg0));
	  unsigned HOST_WIDE_INT width = tree_low_cst (TYPE_SIZE (eltype), 1);
	  unsigned HOST_WIDE_INT n = tree_low_cst (arg1, 1);
	  unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);

	  if (n != 0
	      && (idx % width) == 0
	      && (n % width) == 0
	      && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
	    {
	      idx = idx / width;
	      n = n / width;

	      if (TREE_CODE (type) == VECTOR_TYPE)
		{
		  if (TREE_CODE (arg0) == VECTOR_CST)
		    {
		      tree *vals = XALLOCAVEC (tree, n);
		      unsigned i;
		      for (i = 0; i < n; ++i)
			vals[i] = VECTOR_CST_ELT (arg0, idx + i);
		      return build_vector (type, vals);
		    }
		  else
		    {
		      VEC(constructor_elt, gc) *vals;
		      unsigned i;
		      if (CONSTRUCTOR_NELTS (arg0) == 0)
			return build_constructor (type, NULL);
		      vals = VEC_alloc (constructor_elt, gc, n);
		      for (i = 0; i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
			   ++i)
			CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
						CONSTRUCTOR_ELT
						  (arg0, idx + i)->value);
		      return build_constructor (type, vals);
		    }
		}
	      else if (n == 1)
		{
		  if (TREE_CODE (arg0) == VECTOR_CST)
		    return VECTOR_CST_ELT (arg0, idx);
		  else if (idx < CONSTRUCTOR_NELTS (arg0))
		    return CONSTRUCTOR_ELT (arg0, idx)->value;
		  return build_zero_cst (type);
		}
	    }
	}

      /* A bit-field-ref that referenced the full argument can be stripped.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	  && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
	  && integer_zerop (op2))
	return fold_convert_loc (loc, type, arg0);

      /* On constants we can use native encode/interpret to constant
	 fold (nearly) all BIT_FIELD_REFs.  */
      if (CONSTANT_CLASS_P (arg0)
	  && can_native_interpret_type_p (type)
	  && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1)
	  /* This limitation should not be necessary, we just need to
	     round this up to mode size.  */
	  && tree_low_cst (op1, 1) % BITS_PER_UNIT == 0
	  /* Need bit-shifting of the buffer to relax the following.  */
	  && tree_low_cst (op2, 1) % BITS_PER_UNIT == 0)
	{
	  unsigned HOST_WIDE_INT bitpos = tree_low_cst (op2, 1);
	  unsigned HOST_WIDE_INT bitsize = tree_low_cst (op1, 1);
	  unsigned HOST_WIDE_INT clen;
	  clen = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1);
	  /* ??? We cannot tell native_encode_expr to start at
	     some random byte only.  So limit us to a reasonable amount
	     of work.  */
	  if (clen <= 4096)
	    {
	      unsigned char *b = XALLOCAVEC (unsigned char, clen);
	      unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
	      if (len > 0
		  && len * BITS_PER_UNIT >= bitpos + bitsize)
		{
		  tree v = native_interpret_expr (type,
						  b + bitpos / BITS_PER_UNIT,
						  bitsize / BITS_PER_UNIT);
		  if (v)
		    return v;
		}
	    }
	}

      return NULL_TREE;
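
      /* Illustrative sketch of the native encode/interpret path above:
	 a BIT_FIELD_REF on a constant is folded by going through the
	 target byte image, conceptually

	   unsigned char buf[N];
	   native_encode_expr (arg0, buf, N);                  target bytes
	   native_interpret_expr (type, buf + byte_off, size); re-read slice

	 which is why both the bit position and the bit size must
	 currently be whole bytes.  */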
    case FMA_EXPR:
      /* For integers we can decompose the FMA if possible.  */
      if (TREE_CODE (arg0) == INTEGER_CST
	  && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2_loc (loc, PLUS_EXPR, type,
				const_binop (MULT_EXPR, arg0, arg1), arg2);
      if (integer_zerop (arg2))
	return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);

      return fold_fma (loc, type, arg0, arg1, arg2);
    case VEC_PERM_EXPR:
      if (TREE_CODE (arg2) == VECTOR_CST)
	{
	  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
	  unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
	  tree t;
	  bool need_mask_canon = false;
	  bool all_in_vec0 = true;
	  bool all_in_vec1 = true;
	  bool maybe_identity = true;
	  bool single_arg = (op0 == op1);
	  bool changed = false;

	  mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
	  gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
	  for (i = 0; i < nelts; i++)
	    {
	      tree val = VECTOR_CST_ELT (arg2, i);
	      if (TREE_CODE (val) != INTEGER_CST)
		return NULL_TREE;

	      sel[i] = TREE_INT_CST_LOW (val) & mask;
	      if (TREE_INT_CST_HIGH (val)
		  || ((unsigned HOST_WIDE_INT)
		      TREE_INT_CST_LOW (val) != sel[i]))
		need_mask_canon = true;

	      if (sel[i] < nelts)
		all_in_vec1 = false;
	      else
		all_in_vec0 = false;

	      if ((sel[i] & (nelts-1)) != i)
		maybe_identity = false;
	    }

	  if (maybe_identity)
	    {
	      if (all_in_vec0)
		return op0;
	      if (all_in_vec1)
		return op1;
	    }

	  if (all_in_vec0)
	    op1 = op0;
	  else if (all_in_vec1)
	    {
	      op0 = op1;
	      for (i = 0; i < nelts; i++)
		sel[i] -= nelts;
	      need_mask_canon = true;
	    }

	  if ((TREE_CODE (op0) == VECTOR_CST
	       || TREE_CODE (op0) == CONSTRUCTOR)
	      && (TREE_CODE (op1) == VECTOR_CST
		  || TREE_CODE (op1) == CONSTRUCTOR))
	    {
	      t = fold_vec_perm (type, op0, op1, sel);
	      if (t != NULL_TREE)
		return t;
	    }

	  if (op0 == op1 && !single_arg)
	    changed = true;

	  if (need_mask_canon && arg2 == op2)
	    {
	      tree *tsel = XALLOCAVEC (tree, nelts);
	      tree eltype = TREE_TYPE (TREE_TYPE (arg2));
	      for (i = 0; i < nelts; i++)
		tsel[i] = build_int_cst (eltype, sel[i]);
	      op2 = build_vector (TREE_TYPE (arg2), tsel);
	      changed = true;
	    }

	  if (changed)
	    return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
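
/* Illustrative sketch: VEC_PERM_EXPR selects elements from the
   concatenation of its two input vectors.  With 4-element vectors:

     VEC_PERM ({a,b,c,d}, {e,f,g,h}, {0,4,1,5})  yields  {a,e,b,f}

   Indices 0..3 pick from op0 and 4..7 from op1; the code above
   canonicalizes out-of-range mask elements and the single-input
   cases.  */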
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;
  location_t loc = EXPR_LOCATION (expr);

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  /* CALL_EXPR-like objects with variable numbers of operands are
     treated specially.  */
  if (kind == tcc_vl_exp)
    {
      if (code == CALL_EXPR)
	{
	  tem = fold_call_expr (loc, expr, false);
	  return tem ? tem : expr;
	}
      return expr;
    }

  if (IS_EXPR_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);
      tree op0, op1, op2;

      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  op0 = TREE_OPERAND (t, 0);
	  tem = fold_unary_loc (loc, code, type, op0);
	  return tem ? tem : expr;
	case 2:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  tem = fold_binary_loc (loc, code, type, op0, op1);
	  return tem ? tem : expr;
	case 3:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  op2 = TREE_OPERAND (t, 2);
	  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
	  return tem ? tem : expr;
	default:
	  break;
	}
    }

  switch (code)
    {
    case ARRAY_REF:
      {
	tree op0 = TREE_OPERAND (t, 0);
	tree op1 = TREE_OPERAND (t, 1);

	if (TREE_CODE (op1) == INTEGER_CST
	    && TREE_CODE (op0) == CONSTRUCTOR
	    && ! type_contains_placeholder_p (TREE_TYPE (op0)))
	  {
	    VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
	    unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
	    unsigned HOST_WIDE_INT begin = 0;

	    /* Find a matching index by means of a binary search.  */
	    while (begin != end)
	      {
		unsigned HOST_WIDE_INT middle = (begin + end) / 2;
		tree index = VEC_index (constructor_elt, elts, middle).index;

		if (TREE_CODE (index) == INTEGER_CST
		    && tree_int_cst_lt (index, op1))
		  begin = middle + 1;
		else if (TREE_CODE (index) == INTEGER_CST
			 && tree_int_cst_lt (op1, index))
		  end = middle;
		else if (TREE_CODE (index) == RANGE_EXPR
			 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
		  begin = middle + 1;
		else if (TREE_CODE (index) == RANGE_EXPR
			 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
		  end = middle;
		else
		  return VEC_index (constructor_elt, elts, middle).value;
	      }
	  }

	return t;
      }

    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    default:
      return t;
    } /* switch (code) */
}
#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
static void fold_check_failed (const_tree, const_tree);
void print_fold_checksum (const_tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */

tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}
void
print_fold_checksum (const_tree expr)
{
  struct md5_ctx ctx;
  unsigned char checksum[16], cnt;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  htab_delete (ht);
  for (cnt = 0; cnt < 16; ++cnt)
    fprintf (stderr, "%02x", checksum[cnt]);
  putc ('\n', stderr);
}

static void
fold_check_failed (const_tree expr ATTRIBUTE_UNUSED,
		   const_tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}
static void
fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
{
  void **slot;
  enum tree_code code;
  union tree_node buf;
  int i, len;

 recursive_label:
  if (expr == NULL)
    return;
  slot = (void **) htab_find_slot (ht, expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = CONST_CAST_TREE (expr);
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
      expr = (tree) &buf;
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
	   && (TYPE_POINTER_TO (expr)
	       || TYPE_REFERENCE_TO (expr)
	       || TYPE_CACHED_VALUES_P (expr)
	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
	       || TYPE_NEXT_VARIANT (expr)))
    {
      /* Allow these fields to be modified.  */
      tree tmp;
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = tmp = (tree) &buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
      TYPE_POINTER_TO (tmp) = NULL;
      TYPE_REFERENCE_TO (tmp) = NULL;
      TYPE_NEXT_VARIANT (tmp) = NULL;
      if (TYPE_CACHED_VALUES_P (tmp))
	{
	  TYPE_CACHED_VALUES_P (tmp) = 0;
	  TYPE_CACHED_VALUES (tmp) = NULL;
	}
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST
      && code != SSA_NAME
      && CODE_CONTAINS_STRUCT (code, TS_COMMON))
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
	{
	case STRING_CST:
	  md5_process_bytes (TREE_STRING_POINTER (expr),
			     TREE_STRING_LENGTH (expr), ctx);
	  break;
	case COMPLEX_CST:
	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
	  break;
	case VECTOR_CST:
	  for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
	    fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_exceptional:
      switch (code)
	{
	case TREE_LIST:
	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
	  expr = TREE_CHAIN (expr);
	  goto recursive_label;
	  break;
	case TREE_VEC:
	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
    case tcc_vl_exp:
      len = TREE_OPERAND_LENGTH (expr);
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
	{
	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
	}
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
	fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);

      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
	{
	  fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
	  fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
	}
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
	  || SCALAR_FLOAT_TYPE_P (expr))
	{
	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
	}
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
	  || TREE_CODE (expr) == UNION_TYPE
	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}
/* Helper function for outputting the checksum of a tree T.  When
   debugging with gdb, you can "define mynext" to be "next" followed
   by "call debug_fold_checksum (op0)", then just trace down until the
   outputs differ.  */

DEBUG_FUNCTION void
debug_fold_checksum (const_tree t)
{
  int i;
  unsigned char checksum[16];
  struct md5_ctx ctx;
  htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);

  md5_init_ctx (&ctx);
  fold_checksum_tree (t, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  htab_empty (ht);

  for (i = 0; i < 16; i++)
    fprintf (stderr, "%d ", checksum[i]);

  fprintf (stderr, "\n");
}

#endif
/* Fold a unary tree expression with code CODE of type TYPE with an
   operand OP0.  LOC is the location of the resulting expression.
   Return a folded expression if successful.  Otherwise, return a tree
   expression with code CODE of type TYPE with an operand OP0.  */

tree
fold_build1_stat_loc (location_t loc,
		      enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before[16], checksum_after[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);
#endif

  tem = fold_unary_loc (loc, code, type, op0);
  if (!tem)
    tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (op0, tem);
#endif
  return tem;
}
/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting
   expression.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with operands
   OP0 and OP1.  */

tree
fold_build2_stat_loc (location_t loc,
		      enum tree_code code, tree type, tree op0, tree op1
		      MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
		checksum_before_op1[16],
		checksum_after_op0[16],
		checksum_after_op1[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);
#endif

  tem = fold_binary_loc (loc, code, type, op0, op1);
  if (!tem)
    tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_delete (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
#endif
  return tem;
}
/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */

tree
fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
		      tree op0, tree op1, tree op2 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
		checksum_before_op1[16],
		checksum_before_op2[16],
		checksum_after_op0[16],
		checksum_after_op1[16],
		checksum_after_op2[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  htab_empty (ht);
#endif

  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
  if (!tem)
    tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_empty (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op2);
  htab_delete (ht);

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}
/* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
   arguments in ARGARRAY, and a null static chain.
   Return a folded expression if successful.  Otherwise, return a CALL_EXPR
   of type TYPE from the given operands as constructed by build_call_array.  */

tree
fold_build_call_array_loc (location_t loc, tree type, tree fn,
			   int nargs, tree *argarray)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_fn[16],
		checksum_before_arglist[16],
		checksum_after_fn[16],
		checksum_after_arglist[16];
  struct md5_ctx ctx;
  htab_t ht;
  int i;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_fn);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_arglist);
  htab_empty (ht);
#endif

  tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_fn);
  htab_empty (ht);

  if (memcmp (checksum_before_fn, checksum_after_fn, 16))
    fold_check_failed (fn, tem);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_arglist);
  htab_delete (ht);

  if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
    fold_check_failed (NULL_TREE, tem);
#endif
  return tem;
}
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;
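
/* Illustrative usage sketch: the initializer variants below simply
   bracket the ordinary folders with the flag save/restore pairs above,
   e.g.

     tree t = fold_build2_initializer_loc (loc, PLUS_EXPR, type, a, b);

   folds a + b as if -fno-trapping-math and friends were in effect,
   which is valid because an initializer is evaluated at translation
   time.  */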
tree
fold_build1_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build1_loc (loc, code, type, op);

  END_FOLD_INIT;
  return result;
}

tree
fold_build2_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op0, tree op1)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build2_loc (loc, code, type, op0, op1);

  END_FOLD_INIT;
  return result;
}

tree
fold_build3_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op0, tree op1, tree op2)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build3_loc (loc, code, type, op0, op1, op2);

  END_FOLD_INIT;
  return result;
}

tree
fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
				       int nargs, tree *argarray)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);

  END_FOLD_INIT;
  return result;
}

#undef START_FOLD_INIT
#undef END_FOLD_INIT
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */
static int
multiple_of_p (tree type, const_tree top, const_tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
	return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
	{
	  tree op1, t1;

	  op1 = TREE_OPERAND (top, 1);
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
	      > TREE_INT_CST_LOW (op1)
	      && TREE_INT_CST_HIGH (op1) == 0
	      && 0 != (t1 = fold_convert (type,
					  const_binop (LSHIFT_EXPR,
						       size_one_node,
						       op1)))
	      && !TREE_OVERFLOW (t1))
	    return multiple_of_p (type, t1, bottom);
	}
      return 0;

    CASE_CONVERT:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
	  || (TYPE_PRECISION (type)
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
	return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case COND_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
	  || integer_zerop (bottom)
	  || (TYPE_UNSIGNED (type)
	      && (tree_int_cst_sgn (top) < 0
		  || tree_int_cst_sgn (bottom) < 0)))
	return 0;
      return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
					     top, bottom));

    default:
      return 0;
    }
}
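
/* Illustrative usage sketch: multiple_of_p works structurally, e.g.

     multiple_of_p (sizetype, t, build_int_cst (sizetype, 8))

   returns 1 for T of the form X * 8, (X * 8) + (Y * 16) or X << 3,
   but 0 for X * 4, since it never proves anything about the values of
   the variables involved.  */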
/* Return true if CODE or TYPE is known to be non-negative.  */

static bool
tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
{
  if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
      && truth_value_p (code))
    /* Truth values evaluate to 0 or 1, which is nonnegative unless we
       have a signed:1 type (where the value is -1 and 0).  */
    return true;

  return false;
}
/* Return true if (CODE OP0) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
				bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
	 ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!INTEGRAL_TYPE_P (type))
	return true;
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  *strict_overflow_p = true;
	  return true;
	}
      break;

    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
					    strict_overflow_p);

    CASE_CONVERT:
      {
	tree inner_type = TREE_TYPE (op0);
	tree outer_type = type;

	if (TREE_CODE (outer_type) == REAL_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_warnv_p (op0,
						    strict_overflow_p);
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      {
		if (TYPE_UNSIGNED (inner_type))
		  return true;
		return tree_expr_nonnegative_warnv_p (op0,
						      strict_overflow_p);
	      }
	  }
	else if (TREE_CODE (outer_type) == INTEGER_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_warnv_p (op0,
						    strict_overflow_p);
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
		     && TYPE_UNSIGNED (inner_type);
	  }
      }
      break;

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
                                 tree op1, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type))
        return (tree_expr_nonnegative_warnv_p (op0,
                                               strict_overflow_p)
                && tree_expr_nonnegative_warnv_p (op1,
                                                  strict_overflow_p));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
         both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
          && TREE_CODE (op0) == NOP_EXPR
          && TREE_CODE (op1) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
            {
              unsigned int prec = MAX (TYPE_PRECISION (inner1),
                                       TYPE_PRECISION (inner2)) + 1;
              return prec < TYPE_PRECISION (type);
            }
        }
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (type))
        {
          /* x * x for floating point x is always non-negative.  */
          if (operand_equal_p (op0, op1, 0))
            return true;
          return (tree_expr_nonnegative_warnv_p (op0,
                                                 strict_overflow_p)
                  && tree_expr_nonnegative_warnv_p (op1,
                                                    strict_overflow_p));
        }

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
         both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
          && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
          && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
        {
          tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
            ? TREE_TYPE (TREE_OPERAND (op0, 0))
            : TREE_TYPE (op0);
          tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
            ? TREE_TYPE (TREE_OPERAND (op1, 0))
            : TREE_TYPE (op1);

          bool unsigned0 = TYPE_UNSIGNED (inner0);
          bool unsigned1 = TYPE_UNSIGNED (inner1);

          if (TREE_CODE (op0) == INTEGER_CST)
            unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;

          if (TREE_CODE (op1) == INTEGER_CST)
            unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;

          if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
              && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
            {
              unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
                ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
                : TYPE_PRECISION (inner0);

              unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
                ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
                : TYPE_PRECISION (inner1);

              return precision0 + precision1 < TYPE_PRECISION (type);
            }
        }
      break;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
                                             strict_overflow_p)
              || tree_expr_nonnegative_warnv_p (op1,
                                                strict_overflow_p));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
                                             strict_overflow_p)
              && tree_expr_nonnegative_warnv_p (op1,
                                                strict_overflow_p));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
                                            strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}

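/* For instance, with 32-bit int, (int) (unsigned char) x
   + (int) (unsigned char) y is known non-negative by the PLUS_EXPR
   case above: both inner types are unsigned 8-bit, so
   prec = MAX (8, 8) + 1 = 9 < 32.  */
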
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case FIXED_CST:
      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));

    case COND_EXPR:
      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                             strict_overflow_p)
              && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
                                                strict_overflow_p));

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}

/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_call_nonnegative_warnv_p (tree type, tree fndecl,
                               tree arg0, tree arg1, bool *strict_overflow_p)
{
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
        CASE_FLT_FN (BUILT_IN_ACOS):
        CASE_FLT_FN (BUILT_IN_ACOSH):
        CASE_FLT_FN (BUILT_IN_CABS):
        CASE_FLT_FN (BUILT_IN_COSH):
        CASE_FLT_FN (BUILT_IN_ERFC):
        CASE_FLT_FN (BUILT_IN_EXP):
        CASE_FLT_FN (BUILT_IN_EXP10):
        CASE_FLT_FN (BUILT_IN_EXP2):
        CASE_FLT_FN (BUILT_IN_FABS):
        CASE_FLT_FN (BUILT_IN_FDIM):
        CASE_FLT_FN (BUILT_IN_HYPOT):
        CASE_FLT_FN (BUILT_IN_POW10):
        CASE_INT_FN (BUILT_IN_FFS):
        CASE_INT_FN (BUILT_IN_PARITY):
        CASE_INT_FN (BUILT_IN_POPCOUNT):
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
        /* Always true.  */
        return true;

        CASE_FLT_FN (BUILT_IN_SQRT):
        /* sqrt(-0.0) is -0.0.  */
        if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
          return true;
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_ASINH):
        CASE_FLT_FN (BUILT_IN_ATAN):
        CASE_FLT_FN (BUILT_IN_ATANH):
        CASE_FLT_FN (BUILT_IN_CBRT):
        CASE_FLT_FN (BUILT_IN_CEIL):
        CASE_FLT_FN (BUILT_IN_ERF):
        CASE_FLT_FN (BUILT_IN_EXPM1):
        CASE_FLT_FN (BUILT_IN_FLOOR):
        CASE_FLT_FN (BUILT_IN_FMOD):
        CASE_FLT_FN (BUILT_IN_FREXP):
        CASE_FLT_FN (BUILT_IN_ICEIL):
        CASE_FLT_FN (BUILT_IN_IFLOOR):
        CASE_FLT_FN (BUILT_IN_IRINT):
        CASE_FLT_FN (BUILT_IN_IROUND):
        CASE_FLT_FN (BUILT_IN_LCEIL):
        CASE_FLT_FN (BUILT_IN_LDEXP):
        CASE_FLT_FN (BUILT_IN_LFLOOR):
        CASE_FLT_FN (BUILT_IN_LLCEIL):
        CASE_FLT_FN (BUILT_IN_LLFLOOR):
        CASE_FLT_FN (BUILT_IN_LLRINT):
        CASE_FLT_FN (BUILT_IN_LLROUND):
        CASE_FLT_FN (BUILT_IN_LRINT):
        CASE_FLT_FN (BUILT_IN_LROUND):
        CASE_FLT_FN (BUILT_IN_MODF):
        CASE_FLT_FN (BUILT_IN_NEARBYINT):
        CASE_FLT_FN (BUILT_IN_RINT):
        CASE_FLT_FN (BUILT_IN_ROUND):
        CASE_FLT_FN (BUILT_IN_SCALB):
        CASE_FLT_FN (BUILT_IN_SCALBLN):
        CASE_FLT_FN (BUILT_IN_SCALBN):
        CASE_FLT_FN (BUILT_IN_SIGNBIT):
        CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
        CASE_FLT_FN (BUILT_IN_SINH):
        CASE_FLT_FN (BUILT_IN_TANH):
        CASE_FLT_FN (BUILT_IN_TRUNC):
        /* True if the 1st argument is nonnegative.  */
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_FMAX):
        /* True if the 1st OR 2nd arguments are nonnegative.  */
        return (tree_expr_nonnegative_warnv_p (arg0,
                                               strict_overflow_p)
                || (tree_expr_nonnegative_warnv_p (arg1,
                                                   strict_overflow_p)));

        CASE_FLT_FN (BUILT_IN_FMIN):
        /* True if the 1st AND 2nd arguments are nonnegative.  */
        return (tree_expr_nonnegative_warnv_p (arg0,
                                               strict_overflow_p)
                && (tree_expr_nonnegative_warnv_p (arg1,
                                                   strict_overflow_p)));

        CASE_FLT_FN (BUILT_IN_COPYSIGN):
        /* True if the 2nd argument is nonnegative.  */
        return tree_expr_nonnegative_warnv_p (arg1,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_POWI):
        /* True if the 1st argument is nonnegative or the second
           argument is an even integer.  */
        if (TREE_CODE (arg1) == INTEGER_CST
            && (TREE_INT_CST_LOW (arg1) & 1) == 0)
          return true;
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_POW):
        /* True if the 1st argument is nonnegative or the second
           argument is an even integer valued real.  */
        if (TREE_CODE (arg1) == REAL_CST)
          {
            REAL_VALUE_TYPE c;
            HOST_WIDE_INT n;

            c = TREE_REAL_CST (arg1);
            n = real_to_integer (&c);
            if ((n & 1) == 0)
              {
                REAL_VALUE_TYPE cint;
                real_from_integer (&cint, VOIDmode, n,
                                   n < 0 ? -1 : 0, 0);
                if (real_identical (&c, &cint))
                  return true;
              }
          }
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

      default:
        break;
      }
  return tree_simple_nonnegative_warnv_p (CALL_EXPR,
                                          type);
}

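/* For example, pow (x, 2.0) is known non-negative for any x: 2.0 is
   an even integer valued REAL_CST, so the BUILT_IN_POW case above
   returns true without examining the first argument.  */
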
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

static bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code = TREE_CODE (t);
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (code)
    {
    case TARGET_EXPR:
      {
        tree temp = TARGET_EXPR_SLOT (t);
        t = TARGET_EXPR_INITIAL (t);

        /* If the initializer is non-void, then it's a normal expression
           that will be assigned to the slot.  */
        if (!VOID_TYPE_P (t))
          return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);

        /* Otherwise, the initializer sets the slot in some way.  One common
           way is an assignment statement at the end of the initializer.  */
        while (1)
          {
            if (TREE_CODE (t) == BIND_EXPR)
              t = expr_last (BIND_EXPR_BODY (t));
            else if (TREE_CODE (t) == TRY_FINALLY_EXPR
                     || TREE_CODE (t) == TRY_CATCH_EXPR)
              t = expr_last (TREE_OPERAND (t, 0));
            else if (TREE_CODE (t) == STATEMENT_LIST)
              t = expr_last (t);
            else
              break;
          }
        if (TREE_CODE (t) == MODIFY_EXPR
            && TREE_OPERAND (t, 0) == temp)
          return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                                strict_overflow_p);

        return false;
      }

    case CALL_EXPR:
      {
        tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
        tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;

        return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
                                              get_callee_fndecl (t),
                                              arg0,
                                              arg1,
                                              strict_overflow_p);
      }
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                            strict_overflow_p);
    case BIND_EXPR:
      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
                                            strict_overflow_p);
    case SAVE_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
                                            strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}

/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code;

  if (t == error_mark_node)
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              TREE_OPERAND (t, 1),
                                              strict_overflow_p);

    case tcc_unary:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
                                             TREE_TYPE (t),
                                             TREE_OPERAND (t, 0),
                                             strict_overflow_p);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              TREE_OPERAND (t, 1),
                                              strict_overflow_p);
    case TRUTH_NOT_EXPR:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
                                             TREE_TYPE (t),
                                             TREE_OPERAND (t, 0),
                                             strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
    }
}

/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */

bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
                            "determining that expression is always "
                            "non-negative"),
                           WARN_STRICT_OVERFLOW_MISC);
  return ret;
}

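/* Example: for T == (x & 0x7f) with signed x, the BIT_AND_EXPR case
   in tree_binary_nonnegative_warnv_p sees the non-negative constant
   operand, so this wrapper returns true; the overflow warning fires
   only when the answer relied on undefined signed overflow.  */
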
/* Return true when (CODE OP0) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
                            bool *strict_overflow_p)
{
  switch (code)
    {
    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
                                        strict_overflow_p);

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (op0);
        tree outer_type = type;

        return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
                && tree_expr_nonzero_warnv_p (op0,
                                              strict_overflow_p));
      }
      break;

    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
                                        strict_overflow_p);

    default:
      break;
    }

  return false;
}

/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonzero_warnv_p (enum tree_code code,
                             tree type,
                             tree op0,
                             tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          /* With the presence of negative values it is hard
             to say something.  */
          sub_strict_overflow_p = false;
          if (!tree_expr_nonnegative_warnv_p (op0,
                                              &sub_strict_overflow_p)
              || !tree_expr_nonnegative_warnv_p (op1,
                                                 &sub_strict_overflow_p))
            return false;
          /* One of operands must be positive and the other non-negative.  */
          /* We don't set *STRICT_OVERFLOW_P here: even if this value
             overflows, on a twos-complement machine the sum of two
             nonnegative numbers can never be zero.  */
          return (tree_expr_nonzero_warnv_p (op0,
                                             strict_overflow_p)
                  || tree_expr_nonzero_warnv_p (op1,
                                                strict_overflow_p));
        }
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          if (tree_expr_nonzero_warnv_p (op0,
                                         strict_overflow_p)
              && tree_expr_nonzero_warnv_p (op1,
                                            strict_overflow_p))
            {
              *strict_overflow_p = true;
              return true;
            }
        }
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
                                     &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (op1,
                                        &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
                                     &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;

          /* When both operands are nonzero, then MAX must be too.  */
          if (tree_expr_nonzero_warnv_p (op1,
                                         strict_overflow_p))
            return true;

          /* MAX where operand 0 is positive is positive.  */
          return tree_expr_nonnegative_warnv_p (op0,
                                                strict_overflow_p);
        }
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1,
                                          &sub_strict_overflow_p)
               && tree_expr_nonnegative_warnv_p (op1,
                                                 &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (op1,
                                         strict_overflow_p)
              || tree_expr_nonzero_warnv_p (op0,
                                            strict_overflow_p));

    default:
      break;
    }

  return false;
}

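/* Example: for a + b where both operands are known non-negative and
   at least one is known nonzero, the PLUS_EXPR case above returns
   true when signed overflow is undefined, since on a twos-complement
   machine the sum of two non-negative values can wrap but can never
   cancel to zero.  */
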
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
        tree base = TREE_OPERAND (t, 0);
        if (!DECL_P (base))
          base = get_base_address (base);

        if (!base)
          return false;

        /* Weak declarations may link to NULL.  Other things may also be NULL
           so protect with -fdelete-null-pointer-checks; but not variables
           allocated on the stack.  */
        if (DECL_P (base)
            && (flag_delete_null_pointer_checks
                || (DECL_CONTEXT (base)
                    && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
                    && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
          return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);

        /* Constants are never weak.  */
        if (CONSTANT_CLASS_P (base))
          return true;

        return false;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                     &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
                                        &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    default:
      break;
    }

  return false;
}

/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
                                         strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
                                          TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1),
                                          strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
                                         strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
                                          TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1),
                                          strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                        strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
                                        strict_overflow_p);

    case CALL_EXPR:
      return alloca_call_p (t);

    default:
      break;
    }
  return false;
}

/* Return true when T is an address and is known to be nonzero.
   Handle warnings about undefined signed overflow.  */

bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
                            "determining that expression is always "
                            "non-zero"),
                           WARN_STRICT_OVERFLOW_MISC);
  return ret;
}

/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;
      location_t loc = EXPR_LOCATION (exp);

      if (TREE_CODE (exp) == INDIRECT_REF)
        string = string_constant (exp1, &index);
      else
        {
          tree low_bound = array_ref_low_bound (exp);
          index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));

          /* Optimize the special-case of a zero lower bound.

             We convert the low_bound to sizetype to avoid some problems
             with constant folding.  (E.g. suppose the lower bound is 1,
             and its mode is QI.  Without the conversion, (ARRAY
             +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
             +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
          if (! integer_zerop (low_bound))
            index = size_diffop_loc (loc, index,
                                     fold_convert_loc (loc, sizetype, low_bound));

          string = exp1;
        }

      if (string
          && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
          && TREE_CODE (string) == STRING_CST
          && TREE_CODE (index) == INTEGER_CST
          && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
          && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
              == MODE_INT)
          && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
        return build_int_cst_type (TREE_TYPE (exp),
                                   (TREE_STRING_POINTER (string)
                                    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}

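/* For example, "abc"[1] satisfies all of the checks above and folds
   to the character constant 'b', an INTEGER_CST of the element
   type.  */
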
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        double_int val = tree_to_double_int (arg0);
        int overflow = neg_double (val.low, val.high, &val.low, &val.high);

        t = force_fit_type_double (type, val, 1,
                                   (overflow | TREE_OVERFLOW (arg0))
                                   && !TYPE_UNSIGNED (type));
        break;
      }

    case REAL_CST:
      t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
        FIXED_VALUE_TYPE f;
        bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
                                            &(TREE_FIXED_CST (arg0)), NULL,
                                            TYPE_SATURATING (type));
        t = build_fixed (type, f);
        /* Propagate overflow flags.  */
        if (overflow_p | TREE_OVERFLOW (arg0))
          TREE_OVERFLOW (t) = 1;
        break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}

/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        double_int val = tree_to_double_int (arg0);

        /* If the value is unsigned or non-negative, then the absolute value
           is the same as the ordinary value.  */
        if (TYPE_UNSIGNED (type)
            || !val.is_negative ())
          t = arg0;

        /* If the value is negative, then the absolute value is
           its negation.  */
        else
          {
            int overflow;

            overflow = neg_double (val.low, val.high, &val.low, &val.high);
            t = force_fit_type_double (type, val, -1,
                                       overflow | TREE_OVERFLOW (arg0));
          }
      }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
        t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      else
        t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}

/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (const_tree arg0, tree type)
{
  double_int val;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  val = ~tree_to_double_int (arg0);
  return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
}

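/* Example: fold_not_const on the INTEGER_CST 5 with a 32-bit signed
   type yields -6, since ~5 == -6 in twos-complement arithmetic.  */
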
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
        {
          switch (code)
            {
            case EQ_EXPR:
            case ORDERED_EXPR:
              result = 0;
              break;

            case NE_EXPR:
            case UNORDERED_EXPR:
            case UNLT_EXPR:
            case UNLE_EXPR:
            case UNGT_EXPR:
            case UNGE_EXPR:
            case UNEQ_EXPR:
              result = 1;
              break;

            case LT_EXPR:
            case LE_EXPR:
            case GT_EXPR:
            case GE_EXPR:
            case LTGT_EXPR:
              if (flag_trapping_math)
                return NULL_TREE;
              result = 0;
              break;

            default:
              gcc_unreachable ();
            }

          return constant_boolean_node (result, type);
        }

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
                                          TREE_REALPART (op0),
                                          TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
                                          TREE_IMAGPART (op0),
                                          TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
        return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
        return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
        return NULL_TREE;
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
        result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
        result = INT_CST_LT_UNSIGNED (op0, op1);
      else
        result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}

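/* Example: 1 > 2 is evaluated by swapping to 2 < 1 (false), while
   1 >= 2 becomes 1 < 2 (true) with the result inverted, giving
   false; only the EQ and LT kernels are ever computed directly.  */
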
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check the expression inside the return
     and, for a modify expression, its right hand side.  If either has no
     side effects, we don't need to wrap EXPR in a cleanup point expression.
     Note we don't check the left hand side of the modify because it should
     always be a return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
        return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
        return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}

/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

static tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
        return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
        {
          tree fop = fold_read_from_constant_string (op);
          if (fop)
            return fop;
          else
            return op;
        }
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype)
               && (!in_gimple_form
                   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          if (in_gimple_form
              && TREE_CODE (min_val) != INTEGER_CST)
            return NULL_TREE;
          return build4_loc (loc, ARRAY_REF, type, op, min_val,
                             NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && type == TREE_TYPE (optype))
        return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && type == TREE_TYPE (optype))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
        }
    }

  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
        {
          tree op00type;
          op00 = TREE_OPERAND (op00, 0);
          op00type = TREE_TYPE (op00);

          /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
          if (TREE_CODE (op00type) == VECTOR_TYPE
              && type == TREE_TYPE (op00type))
            {
              HOST_WIDE_INT offset = tree_low_cst (op01, 0);
              tree part_width = TYPE_SIZE (type);
              unsigned HOST_WIDE_INT part_widthi
                = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
              unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
              tree index = bitsize_int (indexi);

              if (offset / part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
                return fold_build3_loc (loc,
                                        BIT_FIELD_REF, type, op00,
                                        part_width, index);
            }
          /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
          else if (TREE_CODE (op00type) == COMPLEX_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree size = TYPE_SIZE_UNIT (type);
              if (tree_int_cst_equal (size, op01))
                return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
            }
          /* ((foo *)&fooarray)[1] => fooarray[1] */
          else if (TREE_CODE (op00type) == ARRAY_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree type_domain = TYPE_DOMAIN (op00type);
              tree min_val = size_zero_node;
              if (type_domain && TYPE_MIN_VALUE (type_domain))
                min_val = TYPE_MIN_VALUE (type_domain);
              op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
                                     TYPE_SIZE_UNIT (type));
              op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
              return build4_loc (loc, ARRAY_REF, type, op00, op01,
                                 NULL_TREE, NULL_TREE);
            }
        }
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
          || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
          && TREE_CODE (min_val) != INTEGER_CST)
        return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
                         NULL_TREE);
    }

  return NULL_TREE;
}

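/* Examples of the simplifications above: *(int *)&iarr becomes
   iarr[0] for an int array iarr, and for _Complex double c,
   *((double *)&c + 1) becomes __imag__ c because the constant byte
   offset equals TYPE_SIZE_UNIT of double.  */
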
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}

/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}

/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
        {
          double_int val = tree_to_double_int (value);
          bool overflow_p;

          if ((val.low & (divisor - 1)) == 0)
            return value;

          overflow_p = TREE_OVERFLOW (value);
          val.low &= ~(divisor - 1);
          val.low += divisor;
          if (val.low == 0)
            {
              val.high++;
              if (val.high == 0)
                overflow_p = true;
            }

          return force_fit_type_double (TREE_TYPE (value), val,
                                        -1, overflow_p);
        }
      else
        {
          tree t;

          t = build_int_cst (TREE_TYPE (value), divisor - 1);
          value = size_binop_loc (loc, PLUS_EXPR, value, t);
          t = build_int_cst (TREE_TYPE (value), -divisor);
          value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
        }
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}

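/* Example of the power-of-two path: rounding 37 up to a multiple of 8
   clears the low bits and adds the divisor, (37 & ~7) + 8 == 40; for
   a non-constant VALUE the equivalent (VALUE + 7) & -8 is built
   instead.  */
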
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}

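/* Example: rounding 37 down to a multiple of 8 is just 37 & -8 == 32
   when the divisor is a power of two; otherwise (37 / 8) * 8 is built
   with FLOOR_DIV_EXPR and MULT_EXPR.  */
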
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}

/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}

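/* Example: for int a[10] with 4-byte int, the addresses &a[3] and
   &a[1] share the core &a with bit positions 96 and 32, so
   ptr_difference_const stores (96 - 32) / 8 == 8 in *DIFF and returns
   true.  */
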
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
                                arg0 ? arg0 : TREE_OPERAND (exp, 0),
                                arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3_loc (loc,
                                COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
                                arg0 ? arg0 : TREE_OPERAND (exp, 1),
                                arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_COPYSIGN):
            /* Strip copysign function call, return the 1st argument.  */
            arg0 = CALL_EXPR_ARG (exp, 0);
            arg1 = CALL_EXPR_ARG (exp, 1);
            return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

          default:
            /* Strip sign ops from the argument of "odd" math functions.  */
            if (negate_mathfn_p (fcode))
              {
                arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
                if (arg0)
                  return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);