/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
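
/* Illustrative usage sketch (an editor's aside, not from the original
   source): because both operands must already share a sizetype-like
   type, a call such as

       tree sz = size_binop (PLUS_EXPR, size_int (4), size_int (8));

   folds at compile time to a sizetype INTEGER_CST of value 12, taking
   the fast constant path in size_binop_loc below.  */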
#include "coretypes.h"
#include "diagnostic-core.h"
#include "hash-table.h"
#include "langhooks.h"
#include "tree-flow.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree, tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
/* Similar to protected_set_expr_location, but never modify x in place,
   if location can and needs to be set, unshare it.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is according to operand two, that
     does the correct thing for POINTER_PLUS_EXPR where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));

  quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
                                          uns, code, &rem);

  if (rem.is_zero ())
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
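
/* Illustrative example: with CODE = TRUNC_DIV_EXPR, constants 12 and 4
   fold to 3 because the remainder is zero, while 13 and 4 return
   NULL_TREE and the division is left to be done at run time.  */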
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
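
/* Illustrative usage pattern (hypothetical caller, not from this file):
   a pass that folds speculatively brackets the calls like

       fold_defer_overflow_warnings ();
       tree res = fold (expr);
       fold_undefer_overflow_warnings (res_used_p, stmt,
                                       WARN_STRICT_OVERFLOW_MISC);

   so a -Wstrict-overflow diagnostic is only emitted when the folded
   result is actually used.  */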
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
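
/* Illustrative example: sin is odd, so -sin (x) may be rewritten as
   sin (-x).  rint is only odd when rounding cannot matter: under
   FE_DOWNWARD, rint (-0.5) is -1.0 while -rint (0.5) is -0.0, which
   is why the rint group above is guarded by !flag_rounding_math.  */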
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
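
/* Worked example: for a 32-bit signed type the only value whose
   negation overflows is INT_MIN (bit pattern 1 << 31); the final
   comparison against 1 << (prec - 1) rejects exactly that pattern,
   so -INT_MIN is never folded.  */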
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;
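
      /* Illustrative example: if x is not cheaply negatable, the first
         pattern rewrites -(x + 1) as -1 - x; the second pattern handles
         -(1 + x) symmetrically.  */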
    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
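
/* Worked example for the RSHIFT_EXPR case, assuming 32-bit int:
   (int) x >> 31 is 0 for non-negative x and -1 otherwise, so its
   negation is 0 or 1, which is exactly (unsigned) x >> 31; the rewrite
   therefore drops the explicit negation.  */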
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */
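
/* Illustrative decomposition: with CODE = PLUS_EXPR, IN = x + 5 splits
   into variable part x and *LITP = 5, while IN = x - 5 (MINUS_EXPR is
   also accepted when CODE is PLUS_EXPR) splits into x with
   *MINUS_LITP = 5, letting the caller rebuild the sum with
   associate_trees.  */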
static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case POINTER_PLUS_EXPR:
      return POINTER_TYPE_P (type1) && INTEGRAL_TYPE_P (type2);

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
                   int overflowable)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = op1 | op2;
      break;

    case BIT_XOR_EXPR:
      res = op1 ^ op2;
      break;

    case BIT_AND_EXPR:
      res = op1 & op2;
      break;

    case RSHIFT_EXPR:
      res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      res = op1.add_with_sign (op2, false, &overflow);
      break;

    case MINUS_EXPR:
      res = op1.sub_with_overflow (op2, &overflow);
      break;

    case MULT_EXPR:
      res = op1.mul_with_sign (op2, false, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      /* ??? Need quad precision, or an additional shift operand
         to the multiply primitive, to handle very large highparts.  */
      if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
        return NULL_TREE;
      tmp = op1 * op2;
      res = tmp.rshift (TYPE_PRECISION (type), TYPE_PRECISION (type), !uns);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            op1.low += op2.low - 1;

          res.low = op1.low / op2.low, res.high = 0;
          break;
        }
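
      /* Worked example of the shortcut: CEIL_DIV_EXPR of 7 by 3 first
         biases the dividend to 7 + (3 - 1) = 9, and truncating
         9 / 3 = 3 indeed yields the ceiling of 7/3.  */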
      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      if (op2.is_one ())
        {
          res = op1;
          break;
        }
      if (op1 == op2 && !op1.is_zero ())
        {
          res = double_int_one;
          break;
        }
      res = op1.divmod_with_overflow (op2, uns, code, &tmp, &overflow);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            op1.low += op2.low - 1;
          res.low = op1.low % op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      tmp = op1.divmod_with_overflow (op2, uns, code, &res, &overflow);
      break;

    case MIN_EXPR:
      res = op1.min (op2, uns);
      break;

    case MAX_EXPR:
      res = op1.max (op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
                             (!uns && overflow)
                             | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);
  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ... */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
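
              /* Numeric check of the formula above: (1 + 2i) / (3 + 4i)
                 has t = 3*3 + 4*4 = 25, real part (1*3 + 2*4)/25 = 11/25
                 and imaginary part (2*3 - 1*4)/25 = 2/25.  */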
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }
      if (real && imag)
        return build_complex (type, real, imag);
    }
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  return NULL_TREE;
}
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */
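
  /* Illustrative examples of these saturating semantics, assuming a
     32-bit target type: (int) 1.0e30 folds to INT_MAX (2147483647)
     with the overflow flag set, (int) -1.0e30 folds to INT_MIN, and
     (int) NaN folds to 0.  */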
  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          val = tree_to_double_int (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              val = tree_to_double_int (ut);
            }
        }
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  enum machine_mode mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
                          HOST_BITS_PER_DOUBLE_INT,
                          SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     We do so by adding 1 to temp when the fractional bits are not
     all zero.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
                             (temp.is_negative ()
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
                                       TREE_INT_CST (arg1),
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}
/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
          && (TYPE_ADDR_SPACE (TREE_TYPE (type))
              != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
        return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* Fall through.  */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, REALPART_EXPR,
                                                  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == FIXED_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1_loc (loc, NOP_EXPR, type, arg);

        case FIXED_POINT_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
          || TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            goto fold_convert_exit;
        }

      switch (TREE_CODE (orig))
        {
        case FIXED_POINT_TYPE:
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case BOOLEAN_TYPE:
        case REAL_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
        case FIXED_POINT_TYPE:
          return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                  fold_convert_loc (loc, TREE_TYPE (type), arg),
                                  fold_convert_loc (loc, TREE_TYPE (type),
                                                    integer_zero_node));
        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 0));
                ipart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 1));
                return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
              }

            arg = save_expr (arg);
            rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
            ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
            rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
            ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
            return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
          }

        default:
          gcc_unreachable ();
        }

    case VECTOR_TYPE:
      if (integer_zerop (arg))
        return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
                  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      gcc_unreachable ();
    }
 fold_convert_exit:
  return protected_set_expr_location_unshare (tem, loc);
}
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (const_tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case LABEL_DECL:
    case FUNCTION_DECL:
    case SSA_NAME:

    case COMPONENT_REF:
    case MEM_REF:
    case INDIRECT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
    case OBJ_TYPE_REF:

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case SAVE_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case TARGET_EXPR:
    case COND_EXPR:
    case BIND_EXPR:
      break;

    default:
      /* Assume the worst for front-end tree codes.  */
      if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
        break;
      return false;
    }

  return true;
}
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue_loc (location_t loc, tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;
  return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
}
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue_loc (location_t loc, tree x)
{
  if (pedantic_lvalues)
    return non_lvalue_loc (loc, x);

  return protected_set_expr_location_unshare (x, loc);
}
/* Given a tree comparison code, return the code that is the logical inverse.
   It is generally not safe to do this for floating-point comparisons, except
   for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
   ERROR_MARK in this case.  */
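
/* Illustrative example: when NaNs are honored, the inverse of LT_EXPR
   is UNGE_EXPR rather than GE_EXPR, because !(a < b) must also hold
   when a and b compare unordered.  */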
enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
      && code != ORDERED_EXPR && code != UNORDERED_EXPR)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case LTGT_EXPR:
    case UNEQ_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    case UNGT_EXPR:
      return UNLT_EXPR;
    case UNGE_EXPR:
      return UNLE_EXPR;
    case UNLT_EXPR:
      return UNGT_EXPR;
    case UNLE_EXPR:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */
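
/* Worked example of the bit encoding (LT = 1, EQ = 2, GT = 4):
   combining (x <= y) && (x >= y) computes
   COMPCODE_LE & COMPCODE_GE = 3 & 6 = 2 = COMPCODE_EQ,
   so the pair folds to x == y.  */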
2246 combine_comparisons (location_t loc
,
2247 enum tree_code code
, enum tree_code lcode
,
2248 enum tree_code rcode
, tree truth_type
,
2249 tree ll_arg
, tree lr_arg
)
2251 bool honor_nans
= HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg
)));
2252 enum comparison_code lcompcode
= comparison_to_compcode (lcode
);
2253 enum comparison_code rcompcode
= comparison_to_compcode (rcode
);
2258 case TRUTH_AND_EXPR
: case TRUTH_ANDIF_EXPR
:
2259 compcode
= lcompcode
& rcompcode
;
2262 case TRUTH_OR_EXPR
: case TRUTH_ORIF_EXPR
:
2263 compcode
= lcompcode
| rcompcode
;
2272 /* Eliminate unordered comparisons, as well as LTGT and ORD
2273 which are not used unless the mode has NaNs. */
2274 compcode
&= ~COMPCODE_UNORD
;
2275 if (compcode
== COMPCODE_LTGT
)
2276 compcode
= COMPCODE_NE
;
2277 else if (compcode
== COMPCODE_ORD
)
2278 compcode
= COMPCODE_TRUE
;
2280 else if (flag_trapping_math
)
2282 /* Check that the original operation and the optimized ones will trap
2283 under the same condition. */
2284 bool ltrap
= (lcompcode
& COMPCODE_UNORD
) == 0
2285 && (lcompcode
!= COMPCODE_EQ
)
2286 && (lcompcode
!= COMPCODE_ORD
);
2287 bool rtrap
= (rcompcode
& COMPCODE_UNORD
) == 0
2288 && (rcompcode
!= COMPCODE_EQ
)
2289 && (rcompcode
!= COMPCODE_ORD
);
2290 bool trap
= (compcode
& COMPCODE_UNORD
) == 0
2291 && (compcode
!= COMPCODE_EQ
)
2292 && (compcode
!= COMPCODE_ORD
);
2294 /* In a short-circuited boolean expression the LHS might be
2295 such that the RHS, if evaluated, will never trap. For
2296 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2297 if neither x nor y is NaN. (This is a mixed blessing: for
2298 example, the expression above will never trap, hence
2299 optimizing it to x < y would be invalid). */
2300 if ((code
== TRUTH_ORIF_EXPR
&& (lcompcode
& COMPCODE_UNORD
))
2301 || (code
== TRUTH_ANDIF_EXPR
&& !(lcompcode
& COMPCODE_UNORD
)))
2304 /* If the comparison was short-circuited, and only the RHS
2305 trapped, we may now generate a spurious trap. */
2307 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
2310 /* If we changed the conditions that cause a trap, we lose. */
2311 if ((ltrap
|| rtrap
) != trap
)
2315 if (compcode
== COMPCODE_TRUE
)
2316 return constant_boolean_node (true, truth_type
);
2317 else if (compcode
== COMPCODE_FALSE
)
2318 return constant_boolean_node (false, truth_type
);
2321 enum tree_code tcode
;
2323 tcode
= compcode_to_comparison ((enum comparison_code
) compcode
);
2324 return fold_build2_loc (loc
, tcode
, truth_type
, ll_arg
, lr_arg
);
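/* Worked example (an editorial addition, not GCC code): for integer
   operands, combine_comparisons folds "a < b || a == b" into "a <= b".
   A hypothetical sketch of a call, assuming GCC-internal trees a and b
   of some integer type:  */
#if 0   /* Sketch using GCC-internal trees; illustrative only.  */
tree folded = combine_comparisons (UNKNOWN_LOCATION,
                                   TRUTH_ORIF_EXPR, LT_EXPR, EQ_EXPR,
                                   boolean_type_node, a, b);
/* For "int" operands honor_nans is false, so this returns  a <= b .
   For "double", (a < b) || (a > b) combines to the LTGT predicate
   rather than NE, because NE is also true for unordered (NaN) inputs.  */
#endif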
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */

int
operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
      || TREE_TYPE (arg0) == error_mark_node
      || TREE_TYPE (arg1) == error_mark_node)
    return 0;

  /* Similar, if either does not have a type (like a released SSA name),
     they aren't equal.  */
  if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
    return 0;

  /* Check equality of integer constants before bailing out due to
     precision differences.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    return tree_int_cst_equal (arg0, arg1);

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  As pointers
     strictly don't have a signedness, require either two pointers or
     two non-pointers as well.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
      || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* We cannot consider pointers to different address space equal.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
      && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
          != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
    return 0;

  /* If both types don't have the same precision, then it is not safe
     to strip NOPs.  */
  if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* In case both args are comparisons but with different comparison
     code, try to swap the comparison operands of one arg to produce
     a match and compare that variant.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      && COMPARISON_CLASS_P (arg0)
      && COMPARISON_CLASS_P (arg1))
    {
      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));

      if (TREE_CODE (arg0) == swap_code)
        return operand_equal_p (TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg1, 1), flags)
               && operand_equal_p (TREE_OPERAND (arg0, 1),
                                   TREE_OPERAND (arg1, 0), flags);
    }

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
         Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.
     If we are taking an invariant address of two identical objects
     they are necessarily equal as well.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
          || (flags & OEP_CONSTANT_ADDRESS_OF)
          || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
        return tree_int_cst_equal (arg0, arg1);

      case FIXED_CST:
        return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
                                       TREE_FIXED_CST (arg1));

      case REAL_CST:
        if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
                                   TREE_REAL_CST (arg1)))
          return 1;

        if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
          {
            /* If we do not distinguish between signed and unsigned zero,
               consider them equal.  */
            if (real_zerop (arg0) && real_zerop (arg1))
              return 1;
          }
        return 0;

      case VECTOR_CST:
        {
          unsigned i;

          if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
            return 0;

          for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
            {
              if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
                                    VECTOR_CST_ELT (arg1, i), flags))
                return 0;
            }
          return 1;
        }

      case COMPLEX_CST:
        return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
                                 flags)
                && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
                                    flags));

      case STRING_CST:
        return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
                && ! memcmp (TREE_STRING_POINTER (arg0),
                             TREE_STRING_POINTER (arg1),
                             TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
        return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
                                TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
                                ? OEP_CONSTANT_ADDRESS_OF : 0);
      default:
        break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;

/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, both
   must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
                                    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N) \
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
        {
        CASE_CONVERT:
        case FIX_TRUNC_EXPR:
          if (TYPE_UNSIGNED (TREE_TYPE (arg0))
              != TYPE_UNSIGNED (TREE_TYPE (arg1)))
            return 0;
          break;
        default:
          break;
        }

      return OP_SAME (0);

    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
        return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 1), flags)
              && operand_equal_p (TREE_OPERAND (arg0, 1),
                                  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
         dereferencing contain a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
          || TREE_SIDE_EFFECTS (arg1))
        return 0;

      switch (TREE_CODE (arg0))
        {
        case INDIRECT_REF:
        case REALPART_EXPR:
        case IMAGPART_EXPR:
          return OP_SAME (0);

        case TARGET_MEM_REF:
          /* Require equal extra operands and then fall through to MEM_REF
             handling of the two common operands.  */
          if (!OP_SAME_WITH_NULL (2)
              || !OP_SAME_WITH_NULL (3)
              || !OP_SAME_WITH_NULL (4))
            return 0;
          /* Fallthru.  */
        case MEM_REF:
          /* Require equal access sizes, and similar pointer types.
             We can have incomplete types for array references of
             variable-sized arrays from the Fortran frontend
             though.  */
          return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
                   || (TYPE_SIZE (TREE_TYPE (arg0))
                       && TYPE_SIZE (TREE_TYPE (arg1))
                       && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
                                           TYPE_SIZE (TREE_TYPE (arg1)),
                                           flags)))
                  && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
                      == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
                  && OP_SAME (0) && OP_SAME (1));

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          /* Operands 2 and 3 may be null.
             Compare the array index by value if it is constant first as we
             may have different types but same value here.  */
          return (OP_SAME (0)
                  && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
                                          TREE_OPERAND (arg1, 1))
                      || OP_SAME (1))
                  && OP_SAME_WITH_NULL (2)
                  && OP_SAME_WITH_NULL (3));

        case COMPONENT_REF:
          /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
             may be NULL when we're called to compare MEM_EXPRs.  */
          return OP_SAME_WITH_NULL (0)
                 && OP_SAME (1)
                 && OP_SAME_WITH_NULL (2);

        case BIT_FIELD_REF:
          return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

        default:
          return 0;
        }

    case tcc_expression:
      switch (TREE_CODE (arg0))
        {
        case ADDR_EXPR:
        case TRUTH_NOT_EXPR:
          return OP_SAME (0);

        case TRUTH_ANDIF_EXPR:
        case TRUTH_ORIF_EXPR:
          return OP_SAME (0) && OP_SAME (1);

        case FMA_EXPR:
        case WIDEN_MULT_PLUS_EXPR:
        case WIDEN_MULT_MINUS_EXPR:
          if (!OP_SAME (2))
            return 0;
          /* The multiplication operands are commutative.  */
          /* FALLTHRU */

        case TRUTH_AND_EXPR:
        case TRUTH_OR_EXPR:
        case TRUTH_XOR_EXPR:
          if (OP_SAME (0) && OP_SAME (1))
            return 1;

          /* Otherwise take into account this is a commutative operation.  */
          return (operand_equal_p (TREE_OPERAND (arg0, 0),
                                   TREE_OPERAND (arg1, 1), flags)
                  && operand_equal_p (TREE_OPERAND (arg0, 1),
                                      TREE_OPERAND (arg1, 0), flags));

        case COND_EXPR:
        case VEC_COND_EXPR:
        case DOT_PROD_EXPR:
          return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

        default:
          return 0;
        }

    case tcc_vl_exp:
      switch (TREE_CODE (arg0))
        {
        case CALL_EXPR:
          /* If the CALL_EXPRs call different functions, then they
             clearly can not be equal.  */
          if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
                                 flags))
            return 0;

          {
            unsigned int cef = call_expr_flags (arg0);
            if (flags & OEP_PURE_SAME)
              cef &= ECF_CONST | ECF_PURE;
            else
              cef &= ECF_CONST;
            if (!cef)
              return 0;
          }

          /* Now see if all the arguments are the same.  */
          {
            const_call_expr_arg_iterator iter0, iter1;
            const_tree a0, a1;
            for (a0 = first_const_call_expr_arg (arg0, &iter0),
                   a1 = first_const_call_expr_arg (arg1, &iter1);
                 a0 && a1;
                 a0 = next_const_call_expr_arg (&iter0),
                   a1 = next_const_call_expr_arg (&iter1))
              if (! operand_equal_p (a0, a1, flags))
                return 0;

            /* If we get here and both argument lists are exhausted
               then the CALL_EXPRs are equal.  */
            return ! (a0 || a1);
          }

        default:
          return 0;
        }

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
              && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
              && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
              && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}
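/* Usage sketch (an editorial addition, not GCC code): the FLAGS argument
   selects how strict the equality test is.  A hypothetical sketch of
   calls, assuming a GCC-internal tree x:  */
#if 0   /* Sketch using GCC-internal trees; illustrative only.  */
/* Structural equality: a side-effect-free VAR_DECL equals itself.  */
int same = operand_equal_p (x, x, 0);               /* 1 */
/* With OEP_ONLY_CONST, only constants may compare equal.  */
int cst = operand_equal_p (x, x, OEP_ONLY_CONST);   /* 0 unless x is constant */
/* Note that -0.0 and 0.0 are REAL_CSTs that are not identical here,
   even though they compare equal with C's ==, unless signed zeros
   are not honored for the mode.  */
#endif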
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
         to match the longer operand.  */
      primarg1 = fold_convert (signed_or_unsigned_type_for
                               (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
        return 1;
    }

  return 0;
}
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
               || code == COMPOUND_EXPR))
    tclass = tcc_binary;

  else if (tclass == tcc_expression && code == SAVE_EXPR
           && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
         too complex to handle.  */
      if (*cval1 || *cval2)
        return 0;

      tclass = tcc_unary;
      *save_p = 1;
    }

  switch (tclass)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
              && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
        return (twoval_comparison_p (TREE_OPERAND (arg, 0),
                                     cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                        cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 2),
                                        cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
         the second operand, we know *CVAL1 can't be zero.  It must be that
         one side of the comparison is each of the values; test for the
         case where this isn't true by failing if the two operands
         are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
                           TREE_OPERAND (arg, 1), 0))
        return 0;

      if (*cval1 == 0)
        *cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
        ;
      else
        return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
        ;
      else
        return 0;

      return 1;

    default:
      return 0;
    }
}
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (location_t loc, tree arg, tree old0, tree new0,
            tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    tclass = tcc_binary;

  switch (tclass)
    {
    case tcc_unary:
      return fold_build1_loc (loc, code, type,
                              eval_subst (loc, TREE_OPERAND (arg, 0),
                                          old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2_loc (loc, code, type,
                              eval_subst (loc, TREE_OPERAND (arg, 0),
                                          old0, new0, old1, new1),
                              eval_subst (loc, TREE_OPERAND (arg, 1),
                                          old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
        {
        case SAVE_EXPR:
          return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
                             old1, new1);

        case COMPOUND_EXPR:
          return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
                             old1, new1);

        case COND_EXPR:
          return fold_build3_loc (loc, code, type,
                                  eval_subst (loc, TREE_OPERAND (arg, 0),
                                              old0, new0, old1, new1),
                                  eval_subst (loc, TREE_OPERAND (arg, 1),
                                              old0, new0, old1, new1),
                                  eval_subst (loc, TREE_OPERAND (arg, 2),
                                              old0, new0, old1, new1));
        default:
          break;
        }
      /* Fall through - ???  */

    case tcc_comparison:
      {
        tree arg0 = TREE_OPERAND (arg, 0);
        tree arg1 = TREE_OPERAND (arg, 1);

        /* We need to check both for exact equality and tree equality.  The
           former will be true if the operand has a side-effect.  In that
           case, we know the operand occurred exactly once.  */

        if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
          arg0 = new0;
        else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
          arg0 = new1;

        if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
          arg1 = new0;
        else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
          arg1 = new1;

        return fold_build2_loc (loc, code, type, arg0, arg1);
      }

    default:
      return arg;
    }
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement casted to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1_loc (loc, NOP_EXPR, void_type_node,
                       fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2_loc (loc, COMPOUND_EXPR, type,
                       fold_ignored_result (omitted), t);

  return non_lvalue_loc (loc, t);
}
/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
                               tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement casted to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1_loc (loc, NOP_EXPR, void_type_node,
                       fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2_loc (loc, COMPOUND_EXPR, type,
                       fold_ignored_result (omitted), t);

  return pedantic_non_lvalue_loc (loc, t);
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands_loc (location_t loc, tree type, tree result,
                       tree omitted1, tree omitted2)
{
  tree t = fold_convert_loc (loc, type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
  if (TREE_SIDE_EFFECTS (omitted1))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
}
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

static tree
fold_truth_not_expr (location_t loc, tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  location_t loc1, loc2;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
          && flag_trapping_math
          && code != ORDERED_EXPR && code != UNORDERED_EXPR
          && code != NE_EXPR && code != EQ_EXPR)
        return NULL_TREE;

      code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
      if (code == ERROR_MARK)
        return NULL_TREE;

      return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
                         TREE_OPERAND (arg, 1));
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_OR_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_AND_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
         unless the second operand is a TRUTH_NOT_EXPR in which case our
         result is the XOR of the first operand with the inside of the
         negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
        return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
                           TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
        return build2_loc (loc, TRUTH_XOR_EXPR, type,
                           invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
                           TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ORIF_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
        tree arg1 = TREE_OPERAND (arg, 1);
        tree arg2 = TREE_OPERAND (arg, 2);

        loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
        loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);

        /* A COND_EXPR may have a throw as one operand, which
           then has void type.  Just leave void operands
           as they are.  */
        return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
                           VOID_TYPE_P (TREE_TYPE (arg1))
                           ? arg1 : invert_truthvalue_loc (loc1, arg1),
                           VOID_TYPE_P (TREE_TYPE (arg2))
                           ? arg2 : invert_truthvalue_loc (loc2, arg2));
      }

    case COMPOUND_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, COMPOUND_EXPR, type,
                         TREE_OPERAND (arg, 0),
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));

    CASE_CONVERT:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
        return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

      /* ... fall through ...  */

    case FLOAT_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, TREE_CODE (arg), type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
        return NULL_TREE;
      return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));

    case SAVE_EXPR:
      return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, CLEANUP_POINT_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    default:
      return NULL_TREE;
    }
}
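/* Worked example (an editorial addition, not GCC code): what
   fold_truth_not_expr computes, shown on plain C values: comparison
   inversion and De Morgan's laws.  Standalone sketch:  */
#if 0   /* Example only; compile separately, never as part of GCC.  */
#include <assert.h>

int
main (void)
{
  int a = 1, b = 2, p = 1, q = 0;
  /* !(a < b) is (a >= b); for floats under -ftrapping-math this
     inversion is refused above, since the two trap differently on NaNs.  */
  assert (!(a < b) == (a >= b));
  /* De Morgan: negation distributes over && and || with flipped op.  */
  assert (!(p && q) == (!p || !q));
  assert (!(p || q) == (!p && !q));
  return 0;
}
#endif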
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

tree
invert_truthvalue_loc (location_t loc, tree arg)
{
  tree tem;

  if (TREE_CODE (arg) == ERROR_MARK)
    return arg;

  tem = fold_truth_not_expr (loc, arg);
  if (!tem)
    tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);

  return tem;
}
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
        (A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (location_t loc, enum tree_code code, tree type,
                     tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
          && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  common = fold_convert_loc (loc, type, common);
  left = fold_convert_loc (loc, type, left);
  right = fold_convert_loc (loc, type, right);
  return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
                          fold_build2_loc (loc, code, type, left, right));
}
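/* Illustrative example (an editorial addition, not GCC code): the
   distribution performed above, checked on unsigned ints.  Standalone
   sketch:  */
#if 0   /* Example only; compile separately, never as part of GCC.  */
#include <assert.h>

int
main (void)
{
  unsigned a = 0x0f, b = 0x33, c = 0x55;
  /* (A | B) & (A | C) == A | (B & C), saving one operation.  */
  assert (((a | b) & (a | c)) == (a | (b & c)));
  /* Dually, (A & B) | (A & C) == A & (B | C).  */
  assert (((a & b) | (a & c)) == (a & (b | c)));
  return 0;
}
#endif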
/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
   with code CODE.  This optimization is unsafe.  */

static tree
distribute_real_division (location_t loc, enum tree_code code, tree type,
                          tree arg0, tree arg1)
{
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
                          TREE_OPERAND (arg1, 1), 0))
    return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
                            fold_build2_loc (loc, code, type,
                                             TREE_OPERAND (arg0, 0),
                                             TREE_OPERAND (arg1, 0)),
                            TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
                       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      if (!mul0)
        real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
        real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2_loc (loc, MULT_EXPR, type,
                              TREE_OPERAND (arg0, 0),
                              build_real (type, r0));
    }

  return NULL_TREE;
}
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

static tree
make_bit_field_ref (location_t loc, tree inner, tree type,
                    HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
{
  tree result, bftype;

  if (bitpos == 0)
    {
      tree size = TYPE_SIZE (TREE_TYPE (inner));
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
           || POINTER_TYPE_P (TREE_TYPE (inner)))
          && host_integerp (size, 0)
          && tree_low_cst (size, 0) == bitsize)
        return fold_convert_loc (loc, type, inner);
    }

  bftype = type;
  if (TYPE_PRECISION (bftype) != bitsize
      || TYPE_UNSIGNED (bftype) == !unsignedp)
    bftype = build_nonstandard_integer_type (bitsize, 0);

  result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
                       size_int (bitsize), bitsize_int (bitpos));

  if (bftype != type)
    result = fold_convert_loc (loc, type, result);

  return result;
}
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (location_t loc, enum tree_code code,
                            tree compare_type, tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* In the strict volatile bitfields case, doing code changes here may prevent
     other optimizations, in particular in a SLOW_BYTE_ACCESS setting.  */
  if (flag_strict_volatile_bitfields > 0)
    return 0;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
                                &lunsignedp, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
         sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
                                    &runsignedp, &rvolatilep, false);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
          || lunsignedp != runsignedp || offset != 0
          || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
        return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  if (lvolatilep
      && GET_MODE_BITSIZE (lmode) > 0
      && flag_strict_volatile_bitfields > 0)
    nmode = lmode;
  else
    nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
                           const_p ? TYPE_ALIGN (TREE_TYPE (linner))
                           : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
                                  TYPE_ALIGN (TREE_TYPE (rinner))),
                           word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_cst_type (unsigned_type, -1);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
  mask = const_binop (RSHIFT_EXPR, mask,
                      size_int (nbitsize - lbitsize - lbitpos));

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return fold_build2_loc (loc, code, compare_type,
                            fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
                                             make_bit_field_ref (loc, linner,
                                                                 unsigned_type,
                                                                 nbitsize,
                                                                 nbitpos, 1),
                                             mask),
                            fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
                                             make_bit_field_ref (loc, rinner,
                                                                 unsigned_type,
                                                                 nbitsize,
                                                                 nbitpos, 1),
                                             mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
                                        fold_convert_loc (loc,
                                                          unsigned_type, rhs),
                                        size_int (lbitsize))))
        {
          warning (0, "comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR,
                              fold_convert_loc (loc, signed_type, rhs),
                              size_int (lbitsize - 1));
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
        {
          warning (0, "comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = build_int_cst (type, 0);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = const_binop (BIT_AND_EXPR,
                     const_binop (LSHIFT_EXPR,
                                  fold_convert_loc (loc, unsigned_type, rhs),
                                  size_int (lbitpos)),
                     mask);

  lhs = build2_loc (loc, code, compare_type,
                    build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
  return lhs;
}
/* Subroutine for fold_truth_andor_1: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
                        HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
                        int *punsignedp, int *pvolatilep,
                        tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (CONVERT_EXPR_P (exp)
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
        return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
                               punsignedp, pvolatilep, false);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst_type (unsigned_type, -1);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
                            fold_convert_loc (loc, unsigned_type, and_mask),
                            mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bit positions.  */

static int
all_ones_mask_p (const_tree mask, int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);
  tree tmask;

  tmask = build_int_cst_type (signed_type_for (type), -1);

  return
    tree_int_cst_equal (mask,
                        const_binop (RSHIFT_EXPR,
                                     const_binop (LSHIFT_EXPR, tmask,
                                                  size_int (precision - size)),
                                     size_int (precision - size)));
}
/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, const_tree val)
{
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = ((unsigned HOST_WIDE_INT) -1
                 >> (HOST_BITS_PER_DOUBLE_INT - width));
      mask_lo = -1;
    }
  else
    {
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = ((unsigned HOST_WIDE_INT) -1
                 >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
/* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (const_tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  STRIP_NOPS (exp);

  return (CONSTANT_CLASS_P (exp)
          || TREE_CODE (exp) == SSA_NAME
          || (DECL_P (exp)
              && ! TREE_ADDRESSABLE (exp)
              && ! TREE_THIS_VOLATILE (exp)
              && ! DECL_NONLOCAL (exp)
              /* Don't regard global variables as simple.  They may be
                 allocated in ways unknown to the compiler (shared memory,
                 #pragma weak, etc).  */
              && ! TREE_PUBLIC (exp)
              && ! DECL_EXTERNAL (exp)
              /* Loading a static variable is unduly expensive, but global
                 registers aren't expensive.  */
              && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}
/* Subroutine for fold_truth_andor: determine if an operand is simple enough
   to be evaluated unconditionally.
   In addition to simple_operand_p, we assume that comparisons, conversions,
   and logic-not operations are simple, if their operands are simple, too.  */

static bool
simple_operand_p_2 (tree exp)
{
  enum tree_code code;

  if (TREE_SIDE_EFFECTS (exp)
      || tree_could_trap_p (exp))
    return false;

  while (CONVERT_EXPR_P (exp))
    exp = TREE_OPERAND (exp, 0);

  code = TREE_CODE (exp);

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return (simple_operand_p (TREE_OPERAND (exp, 0))
            && simple_operand_p (TREE_OPERAND (exp, 1)));

  if (code == TRUTH_NOT_EXPR)
    return simple_operand_p_2 (TREE_OPERAND (exp, 0));

  return simple_operand_p (exp);
}
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
        X == 2 || X == 3 || X == 4 || X == 5
   and
        X >= 2 && X <= 5
   are converted to
        (unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
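/* Illustrative example (an editorial addition, not GCC code): the
   transformation above, checked exhaustively over a small domain.
   Standalone sketch:  */
#if 0   /* Example only; compile separately, never as part of GCC.  */
#include <assert.h>

int
main (void)
{
  int x;
  for (x = -10; x <= 10; x++)
    {
      /* X == 2 || X == 3 || X == 4 || X == 5 is the range + [2, 5],
         testable with one unsigned subtract and compare: values below
         2 wrap to huge unsigned values, values above 5 exceed 3.  */
      int ored = (x == 2 || x == 3 || x == 4 || x == 5);
      int range = (unsigned) (x - 2) <= 3;
      assert (ored == range);
    }
  return 0;
}
#endif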
/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
             tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
                         arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);

  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}
/* Helper routine for make_range.  Perform one step for it, return
   new expression if the loop should continue or NULL_TREE if it should
   stop.  */

tree
make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
                 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
                 bool *strict_overflow_p)
{
  tree arg0_type = TREE_TYPE (arg0);
  tree n_low, n_high, low = *p_low, high = *p_high;
  int in_p = *p_in_p, n_in_p;

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      *p_in_p = ! in_p;
      return arg0;

    case EQ_EXPR: case NE_EXPR:
    case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
      /* We can only do something if the range is testing for zero
         and if the second operand is an integer constant.  Note that
         saying something is "in" the range we make is done by
         complementing IN_P since it will set in the initial case of
         being not equal to zero; "out" is leaving it alone.  */
      if (low == NULL_TREE || high == NULL_TREE
          || ! integer_zerop (low) || ! integer_zerop (high)
          || TREE_CODE (arg1) != INTEGER_CST)
        return NULL_TREE;

      switch (code)
        {
        case NE_EXPR:  /* - [c, c]  */
          low = high = arg1;
          break;
        case EQ_EXPR:  /* + [c, c]  */
          in_p = ! in_p, low = high = arg1;
          break;
        case GT_EXPR:  /* - [-, c] */
          low = 0, high = arg1;
          break;
        case GE_EXPR:  /* + [c, -] */
          in_p = ! in_p, low = arg1, high = 0;
          break;
        case LT_EXPR:  /* - [c, -] */
          low = arg1, high = 0;
          break;
        case LE_EXPR:  /* + [-, c] */
          in_p = ! in_p, low = 0, high = arg1;
          break;
        default:
          gcc_unreachable ();
        }

      /* If this is an unsigned comparison, we also know that EXP is
         greater than or equal to zero.  We base the range tests we make
         on that fact, so we record it here so we can parse existing
         range tests.  We test arg0_type since often the return type
         of, e.g. EQ_EXPR, is boolean.  */
      if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
        {
          if (! merge_ranges (&n_in_p, &n_low, &n_high,
                              in_p, low, high, 1,
                              build_int_cst (arg0_type, 0),
                              NULL_TREE))
            return NULL_TREE;

          in_p = n_in_p, low = n_low, high = n_high;

          /* If the high bound is missing, but we have a nonzero low
             bound, reverse the range so it goes from zero to the low bound
             minus 1.  */
          if (high == 0 && low && ! integer_zerop (low))
            {
              in_p = ! in_p;
              high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
                                  integer_one_node, 0);
              low = build_int_cst (arg0_type, 0);
            }
        }

      *p_low = low;
      *p_high = high;
      *p_in_p = in_p;
      return arg0;

    case NEGATE_EXPR:
      /* If flag_wrapv and ARG0_TYPE is signed, make sure
         low and high are non-NULL, then normalize will DTRT.  */
      if (!TYPE_UNSIGNED (arg0_type)
          && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
        {
          if (low == NULL_TREE)
            low = TYPE_MIN_VALUE (arg0_type);
          if (high == NULL_TREE)
            high = TYPE_MAX_VALUE (arg0_type);
        }

      /* (-x) IN [a,b] -> x in [-b, -a]  */
      n_low = range_binop (MINUS_EXPR, exp_type,
                           build_int_cst (exp_type, 0),
                           0, high, 1);
      n_high = range_binop (MINUS_EXPR, exp_type,
                            build_int_cst (exp_type, 0),
                            0, low, 0);
      if (n_high != 0 && TREE_OVERFLOW (n_high))
        return NULL_TREE;
      goto normalize;

    case BIT_NOT_EXPR:
      /* ~ X -> -X - 1  */
      return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
                         build_int_cst (exp_type, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
      if (TREE_CODE (arg1) != INTEGER_CST)
        return NULL_TREE;

      /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
         move a constant to the other side.  */
      if (!TYPE_UNSIGNED (arg0_type)
          && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
        return NULL_TREE;

      /* If EXP is signed, any overflow in the computation is undefined,
         so we don't worry about it so long as our computations on
         the bounds don't overflow.  For unsigned, overflow is defined
         and this is exactly the right thing.  */
      n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                           arg0_type, low, 0, arg1, 0);
      n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                            arg0_type, high, 1, arg1, 0);
      if ((n_low != 0 && TREE_OVERFLOW (n_low))
          || (n_high != 0 && TREE_OVERFLOW (n_high)))
        return NULL_TREE;

      if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
        *strict_overflow_p = true;

      normalize:
        /* Check for an unsigned range which has wrapped around the maximum
           value thus making n_high < n_low, and normalize it.  */
        if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
          {
            low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
                               integer_one_node, 0);
            high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
                                integer_one_node, 0);

            /* If the range is of the form +/- [ x+1, x ], we won't
               be able to normalize it.  But then, it represents the
               whole range or the empty set, so make it
               +/- [ -, - ].  */
            if (tree_int_cst_equal (n_low, low)
                && tree_int_cst_equal (n_high, high))
              low = high = 0;
            else
              in_p = ! in_p;
          }
        else
          low = n_low, high = n_high;

        *p_low = low;
        *p_high = high;
        *p_in_p = in_p;
        return arg0;

    CASE_CONVERT:
    case NON_LVALUE_EXPR:
      if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
        return NULL_TREE;

      if (! INTEGRAL_TYPE_P (arg0_type)
          || (low != 0 && ! int_fits_type_p (low, arg0_type))
          || (high != 0 && ! int_fits_type_p (high, arg0_type)))
        return NULL_TREE;

      n_low = low, n_high = high;

      if (n_low != 0)
        n_low = fold_convert_loc (loc, arg0_type, n_low);

      if (n_high != 0)
        n_high = fold_convert_loc (loc, arg0_type, n_high);

      /* If we're converting arg0 from an unsigned type, to exp,
         a signed type, we will be doing the comparison as unsigned.
         The tests above have already verified that LOW and HIGH
         are both positive.

         So we have to ensure that we will handle large unsigned
         values the same way that the current signed bounds treat
         negative values.  */

      if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
        {
          tree high_positive;
          tree equiv_type;
          /* For fixed-point modes, we need to pass the saturating flag
             as the 2nd parameter.  */
          if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
            equiv_type
              = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
                                                TYPE_SATURATING (arg0_type));
          else
            equiv_type
              = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);

          /* A range without an upper bound is, naturally, unbounded.
             Since convert would have cropped a very large value, use
             the max value for the destination type.  */
          high_positive
            = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
              : TYPE_MAX_VALUE (arg0_type);

          if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
            high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
                                             fold_convert_loc (loc, arg0_type,
                                                               high_positive),
                                             build_int_cst (arg0_type, 1));

          /* If the low bound is specified, "and" the range with the
             range for which the original unsigned value will be
             positive.  */
          if (low != 0)
            {
              if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
                                  1, fold_convert_loc (loc, arg0_type,
                                                       integer_zero_node),
                                  high_positive))
                return NULL_TREE;

              in_p = (n_in_p == in_p);
            }
          else
            {
              /* Otherwise, "or" the range with the range of the input
                 that will be interpreted as negative.  */
              if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
                                  1, fold_convert_loc (loc, arg0_type,
                                                       integer_zero_node),
                                  high_positive))
                return NULL_TREE;

              in_p = (in_p != n_in_p);
            }
        }

      *p_low = n_low;
      *p_high = n_high;
      *p_in_p = in_p;
      return arg0;

    default:
      return NULL_TREE;
    }
}
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same
   type as the returned expression.  If EXP is not a comparison, we
   will most likely not be returning a useful value and range.  Set
   *STRICT_OVERFLOW_P to true if the return value is only valid
   because signed overflow is undefined; otherwise, do not change
   *STRICT_OVERFLOW_P.  */

tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
            bool *strict_overflow_p)
{
  enum tree_code code;
  tree arg0, arg1 = NULL_TREE;
  tree exp_type, nexp;
  int in_p;
  tree low, high;
  location_t loc = EXPR_LOCATION (exp);

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = build_int_cst (TREE_TYPE (exp), 0);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);
      arg0 = NULL_TREE;

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
        {
          if (TREE_OPERAND_LENGTH (exp) > 0)
            arg0 = TREE_OPERAND (exp, 0);
          if (TREE_CODE_CLASS (code) == tcc_binary
              || TREE_CODE_CLASS (code) == tcc_comparison
              || (TREE_CODE_CLASS (code) == tcc_expression
                  && TREE_OPERAND_LENGTH (exp) > 1))
            arg1 = TREE_OPERAND (exp, 1);
        }
      if (arg0 == NULL_TREE)
        break;

      nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
                              &high, &in_p, strict_overflow_p);
      if (nexp == NULL_TREE)
        break;
      exp = nexp;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
                                                 exp, 0, low, 0))
                      && integer_onep (range_binop (LE_EXPR, integer_type_node,
                                                    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.  */

tree
build_range_check (location_t loc, tree type, tree exp, int in_p,
		   tree low, tree high)
{
  tree etype = TREE_TYPE (exp), value;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (etype) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (! in_p)
    {
      value = build_range_check (loc, type, exp, 1, low, high);
      if (value != 0)
	return invert_truthvalue_loc (loc, value);

      return 0;
    }

  if (low == 0 && high == 0)
    return build_int_cst (type, 1);

  if (low == 0)
    return fold_build2_loc (loc, LE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, high));

  if (high == 0)
    return fold_build2_loc (loc, GE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  if (operand_equal_p (low, high, 0))
    return fold_build2_loc (loc, EQ_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
	{
	  etype = unsigned_type_for (etype);
	  high = fold_convert_loc (loc, etype, high);
	  exp = fold_convert_loc (loc, etype, exp);
	}
      return build_range_check (loc, type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
	{
	  hi = 0;
	  lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
	}
      else
	{
	  hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
	  lo = (unsigned HOST_WIDE_INT) -1;
	}

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
	{
	  if (TYPE_UNSIGNED (etype))
	    {
	      tree signed_etype = signed_type_for (etype);
	      if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
		etype
		  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
	      else
		etype = signed_etype;
	      exp = fold_convert_loc (loc, etype, exp);
	    }
	  return fold_build2_loc (loc, GT_EXPR, type, exp,
				  build_int_cst (etype, 0));
	}
    }

  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetic for the type of the expression.
     First make sure that arithmetic in this type is valid, then make sure
     that it wraps around.  */
  if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
    etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
					    TYPE_UNSIGNED (etype));

  if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
	 for the type in question, as we rely on this here.  */
      utype = unsigned_type_for (etype);
      maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
			  integer_one_node, 1);
      minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
				      minv, 1, maxv, 1)))
	etype = utype;
      else
	return 0;
    }

  high = fold_convert_loc (loc, etype, high);
  low = fold_convert_loc (loc, etype, low);
  exp = fold_convert_loc (loc, etype, exp);

  value = const_binop (MINUS_EXPR, high, low);

  if (POINTER_TYPE_P (etype))
    {
      if (value != 0 && !TREE_OVERFLOW (value))
	{
	  low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
	  return build_range_check (loc, type,
				    fold_build_pointer_plus_loc (loc, exp, low),
				    1, build_int_cst (etype, 0), value);
	}
      return 0;
    }

  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (loc, type,
			      fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
			      1, build_int_cst (etype, 0), value);

  return 0;
}
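
/* Illustrative note (not part of the original source): for a signed char C
   and the inclusive range [1, 127], the special case above yields

       build_range_check (loc, type, c, 1, 1, 127)
	 ==>  (signed char) c > 0

   and for a general range [LOW, HIGH] on a wrapping type the fallback
   reduces two comparisons to the single unsigned test

       (unsigned) (c - LOW) <= (unsigned) (HIGH - LOW).  */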
/* Return the predecessor of VAL in its type, handling the infinite case.  */

static tree
range_predecessor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
    return 0;
  else
    return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}

/* Return the successor of VAL in its type, handling the infinite case.  */

static tree
range_successor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
    return 0;
  else
    return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}
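
/* Illustrative note (not part of the original source): these return
   VAL - 1 and VAL + 1 respectively, except at the extreme value of an
   integral type, where there is no predecessor (resp. successor) and 0
   is returned so that callers such as merge_ranges can punt; e.g. the
   range_successor of 127 in signed char is 0, not -128.  */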
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

bool
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
	      tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (lowequal
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the second range to the end of the first.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)
	{
	  low = range_successor (high1);
	  high = high0;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* We are in the weird situation where high0 > high1 but
		 high1 has no successor.  Punt.  */
	      return 0;
	    }
	}
      else if (! subset || highequal)
	{
	  low = low0;
	  high = range_predecessor (low1);
	  in_p = 1;
	  if (high == 0)
	    {
	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
	      return 0;
	    }
	}
      else
	return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
	in_p = 0, low = high = 0;
      else
	{
	  low = range_successor (high0);
	  high = high1;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* high1 > high0 but high0 has no successor.  Punt.  */
	      return 0;
	    }
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of the
	 second.  */
      if (no_overlap)
	{
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_successor (high0),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    {
	      /* Canonicalize - [min, x] into - [-, x].  */
	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (low0)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (low0))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (low0,
					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
		      low0 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
			&& integer_zerop (low0))
		      low0 = 0;
		    break;
		  default:
		    break;
		  }

	      /* Canonicalize - [x, max] into - [x, -].  */
	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (high1)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (high1))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (high1,
					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
		      high1 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
						       high1, 1,
						       integer_one_node, 1)))
		      high1 = 0;
		    break;
		  default:
		    break;
		  }

	      /* The ranges might be also adjacent between the maximum and
		 minimum values of the given type.  For
		 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
		 return + [x + 1, y - 1].  */
	      if (low0 == 0 && high1 == 0)
		{
		  low = range_successor (high0);
		  high = range_predecessor (low1);
		  if (low == 0 || high == 0)
		    return 0;

		  in_p = 1;
		}
	      else
		return 0;
	    }
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
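
/* Illustrative example (not from the original source): merging +[48, 57]
   (in0_p = 1) with +[56, 65] (in1_p = 1), as for the conjunction
   (c >= 48 && c <= 57) && (c >= 56 && c <= 65).  Range 0 already starts
   first, the ranges overlap, and the second is not a subset, so the
   first case above produces in_p = 1, low = low1 = 56, high = high0 = 57:
   the single range test c in +[56, 57].  */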
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is being used also to optimize
   A op B ? C : A, by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (location_t loc, tree type,
				tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && (FLOAT_TYPE_P (TREE_TYPE (arg01))
	  ? real_zerop (arg01)
	  : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
	     /* In the case that A is of the form X-Y, '-A' (arg2) may
		have already been folded to Y-X, check for that.  */
	  || (TREE_CODE (arg1) == MINUS_EXPR
	      && TREE_CODE (arg2) == MINUS_EXPR
	      && operand_equal_p (TREE_OPERAND (arg1, 0),
				  TREE_OPERAND (arg2, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg1, 1),
				  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	tem = fold_convert_loc (loc, arg1_type, arg1);
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
							  negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert_loc (loc, signed_type_for
				   (TREE_TYPE (arg1)), arg1);
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
	if (flag_trapping_math)
	  break;
      case LE_EXPR:
      case LT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert_loc (loc, signed_type_for
				   (TREE_TYPE (arg1)), arg1);
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return negate_expr (fold_convert_loc (loc, type, tem));
      default:
	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      else if (comp_code == EQ_EXPR)
	return build_int_cst (type, 0);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
	 as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
	  || (strcmp (lang_hooks.name, "GNU C++") != 0
	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
	  || ! maybe_lvalue_p (arg1)
	  || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
	{
	  comp_type = type;
	  comp_op0 = arg1;
	  comp_op1 = arg2;
	}

      switch (comp_code)
	{
	case EQ_EXPR:
	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
	case NE_EXPR:
	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
	case LE_EXPR:
	case LT_EXPR:
	case UNLE_EXPR:
	case UNLT_EXPR:
	  /* In C++ a ?: expression can be an lvalue, so put the
	     operand which will be used if they are equal first
	     so that we can convert this back to the
	     corresponding COND_EXPR.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
		    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2_loc (loc, MIN_EXPR, comp_type,
				       comp_op1, comp_op0);
	      return pedantic_non_lvalue_loc (loc,
					      fold_convert_loc (loc, type, tem));
	    }
	  break;
	case GE_EXPR:
	case GT_EXPR:
	case UNGE_EXPR:
	case UNGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
		    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2_loc (loc, MAX_EXPR, comp_type,
				       comp_op1, comp_op0);
	      return pedantic_non_lvalue_loc (loc,
					      fold_convert_loc (loc, type, tem));
	    }
	  break;
	case UNEQ_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, arg2));
	  break;
	case LTGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, arg1));
	  break;
	default:
	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	  break;
	}
    }

  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
	if (TREE_CODE (arg1) == INTEGER_CST)
	  break;
	/* We can replace A with C1 in this case.  */
	arg1 = fold_convert_loc (loc, type, arg01);
	return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);

      case LT_EXPR:
	/* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
	   MIN_EXPR, to preserve the signedness of the comparison.  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, tem));
	  }
	break;

      case LE_EXPR:
	/* If C1 is C2 - 1, this is min(A, C2), with the same care
	   as above.  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, tem));
	  }
	break;

      case GT_EXPR:
	/* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
	   MAX_EXPR, to preserve the signedness of the comparison.  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	  }
	break;

      case GE_EXPR:
	/* If C1 is C2 + 1, this is max(A, C2), with the same care as above.  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	  }
	break;
      case NE_EXPR:
	break;
      default:
	gcc_unreachable ();
      }

  return NULL_TREE;
}
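
/* Illustrative example (not from the original source): for a signed
   integer A (or a float with -fno-signed-zeros), the cases above fold

       A > 0 ? A : -A    ==>  ABS_EXPR <A>
       A < B ? A : B     ==>  MIN_EXPR <A, B>
       A >= B ? A : B    ==>  MAX_EXPR <A, B>

   and the final constant section recognizes e.g. A < 8 ? A : 7, where
   C1 == C2 + 1, as MIN_EXPR <A, 7>.  */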
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
		false) >= 2)
#endif
/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
	       || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  bool strict_overflow_p = false;
  tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
  tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
  tree tem;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when simplifying range test");

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (loc, type,
					 lhs != 0 ? lhs
					 : rhs != 0 ? rhs : integer_zero_node,
					 in_p, low, high))))
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
      return or_op ? invert_truthvalue_loc (loc, tem) : tem;
    }

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
	   && lhs != 0 && rhs != 0
	   && (code == TRUTH_ANDIF_EXPR
	       || code == TRUTH_ORIF_EXPR)
	   && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build2_loc (loc, code == TRUTH_ANDIF_EXPR
			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			   type, op0, op1);

      else if (!lang_hooks.decls.global_bindings_p ()
	       && !CONTAINS_PLACEHOLDER_P (lhs))
	{
	  tree common = save_expr (lhs);

	  if (0 != (lhs = build_range_check (loc, type, common,
					     or_op ? ! in0_p : in0_p,
					     low0, high0))
	      && (0 != (rhs = build_range_check (loc, type, common,
						 or_op ? ! in1_p : in1_p,
						 low1, high1))))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (warnmsg,
				       WARN_STRICT_OVERFLOW_COMPARISON);
	      return build2_loc (loc, code == TRUTH_ANDIF_EXPR
				 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
				 type, lhs, rhs);
	    }
	}
    }

  return 0;
}
/* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is sign-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1));

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (signed_type_for (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
			fold_convert (TREE_TYPE (c), mask));
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
}
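
/* Illustrative standalone sketch (not part of GCC): the shift/XOR trick
   above on plain 32-bit integers, for P = 8:

     int32_t sign = (c >> 7) & 1;                  // low-order copy of the sign bit
     int32_t ext  = (int32_t) ((uint32_t) sign << 31) >> 23;
                                                   // sign bit copied into bits 8..31
     int32_t res  = c ^ ext;                       // extra bits zero iff C was
                                                   // already sign-extended

   e.g. c = 0xFFFFFFFF (sign-extended -1) yields res = 0x000000FF, while
   c = 0x000000FF yields res = 0xFFFFFFFF, whose extra bits are nonzero.  */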
/* For an expression that has the form
     (A && B) || ~B
   or
     (A || B) && ~B,
   we can drop one of the inner expressions and simplify to
     A || ~B
   or
     A && ~B.

   LOC is the location of the resulting expression.  OP is the inner
   logical operation; the left-hand side in the examples above, while CMPOP
   is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
   removing a condition that guards another, as in
     (A != NULL && A->...) || A == NULL
   which we must not transform.  If RHS_ONLY is true, only eliminate the
   right-most operand of the inner logical operation.  */

static tree
merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
				 bool rhs_only)
{
  tree type = TREE_TYPE (cmpop);
  enum tree_code code = TREE_CODE (cmpop);
  enum tree_code truthop_code = TREE_CODE (op);
  tree lhs = TREE_OPERAND (op, 0);
  tree rhs = TREE_OPERAND (op, 1);
  tree orig_lhs = lhs, orig_rhs = rhs;
  enum tree_code rhs_code = TREE_CODE (rhs);
  enum tree_code lhs_code = TREE_CODE (lhs);
  enum tree_code inv_code;

  if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
    return NULL_TREE;

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return NULL_TREE;

  if (rhs_code == truthop_code)
    {
      tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
      if (newrhs != NULL_TREE)
	{
	  rhs = newrhs;
	  rhs_code = TREE_CODE (rhs);
	}
    }
  if (lhs_code == truthop_code && !rhs_only)
    {
      tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
      if (newlhs != NULL_TREE)
	{
	  lhs = newlhs;
	  lhs_code = TREE_CODE (lhs);
	}
    }

  inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
  if (inv_code == rhs_code
      && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return lhs;
  if (!rhs_only && inv_code == lhs_code
      && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return rhs;
  if (rhs != orig_rhs || lhs != orig_lhs)
    return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
			    lhs, rhs);
  return NULL_TREE;
}
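
/* Illustrative example (not from the original source): with
   op = (a < b && c != 0) and cmpop = (c == 0), the inverse of c == 0
   is c != 0, which matches the rhs of OP, so the function returns
   a < b and the caller can fold (a < b && c != 0) || c == 0 into
   (a < b) || c == 0.  The RHS_ONLY guard keeps the pointer case
   (A != NULL && A->f) || A == NULL from losing its NULL test.  */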
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
		    tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  HOST_WIDE_INT first_bit, end_bit;
  int volatilep;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
		    build_int_cst (TREE_TYPE (lhs), 0));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
		    build_int_cst (TREE_TYPE (rhs), 0));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      if (operand_equal_p (ll_arg, rl_arg, 0)
	  && operand_equal_p (lr_arg, rr_arg, 0))
	{
	  result = combine_comparisons (loc, code, lcode, rcode,
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
      else if (operand_equal_p (ll_arg, rr_arg, 0)
	       && operand_equal_p (lr_arg, rl_arg, 0))
	{
	  result = combine_comparisons (loc, code, lcode,
					swap_tree_comparison (rcode),
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  */

  if (BRANCH_COST (optimize_function_for_speed_p (cfun),
		   false) >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
	  && lcode == NE_EXPR && integer_zerop (lr_arg)
	  && rcode == NE_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2_loc (loc, NE_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2_loc (loc, EQ_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (loc, ll_arg,
				     &ll_bitsize, &ll_bitpos, &ll_mode,
				     &ll_unsignedp, &volatilep, &ll_mask,
				     &ll_and_mask);
  lr_inner = decode_field_reference (loc, lr_arg,
				     &lr_bitsize, &lr_bitpos, &lr_mode,
				     &lr_unsignedp, &volatilep, &lr_mask,
				     &lr_and_mask);
  rl_inner = decode_field_reference (loc, rl_arg,
				     &rl_bitsize, &rl_bitpos, &rl_mode,
				     &rl_unsignedp, &volatilep, &rl_mask,
				     &rl_and_mask);
  rr_inner = decode_field_reference (loc, rr_arg,
				     &rr_bitsize, &rr_bitpos, &rr_mode,
				     &rr_unsignedp, &volatilep, &rr_mask,
				     &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
	   || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
	{
	  /* Make the left operand unsigned, since we are only interested
	     in the value of one bit.  Otherwise we are doing the wrong
	     thing below.  */
	  ll_unsignedp = 1;
	  l_const = ll_mask;
	}
      else
	return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
	{
	  rl_unsignedp = 1;
	  r_const = rl_mask;
	}
      else
	return 0;
    }

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
			  volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
			 size_int (xll_bitpos));
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
			 size_int (xrl_bitpos));

  if (l_const)
    {
      l_const = fold_convert_loc (loc, lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,
							 lntype, ll_mask))))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }
  if (r_const)
    {
      r_const = fold_convert_loc (loc, lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,
							 lntype, rl_mask))))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
	  /* Make sure the two fields on the right
	     correspond to the left without being swapped.  */
	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
	return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
			      volatilep);
      if (rnmode == VOIDmode)
	return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
	{
	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
	}

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
							    rntype, lr_mask),
			     size_int (xlr_bitpos));
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
							    rntype, rr_mask),
			     size_int (xrr_bitpos));

      /* Make a mask that corresponds to both fields being compared.
	 Do this for both items being compared.  If the operands are the
	 same size and the bits being compared are in the same position
	 then we can do this by masking both and comparing the masked
	 results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
	{
	  lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
				    ll_unsignedp || rl_unsignedp);
	  if (! all_ones_mask_p (ll_mask, lnbitsize))
	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

	  rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
				    lr_unsignedp || rr_unsignedp);
	  if (! all_ones_mask_p (lr_mask, rnbitsize))
	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
	}

      /* There is still another way we can do something:  If both pairs of
	 fields being compared are adjacent, we may be able to make a wider
	 field containing them both.

	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
	 the mask must be shifted to account for the shift done by
	 make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
	   && lr_bitsize + lr_bitpos == rr_bitpos)
	  || (ll_bitpos == rl_bitpos + rl_bitsize
	      && lr_bitpos == rr_bitpos + rr_bitsize))
	{
	  tree type;

	  lhs = make_bit_field_ref (loc, ll_inner, lntype,
				    ll_bitsize + rl_bitsize,
				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
	  rhs = make_bit_field_ref (loc, lr_inner, rntype,
				    lr_bitsize + rr_bitsize,
				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
				 size_int (MIN (xll_bitpos, xrl_bitpos)));
	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
				 size_int (MIN (xlr_bitpos, xrr_bitpos)));

	  /* Convert to the smaller type before masking out unwanted bits.  */
	  type = lntype;
	  if (lntype != rntype)
	    {
	      if (lnbitsize > rnbitsize)
		{
		  lhs = fold_convert_loc (loc, rntype, lhs);
		  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
		  type = rntype;
		}
	      else if (lnbitsize < rnbitsize)
		{
		  rhs = fold_convert_loc (loc, lntype, rhs);
		  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
		  type = lntype;
		}
	    }

	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
	}

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
			   const_binop (BIT_AND_EXPR, result, r_const)) != 1)
    {
      if (wanted_code == NE_EXPR)
	{
	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
	  return constant_boolean_node (true, truth_type);
	}
      else
	{
	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
	  return constant_boolean_node (false, truth_type);
	}
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
			       ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);

  return build2_loc (loc, wanted_code, truth_type, result,
		     const_binop (BIT_IOR_EXPR, l_const, r_const));
}
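
/* Illustrative example (not from the original source): for a struct
   with adjacent bit-fields, say

     struct s { unsigned a : 4; unsigned b : 4; } *p;

   the test p->a == 2 && p->b == 4 loads both fields with a single
   make_bit_field_ref, masks them, and compares against the merged
   constant: roughly (*(unsigned char *) p & 0xff) == 0x42 on a
   little-endian target (the exact bit layout is target-dependent).  */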
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
			    tree op0, tree op1)
{
  tree arg0 = op0;
  enum tree_code op_code;
  tree comp_const;
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_OVERFLOW (minmax_const))
    return NULL_TREE;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (code)
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      {
	tree tem
	  = optimize_minmax_comparison (loc,
					invert_tree_comparison (code, false),
					type, op0, op1);
	if (tem)
	  return invert_truthvalue_loc (loc, tem);
	return NULL_TREE;
      }

    case GE_EXPR:
      return
	fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
			 optimize_minmax_comparison
			 (loc, EQ_EXPR, type, arg0, comp_const),
			 optimize_minmax_comparison
			 (loc, GT_EXPR, type, arg0, comp_const));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
	/* MAX (X, 0) == 0  ->  X <= 0  */
	return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR && consts_lt)
	/* MAX (X, 0) == 5  ->  X == 5   */
	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) == -1  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else if (consts_equal)
	/* MIN (X, 0) == 0  ->  X >= 0  */
	return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);

      else if (consts_lt)
	/* MIN (X, 0) == 5  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
	/* MIN (X, 0) == -1  ->  X == -1  */
	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
	/* MAX (X, 0) > 0  ->  X > 0
	   MAX (X, 0) > 5  ->  X > 5  */
	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) > -1  ->  true  */
	return omit_one_operand_loc (loc, type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
	/* MIN (X, 0) > 0  ->  false
	   MIN (X, 0) > 5  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
	/* MIN (X, 0) > -1  ->  X > -1  */
	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

    default:
      gcc_unreachable ();
    }
}
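
/* Illustrative note (not from the original source): only EQ_EXPR and
   GT_EXPR are handled directly.  E.g. MIN (X, 10) <= 5 first becomes
   the inverted MIN (X, 10) > 5; the GT_EXPR case rewrites that to
   X > 5, and invert_truthvalue_loc turns the result back into
   X <= 5.  */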
/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.

   We set *STRICT_OVERFLOW_P to true if the return value depends on
   signed overflow being undefined.  Otherwise we do not change
   *STRICT_OVERFLOW_P.  */

static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
		bool *strict_overflow_p)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  static int depth;
  tree ret;

  if (depth > 3)
    return NULL;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
  depth--;

  return ret;
}

static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
		  bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
				   > GET_MODE_SIZE (TYPE_MODE (type)))
		? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;
  bool sub_strict_overflow_p;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
	 or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
	return const_binop (code, fold_convert (ctype, t),
			    fold_convert (ctype, c));
      break;

    CASE_CONVERT: case NON_LVALUE_EXPR:
      /* If op0 is an expression ... */
      if ((COMPARISON_CLASS_P (op0)
	   || UNARY_CLASS_P (op0)
	   || BINARY_CLASS_P (op0)
	   || VL_EXP_CLASS_P (op0)
	   || EXPRESSION_CLASS_P (op0))
	  /* ... and has wrapping overflow, and its type is smaller
	     than ctype, then we cannot pass through as widening.  */
	  && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
	       && (TYPE_PRECISION (ctype)
		   > TYPE_PRECISION (TREE_TYPE (op0))))
	      /* ... or this is a truncation (t is narrower than op0),
		 then we cannot pass through this narrowing.  */
	      || (TYPE_PRECISION (type)
		  < TYPE_PRECISION (TREE_TYPE (op0)))
	      /* ... or signedness changes for division or modulus,
		 then we cannot pass through this conversion.  */
	      || (code != MULT_EXPR
		  && (TYPE_UNSIGNED (ctype)
		      != TYPE_UNSIGNED (TREE_TYPE (op0))))
	      /* ... or has undefined overflow while the converted to
		 type has not, we cannot do the operation in the inner type
		 as that would introduce undefined overflow.  */
	      || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
		  && !TYPE_OVERFLOW_UNDEFINED (type))))
	break;

      /* Pass the constant down and see if we can make a simplification.  If
	 we can, replace this expression with the inner simplification for
	 possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
	  && TREE_CODE (t2) == INTEGER_CST
	  && !TREE_OVERFLOW (t2)
	  && (0 != (t1 = extract_muldiv (op0, t2, code,
					 code == MULT_EXPR
					 ? ctype : NULL_TREE,
					 strict_overflow_p))))
	return t1;
      break;

    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
	 must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
	{
	  tree cstype = (*signed_type_for) (ctype);
	  if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
	      != 0)
	    {
	      t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
	      return fold_convert (ctype, t1);
	    }
	  break;
	}
      /* If the constant is negative, we cannot simplify this.  */
      if (tree_int_cst_sgn (c) == -1)
	break;
      /* FALLTHROUGH */
    case NEGATE_EXPR:
      if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
	  != 0)
	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
	 this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
	break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      sub_strict_overflow_p = false;
      if ((t1 = extract_muldiv (op0, c, code, wide_type,
				&sub_strict_overflow_p)) != 0
	  && (t2 = extract_muldiv (op1, c, code, wide_type,
				   &sub_strict_overflow_p)) != 0)
	{
	  if (tree_int_cst_sgn (c) < 0)
	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
	 or floor division, by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  Signed
	 left-shift overflow is implementation-defined rather than
	 undefined in C90, so do not convert signed left shift into
	 multiplication.  */
      if (TREE_CODE (op1) == INTEGER_CST
	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
	  && TREE_INT_CST_HIGH (op1) == 0
	  && 0 != (t1 = fold_convert (ctype,
				      const_binop (LSHIFT_EXPR,
						   size_one_node, op1)))
	  && !TREE_OVERFLOW (t1))
	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
				       ? MULT_EXPR : FLOOR_DIV_EXPR,
				       ctype,
				       fold_convert (ctype, op0),
				       t1),
			       c, code, wide_type, strict_overflow_p);
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
      sub_strict_overflow_p = false;
      t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
      t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
      if (t1 != 0 && t2 != 0
	  && (code == MULT_EXPR
	      /* If not multiplication, we can only do this if both operands
		 are divisible by c.  */
	      || (multiple_of_p (ctype, op0, c)
		  && multiple_of_p (ctype, op1, c))))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}

      /* If this was a subtraction, negate OP1 and set it to be an addition.
	 This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
	{
	  tcode = PLUS_EXPR, op1 = negate_expr (op1);
	  /* If OP1 was not easily negatable, the constant may be OP0.  */
	  if (TREE_CODE (op0) == INTEGER_CST)
	    {
	      tree tem = op0;
	      op0 = op1;
	      op1 = tem;
	      tem = t1;
	      t1 = t2;
	      t2 = tem;
	    }
	}

      if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If either OP1 or C are negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
	    break;
	}

      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
	{
	  op1 = const_binop (code, fold_convert (ctype, op1),
			     fold_convert (ctype, c));
	  /* We allow the constant to overflow with wrapping semantics.  */
	  if (op1 == 0
	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
	    break;
	}
      else
	break;

      /* If we have an unsigned type, we cannot widen the operation since it
	 will change the result if the original computation overflowed.  */
      if (TYPE_UNSIGNED (ctype) && ctype != type)
	break;

      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow.  */
      if (code == MULT_EXPR)
	return fold_build2 (tcode, ctype,
			    fold_build2 (code, ctype,
					 fold_convert (ctype, op0),
					 fold_convert (ctype, c)),
			    op1);

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  /* If the multiplication can overflow we cannot optimize this.  */
	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
	{
	  *strict_overflow_p = true;
	  return omit_one_operand (type, integer_zero_node, op0);
	}

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && (t1 = extract_muldiv (op0, c, code, wide_type,
				   strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && (t1 = extract_muldiv (op1, c, code, wide_type,
					strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
			    fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code)
	{
	  double_int mul;
	  bool overflow_p;
	  unsigned prec = TYPE_PRECISION (ctype);
	  bool uns = TYPE_UNSIGNED (ctype);
	  double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
	  double_int dic = tree_to_double_int (c).ext (prec, uns);
	  mul = diop1.mul_with_sign (dic, false, &overflow_p);
	  overflow_p = ((!uns && overflow_p)
			| TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
	  if (!double_int_fits_to_tree_p (ctype, mul)
	      && ((uns && tcode != MULT_EXPR) || !uns))
	    overflow_p = 1;
	  if (!overflow_p)
	    return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				double_int_to_tree (ctype, mul));
	}

      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with either
	 an operation of CODE or TCODE.

	 If we have an unsigned type, we cannot do this since it will change
	 the result if the original computation overflowed.  */
      if (TYPE_OVERFLOW_UNDEFINED (ctype)
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
		  && code != MULT_EXPR)))
	{
	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     op1, c)));
	    }
	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (code, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     c, op1)));
	    }
	}
      break;

    default:
      break;
    }

  return 0;
}
/* Return a node which has the indicated constant VALUE (either 0 or
   1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
   and is of the indicated TYPE.  */

tree
constant_boolean_node (bool value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else if (TREE_CODE (type) == VECTOR_TYPE)
    return build_vector_from_val (type,
				  build_int_cst (TREE_TYPE (type),
						 value ? -1 : 0));
  else
    return fold_convert (type, value ? integer_one_node : integer_zero_node);
}
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (location_t loc,
				     enum tree_code code,
				     tree type, tree op0, tree op1,
				     tree cond, tree arg, int cond_first_p)
{
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;
  enum tree_code cond_code = COND_EXPR;

  if (TREE_CODE (cond) == COND_EXPR
      || TREE_CODE (cond) == VEC_COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an exception, then it does not make
	 sense to try to perform a logical or arithmetic operation
	 involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	rhs = false_value;
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
    cond_code = VEC_COND_EXPR;

  /* This transformation is only worthwhile if we don't have to wrap ARG
     in a SAVE_EXPR and the operation can be simplified without recursing
     on at least one of the branches once it's pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg)
      && (TREE_SIDE_EFFECTS (arg)
	  || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
	  || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
    return NULL_TREE;

  arg = fold_convert_loc (loc, arg_type, arg);
  if (lhs == 0)
    {
      true_value = fold_convert_loc (loc, cond_type, true_value);
      if (cond_first_p)
	lhs = fold_build2_loc (loc, code, type, true_value, arg);
      else
	lhs = fold_build2_loc (loc, code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert_loc (loc, cond_type, false_value);
      if (cond_first_p)
	rhs = fold_build2_loc (loc, code, type, false_value, arg);
      else
	rhs = fold_build2_loc (loc, code, type, arg, false_value);
    }

  /* Check that we have simplified at least one of the branches.  */
  if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
    return NULL_TREE;

  return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
}
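/* Worked example (illustrative): for `4 + (b ? x : y)' -- ARG is the
   constant 4 and COND_FIRST_P is zero -- the result is
   `b ? 4 + x : 4 + y'.  Conversely, a non-constant ARG combined with
   constant branch values is rejected by the guard above, since
   neither branch (e.g. `a + 2') would simplify further.  */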
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

bool
fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (TYPE_MODE (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (TREE_CODE (addend) == REAL_CST
      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}
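/* Worked example (illustrative): with signed zeros ignored
   (e.g. -ffast-math), `x + 0.0' folds to `x'.  With signed zeros
   honored it does not, because `-0.0 + 0.0' is `+0.0'; `x - 0.0'
   still folds unless sign-dependent rounding is honored, since in
   round-towards-negative-infinity `0.0 - 0.0' is `-0.0'.  */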
/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_mathfn_compare (location_t loc,
		     enum built_in_function fcode, enum tree_code code,
		     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      tree arg = CALL_EXPR_ARG (arg0, 0);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
	{
	  /* sqrt(x) < y is always false, if y is negative.  */
	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
	    return omit_one_operand_loc (loc, type, integer_zero_node, arg);

	  /* sqrt(x) > y is always true, if y is negative and we
	     don't care about NaNs, i.e. negative values of x.  */
	  if (code == NE_EXPR || !HONOR_NANS (mode))
	    return omit_one_operand_loc (loc, type, integer_one_node, arg);

	  /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
	  return fold_build2_loc (loc, GE_EXPR, type, arg,
				  build_real (TREE_TYPE (arg), dconst0));
	}
      else if (code == GT_EXPR || code == GE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) > y is x == +Inf, when y is very large.  */
	      if (HONOR_INFINITIES (mode))
		return fold_build2_loc (loc, EQ_EXPR, type, arg,
					build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) > y is always false, when y is very large
		 and we don't care about infinities.  */
	      return omit_one_operand_loc (loc, type, integer_zero_node, arg);
	    }

	  /* sqrt(x) > c is the same as x > c*c.  */
	  return fold_build2_loc (loc, code, type, arg,
				  build_real (TREE_TYPE (arg), c2));
	}
      else if (code == LT_EXPR || code == LE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) < y is always true, when y is a very large
		 value and we don't care about NaNs or Infinities.  */
	      if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
		return omit_one_operand_loc (loc, type, integer_one_node, arg);

	      /* sqrt(x) < y is x != +Inf when y is very large and we
		 don't care about NaNs.  */
	      if (! HONOR_NANS (mode))
		return fold_build2_loc (loc, NE_EXPR, type, arg,
					build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) < y is x >= 0 when y is very large and we
		 don't care about Infinities.  */
	      if (! HONOR_INFINITIES (mode))
		return fold_build2_loc (loc, GE_EXPR, type, arg,
					build_real (TREE_TYPE (arg), dconst0));

	      /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
	      arg = save_expr (arg);
	      return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				      fold_build2_loc (loc, GE_EXPR, type, arg,
						       build_real (TREE_TYPE (arg),
								   dconst0)),
				      fold_build2_loc (loc, NE_EXPR, type, arg,
						       build_real (TREE_TYPE (arg),
								   c2)));
	    }

	  /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
	  if (! HONOR_NANS (mode))
	    return fold_build2_loc (loc, code, type, arg,
				    build_real (TREE_TYPE (arg), c2));

	  /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
	  arg = save_expr (arg);
	  return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				  fold_build2_loc (loc, GE_EXPR, type, arg,
						   build_real (TREE_TYPE (arg),
							       dconst0)),
				  fold_build2_loc (loc, code, type, arg,
						   build_real (TREE_TYPE (arg),
							       c2)));
	}
    }

  return NULL_TREE;
}
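/* Worked example (illustrative): when NaNs are ignored
   (e.g. -ffinite-math-only), `sqrt (x) < 3.0' becomes `x < 9.0' and
   `sqrt (x) > -1.0' folds to constant true; with NaNs honored the
   latter instead becomes `x >= 0.0', since a negative X makes
   sqrt (x) unordered.  */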
/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_inf_compare (location_t loc, enum tree_code code, tree type,
		  tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
	return NULL_TREE;
      return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
      arg0 = save_expr (arg0);
      return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
	return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
				arg0, build_real (TREE_TYPE (arg0), max));

      temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));
      return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);

    default:
      break;
    }

  return NULL_TREE;
}
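/* Worked example (illustrative): for double X, `x < __builtin_inf ()'
   is rewritten as `x <= DBL_MAX', and with NaNs ignored
   `x <= __builtin_inf ()' folds to constant true.  */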
/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_div_compare (location_t loc,
		  enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  double_int val;
  bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
  bool neg_overflow;
  bool overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1);  */
  val = TREE_INT_CST (arg01)
	.mul_with_sign (TREE_INT_CST (arg1), unsigned_p, &overflow);
  prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
  neg_overflow = false;

  if (unsigned_p)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp).  */
      val = TREE_INT_CST (prod)
	    .add_with_sign (TREE_INT_CST (tmp), unsigned_p, &overflow);
      hi = force_fit_type_double (TREE_TYPE (arg00), val,
				  -1, overflow | TREE_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  neg_overflow = true;
	  lo = int_const_binop (MINUS_EXPR, prod, tmp);
	  hi = prod;
	  break;

	case 0:
	  lo = fold_negate_const (tmp, TREE_TYPE (arg0));
	  hi = tmp;
	  break;

	case 1:
	  hi = int_const_binop (PLUS_EXPR, prod, tmp);
	  lo = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  hi = int_const_binop (MINUS_EXPR, prod, tmp);
	  lo = prod;
	  break;

	case 0:
	  hi = fold_negate_const (tmp, TREE_TYPE (arg0));
	  lo = tmp;
	  break;

	case 1:
	  neg_overflow = true;
	  lo = int_const_binop (PLUS_EXPR, prod, tmp);
	  hi = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand_loc (loc, type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}
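/* Worked example (illustrative): for signed `x / 4 == 2' with
   truncating division, the bounds are lo = 8 and hi = 11, so the
   comparison becomes the range check `8 <= x && x <= 11'
   (typically emitted as `(unsigned) (x - 8) <= 3'); `x / 4 > 2'
   likewise becomes `x > 11'.  */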
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of the test
   using a sign test.  Otherwise return NULL.  TYPE is the desired
   result type.  */

static tree
fold_single_bit_test_into_sign_test (location_t loc,
				     enum tree_code code, tree arg0, tree arg1,
				     tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      /* If we have (A & C) != 0 where C is the sign bit of A, convert
	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      if (arg00 != NULL_TREE
	  /* This is only a win if casting to a signed type is cheap,
	     i.e. when arg00's type is not a partial mode.  */
	  && TYPE_PRECISION (TREE_TYPE (arg00))
	     == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
	{
	  tree stype = signed_type_for (TREE_TYPE (arg00));
	  return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
				  result_type,
				  fold_convert_loc (loc, stype, arg00),
				  build_int_cst (stype, 0));
	}
    }

  return NULL_TREE;
}
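/* Worked example (illustrative): for a 32-bit unsigned X,
   `(x & 0x80000000) != 0' becomes `(int) x < 0' and
   `(x & 0x80000000) == 0' becomes `(int) x >= 0', since the mask is
   exactly the sign bit of the corresponding signed type.  */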
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */

tree
fold_single_bit_test (location_t loc, enum tree_code code,
		      tree arg0, tree arg1, tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem, one;

      /* First, see if we can fold the single bit test into a sign-bit
	 test.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
						 result_type);
      if (tem)
	return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
	 convert that into ((A >> C2) & 1).  Where C2 = log2(C).
	 Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
		      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert_loc (loc, intermediate_type, inner);

      if (bitnum != 0)
	inner = build2 (RSHIFT_EXPR, intermediate_type,
			inner, size_int (bitnum));

      one = build_int_cst (intermediate_type, 1);

      if (code == EQ_EXPR)
	inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type,
				 inner, one);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);

      /* Make sure to return the proper type.  */
      inner = fold_convert_loc (loc, result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
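/* Worked example (illustrative): `(x & 8) != 0' with result type int
   becomes `(int) ((x >> 3) & 1)', and `(x & 8) == 0' becomes
   `(int) (((x >> 3) ^ 1) & 1)'.  */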
/* Check whether we are allowed to reorder operands arg0 and arg1,
   such that the evaluation of arg1 occurs before arg0.  */

static bool
reorder_operands_p (const_tree arg0, const_tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
	 && ! TREE_SIDE_EFFECTS (arg1);
}
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */

bool
tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
{
  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return 0;
  if (TREE_CODE (arg0) == INTEGER_CST)
    return 1;

  if (TREE_CODE (arg1) == REAL_CST)
    return 0;
  if (TREE_CODE (arg0) == REAL_CST)
    return 1;

  if (TREE_CODE (arg1) == FIXED_CST)
    return 0;
  if (TREE_CODE (arg0) == FIXED_CST)
    return 1;

  if (TREE_CODE (arg1) == COMPLEX_CST)
    return 0;
  if (TREE_CODE (arg0) == COMPLEX_CST)
    return 1;

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (optimize_function_for_size_p (cfun))
    return 0;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  /* It is preferable to swap two SSA_NAME to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return 1;

  /* Put SSA_NAMEs last.  */
  if (TREE_CODE (arg1) == SSA_NAME)
    return 0;
  if (TREE_CODE (arg0) == SSA_NAME)
    return 1;

  /* Put variables last.  */
  if (DECL_P (arg1))
    return 0;
  if (DECL_P (arg0))
    return 1;

  return 0;
}
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
   ARG0 is extended to a wider type.  */

static tree
fold_widened_comparison (location_t loc, enum tree_code code,
			 tree type, tree arg0, tree arg1)
{
  tree arg0_unw = get_unwidened (arg0, NULL_TREE);
  tree arg1_unw;
  tree shorter_type, outer_type;
  tree min, max;
  bool above, below;

  if (arg0_unw == arg0)
    return NULL_TREE;
  shorter_type = TREE_TYPE (arg0_unw);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (shorter_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
    return NULL_TREE;

  arg1_unw = get_unwidened (arg1, NULL_TREE);

  /* If possible, express the comparison in the shorter mode.  */
  if ((code == EQ_EXPR || code == NE_EXPR
       || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
      && (TREE_TYPE (arg1_unw) == shorter_type
	  || ((TYPE_PRECISION (shorter_type)
	       >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
	      && (TYPE_UNSIGNED (shorter_type)
		  == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
	  || (TREE_CODE (arg1_unw) == INTEGER_CST
	      && (TREE_CODE (shorter_type) == INTEGER_TYPE
		  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
	      && int_fits_type_p (arg1_unw, shorter_type))))
    return fold_build2_loc (loc, code, type, arg0_unw,
			    fold_convert_loc (loc, shorter_type, arg1_unw));

  if (TREE_CODE (arg1_unw) != INTEGER_CST
      || TREE_CODE (shorter_type) != INTEGER_TYPE
      || !int_fits_type_p (arg1_unw, shorter_type))
    return NULL_TREE;

  /* If we are comparing with the integer that does not fit into the range
     of the shorter type, the result is known.  */
  outer_type = TREE_TYPE (arg1_unw);
  min = lower_bound_in_type (outer_type, shorter_type);
  max = upper_bound_in_type (outer_type, shorter_type);

  above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   max, arg1_unw));
  below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   arg1_unw, min));

  switch (code)
    {
    case EQ_EXPR:
      if (above || below)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;

    case NE_EXPR:
      if (above || below)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;

    case LT_EXPR:
    case LE_EXPR:
      if (above)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      else if (below)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;

    case GT_EXPR:
    case GE_EXPR:
      if (above)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      else if (below)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
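/* Worked example (illustrative): for `(int) c == 5' with unsigned
   char C, the comparison is performed in the narrower type as
   `c == 5'; for `(int) c == 300', 300 does not fit in unsigned char,
   so the result folds to constant false.  */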
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
   ARG0 just the signedness is changed.  */

static tree
fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
			      tree arg0, tree arg1)
{
  tree arg0_inner;
  tree inner_type, outer_type;

  if (!CONVERT_EXPR_P (arg0))
    return NULL_TREE;

  outer_type = TREE_TYPE (arg0);
  arg0_inner = TREE_OPERAND (arg0, 0);
  inner_type = TREE_TYPE (arg0_inner);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (inner_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) != INTEGER_CST
      && !(CONVERT_EXPR_P (arg1)
	   && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
    return NULL_TREE;

  if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
      && code != NE_EXPR
      && code != EQ_EXPR)
    return NULL_TREE;

  if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) == INTEGER_CST)
    arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
				  0, TREE_OVERFLOW (arg1));
  else
    arg1 = fold_convert_loc (loc, inner_type, arg1);

  return fold_build2_loc (loc, code, type, arg0_inner, arg1);
}
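/* Worked example (illustrative): `(unsigned int) i == 5U' with int I
   becomes `i == 5'.  The same change of signedness with `<' is left
   alone, because ordering comparisons are not invariant under it.  */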
/* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
   step of the array.  Reconstructs s and delta in the case of s *
   delta being an integer constant (and thus already folded).  ADDR is
   the address.  OP1 is the multiplicative expression.  If the
   function succeeds, the new address expression is returned.
   Otherwise NULL_TREE is returned.  LOC is the location of the
   resulting expression.  */

static tree
try_move_mult_to_index (location_t loc, tree addr, tree op1)
{
  tree s, delta, step;
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;
  bool mdim = false;

  /* Strip the nops that might be added when converting op1 to sizetype.  */
  STRIP_NOPS (op1);

  /* Canonicalize op1 into a possibly non-constant delta
     and an INTEGER_CST s.  */
  if (TREE_CODE (op1) == MULT_EXPR)
    {
      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  s = arg0;
	  delta = arg1;
	}
      else if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  s = arg1;
	  delta = arg0;
	}
      else
	return NULL_TREE;
    }
  else if (TREE_CODE (op1) == INTEGER_CST)
    {
      delta = op1;
      s = NULL_TREE;
    }
  else
    {
      /* Treat op1 as delta * 1.  */
      delta = op1;
      s = integer_one_node;
    }

  /* Handle &x.array the same as we would handle &x.array[0].  */
  if (TREE_CODE (ref) == COMPONENT_REF
      && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
    {
      tree domain;

      /* Remember if this was a multi-dimensional array.  */
      if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
	mdim = true;

      domain = TYPE_DOMAIN (TREE_TYPE (ref));
      if (! domain)
	goto cont;
      itype = TREE_TYPE (domain);

      step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
      if (TREE_CODE (step) != INTEGER_CST)
	goto cont;

      if (s)
	{
	  if (! tree_int_cst_equal (step, s))
	    goto cont;
	}
      else
	{
	  /* Try if delta is a multiple of step.  */
	  tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
	  if (! tmp)
	    goto cont;
	  delta = tmp;
	}

      /* Only fold here if we can verify we do not overflow one
	 dimension of a multi-dimensional array.  */
      if (mdim)
	{
	  tree tmp;

	  if (!TYPE_MIN_VALUE (domain)
	      || !TYPE_MAX_VALUE (domain)
	      || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
	    goto cont;

	  tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
				 fold_convert_loc (loc, itype,
						   TYPE_MIN_VALUE (domain)),
				 fold_convert_loc (loc, itype, delta));
	  if (TREE_CODE (tmp) != INTEGER_CST
	      || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
	    goto cont;
	}

      /* We found a suitable component reference.  */

      pref = TREE_OPERAND (addr, 0);
      ret = copy_node (pref);
      SET_EXPR_LOCATION (ret, loc);

      ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
			fold_build2_loc
			  (loc, PLUS_EXPR, itype,
			   fold_convert_loc (loc, itype,
					     TYPE_MIN_VALUE
					       (TYPE_DOMAIN (TREE_TYPE (ref)))),
			   fold_convert_loc (loc, itype, delta)),
			NULL_TREE, NULL_TREE);
      return build_fold_addr_expr_loc (loc, ret);
    }

cont:

  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
	{
	  tree domain;

	  /* Remember if this was a multi-dimensional array.  */
	  if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
	    mdim = true;

	  domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
	  if (! domain)
	    continue;
	  itype = TREE_TYPE (domain);

	  step = array_ref_element_size (ref);
	  if (TREE_CODE (step) != INTEGER_CST)
	    continue;

	  if (s)
	    {
	      if (! tree_int_cst_equal (step, s))
		continue;
	    }
	  else
	    {
	      /* Try if delta is a multiple of step.  */
	      tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
	      if (! tmp)
		continue;
	      delta = tmp;
	    }

	  /* Only fold here if we can verify we do not overflow one
	     dimension of a multi-dimensional array.  */
	  if (mdim)
	    {
	      tree tmp;

	      if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
		  || !TYPE_MAX_VALUE (domain)
		  || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
		continue;

	      tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
				     fold_convert_loc (loc, itype,
						       TREE_OPERAND (ref, 1)),
				     fold_convert_loc (loc, itype, delta));
	      if (!tmp
		  || TREE_CODE (tmp) != INTEGER_CST
		  || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
		continue;
	    }

	  break;
	}
      else
	mdim = false;

      if (!handled_component_p (ref))
	return NULL_TREE;
    }

  /* We found the suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  SET_EXPR_LOCATION (ret, loc);
  pos = ret;

  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  TREE_OPERAND (pos, 1)
    = fold_build2_loc (loc, PLUS_EXPR, itype,
		       fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
		       fold_convert_loc (loc, itype, delta));
  return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
}
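/* Worked example (illustrative): for `int a[10]', the address
   `&a[2] p+ 4 * d' steps by the element size 4, so it is rewritten as
   `&a[2 + d]'; a multiplier that is not a multiple of the element
   size leaves the expression untouched.  */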
/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */

static tree
fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
{
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;

  if (TREE_CODE (bound) == LT_EXPR)
    a = TREE_OPERAND (bound, 0);
  else if (TREE_CODE (bound) == GT_EXPR)
    a = TREE_OPERAND (bound, 1);
  else
    return NULL_TREE;

  typea = TREE_TYPE (a);
  if (!INTEGRAL_TYPE_P (typea)
      && !POINTER_TYPE_P (typea))
    return NULL_TREE;

  if (TREE_CODE (ineq) == LT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 1);
      y = TREE_OPERAND (ineq, 0);
    }
  else if (TREE_CODE (ineq) == GT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 0);
      y = TREE_OPERAND (ineq, 1);
    }
  else
    return NULL_TREE;

  if (TREE_TYPE (a1) != typea)
    return NULL_TREE;

  if (POINTER_TYPE_P (typea))
    {
      /* Convert the pointer types into integer before taking the difference.  */
      tree ta = fold_convert_loc (loc, ssizetype, a);
      tree ta1 = fold_convert_loc (loc, ssizetype, a1);
      diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
    }
  else
    diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);

  if (!diff || !integer_onep (diff))
    return NULL_TREE;

  return fold_build2_loc (loc, GE_EXPR, type, a, y);
}
/* Fold a sum or difference of at least one multiplication.
   Returns the folded tree or NULL if no simplification could be made.  */

static tree
fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
			  tree arg0, tree arg1)
{
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else if (TREE_CODE (arg0) == INTEGER_CST)
    {
      arg00 = build_one_cst (type);
      arg01 = arg0;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      arg00 = arg0;
      arg01 = build_one_cst (type);
    }
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else if (TREE_CODE (arg1) == INTEGER_CST)
    {
      arg10 = build_one_cst (type);
      /* As we canonicalize A - 2 to A + -2 get rid of that sign for
	 the purpose of this canonicalization.  */
      if (TREE_INT_CST_HIGH (arg1) == -1
	  && negate_expr_p (arg1)
	  && code == PLUS_EXPR)
	{
	  arg11 = negate_expr (arg1);
	  code = MINUS_EXPR;
	}
      else
	arg11 = arg1;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      arg10 = arg1;
      arg11 = build_one_cst (type);
    }
  same = NULL_TREE;

  if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
  else if (host_integerp (arg01, 0)
	   && host_integerp (arg11, 0))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = TREE_INT_CST_LOW (arg01);
      int11 = TREE_INT_CST_LOW (arg11);

      /* Move min of absolute values to int11.  */
      if (absu_hwi (int01) < absu_hwi (int11))
	{
	  tmp = int01, int01 = int11, int11 = tmp;
	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
	  maybe_same = arg01;
	  swap = true;
	}
      else
	maybe_same = arg11;

      if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
	  /* The remainder should not be a constant, otherwise we
	     end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
	     increased the number of multiplications necessary.  */
	  && TREE_CODE (arg10) != INTEGER_CST)
	{
	  alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
				  build_int_cst (TREE_TYPE (arg00),
						 int01 / int11));
	  alt1 = arg10;
	  same = maybe_same;
	  if (swap)
	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
	}
    }

  if (same)
    return fold_build2_loc (loc, MULT_EXPR, type,
			    fold_build2_loc (loc, code, type,
					     fold_convert_loc (loc, type, alt0),
					     fold_convert_loc (loc, type, alt1)),
			    fold_convert_loc (loc, type, same));

  return NULL_TREE;
}
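/* Worked examples (illustrative): `x * 3 + x * 5' folds to `x * 8',
   `x * 7 - x' folds to `x * 6' via the implicit `x * 1', and the
   power-of-two path turns `i * 12 + j * 4' into `(i * 3 + j) * 4'.  */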
/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_int (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  if (total_bytes > len)
    return 0;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (bitpos < HOST_BITS_PER_WIDE_INT)
	value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
      else
	value = (unsigned char) (TREE_INT_CST_HIGH (expr)
				 >> (bitpos - HOST_BITS_PER_WIDE_INT));

      if (total_bytes > UNITS_PER_WORD)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      ptr[offset] = value;
    }
  return total_bytes;
}
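/* Worked example (illustrative): encoding the SImode constant
   0x01020304 on a little-endian target stores the bytes
   04 03 02 01 into PTR and returns 4; on a big-endian target the
   stored order is 01 02 03 04.  */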
/* Subroutine of native_encode_expr.  Encode the REAL_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_real (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words, bitpos;
  unsigned char value;

  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  long tmp[6];

  if (total_bytes > len)
    return 0;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));

  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));

      if (UNITS_PER_WORD < 4)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
    }
  return total_bytes;
}
/* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_complex (const_tree expr, unsigned char *ptr, int len)
{
  int rsize, isize;
  tree part;

  part = TREE_REALPART (expr);
  rsize = native_encode_expr (part, ptr, len);
  if (rsize == 0)
    return 0;
  part = TREE_IMAGPART (expr);
  isize = native_encode_expr (part, ptr+rsize, len-rsize);
  if (isize != rsize)
    return 0;
  return rsize + isize;
}
/* Subroutine of native_encode_expr.  Encode the VECTOR_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_vector (const_tree expr, unsigned char *ptr, int len)
{
  unsigned i, count;
  int size, offset;
  tree itype, elem;

  offset = 0;
  count = VECTOR_CST_NELTS (expr);
  itype = TREE_TYPE (TREE_TYPE (expr));
  size = GET_MODE_SIZE (TYPE_MODE (itype));
  for (i = 0; i < count; i++)
    {
      elem = VECTOR_CST_ELT (expr, i);
      if (native_encode_expr (elem, ptr+offset, len-offset) != size)
	return 0;
      offset += size;
    }
  return offset;
}
/* Subroutine of native_encode_expr.  Encode the STRING_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_string (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  HOST_WIDE_INT total_bytes;

  if (TREE_CODE (type) != ARRAY_TYPE
      || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
      || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
      || !host_integerp (TYPE_SIZE_UNIT (type), 0))
    return 0;
  total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
  if (total_bytes > len)
    return 0;
  if (TREE_STRING_LENGTH (expr) < total_bytes)
    {
      memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
      memset (ptr + TREE_STRING_LENGTH (expr), 0,
	      total_bytes - TREE_STRING_LENGTH (expr));
    }
  else
    memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
  return total_bytes;
}
/* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
   REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
   buffer PTR of length LEN bytes.  Return the number of bytes
   placed in the buffer, or zero upon failure.  */

static int
native_encode_expr (const_tree expr, unsigned char *ptr, int len)
{
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return native_encode_int (expr, ptr, len);

    case REAL_CST:
      return native_encode_real (expr, ptr, len);

    case COMPLEX_CST:
      return native_encode_complex (expr, ptr, len);

    case VECTOR_CST:
      return native_encode_vector (expr, ptr, len);

    case STRING_CST:
      return native_encode_string (expr, ptr, len);

    default:
      return 0;
    }
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_int (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;
  double_int result;

  if (total_bytes > len)
    return NULL_TREE;
  if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return NULL_TREE;

  result = double_int_zero;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (total_bytes > UNITS_PER_WORD)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      value = ptr[offset];

      if (bitpos < HOST_BITS_PER_WIDE_INT)
	result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
      else
	result.high |= (unsigned HOST_WIDE_INT) value
		       << (bitpos - HOST_BITS_PER_WIDE_INT);
    }

  return double_int_to_tree (type, result);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a REAL_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_real (tree type, const unsigned char *ptr, int len)
{
  enum machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  int byte, offset, word, words, bitpos;
  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  REAL_VALUE_TYPE r;
  long tmp[6];

  total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  if (total_bytes > len || total_bytes > 24)
    return NULL_TREE;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  memset (tmp, 0, sizeof (tmp));
  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      if (UNITS_PER_WORD < 4)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];

      tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
    }

  real_from_target (&r, tmp, mode);
  return build_real (type, r);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_complex (tree type, const unsigned char *ptr, int len)
{
  tree etype, rpart, ipart;
  int size;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  if (size * 2 > len)
    return NULL_TREE;
  rpart = native_interpret_expr (etype, ptr, size);
  if (!rpart)
    return NULL_TREE;
  ipart = native_interpret_expr (etype, ptr+size, size);
  if (!ipart)
    return NULL_TREE;
  return build_complex (type, rpart, ipart);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_vector (tree type, const unsigned char *ptr, int len)
{
  tree etype, elem;
  int i, size, count;
  tree *elements;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  count = TYPE_VECTOR_SUBPARTS (type);
  if (size * count > len)
    return NULL_TREE;

  elements = XALLOCAVEC (tree, count);
  for (i = count - 1; i >= 0; i--)
    {
      elem = native_interpret_expr (etype, ptr+(i*size), size);
      if (!elem)
	return NULL_TREE;
      elements[i] = elem;
    }
  return build_vector (type, elements);
}
/* Subroutine of fold_view_convert_expr.  Interpret the contents of
   the buffer PTR of length LEN as a constant of type TYPE.  For
   INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
   we return a REAL_CST, etc...  If the buffer cannot be interpreted,
   return NULL_TREE.  */

static tree
native_interpret_expr (tree type, const unsigned char *ptr, int len)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      return native_interpret_int (type, ptr, len);

    case REAL_TYPE:
      return native_interpret_real (type, ptr, len);

    case COMPLEX_TYPE:
      return native_interpret_complex (type, ptr, len);

    case VECTOR_TYPE:
      return native_interpret_vector (type, ptr, len);

    default:
      return NULL_TREE;
    }
}
/* Returns true if we can interpret the contents of a native encoding
   as TYPE.  */

static bool
can_native_interpret_type_p (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case REAL_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
      return true;
    default:
      return false;
    }
}
/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
   TYPE at compile-time.  If we're unable to perform the conversion
   return NULL_TREE.  */

static tree
fold_view_convert_expr (tree type, tree expr)
{
  /* We support up to 512-bit values (for V8DFmode).  */
  unsigned char buffer[64];
  int len;

  /* Check that the host and target are sane.  */
  if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    return NULL_TREE;

  len = native_encode_expr (expr, buffer, sizeof (buffer));
  if (len == 0)
    return NULL_TREE;

  return native_interpret_expr (type, buffer, len);
}
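/* Worked example (illustrative): VIEW_CONVERT_EXPR<int>(1.0f) folds
   at compile time by encoding the float as its IEEE bytes and
   re-reading them as an int, giving 0x3f800000 on the usual
   single-precision targets.  */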
/* Build an expression for the address of T.  Folds away INDIRECT_REF
   to avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  if (TREE_CODE (t) == INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);

      if (TREE_TYPE (t) != ptrtype)
	t = build1_loc (loc, NOP_EXPR, ptrtype, t);
    }
  else if (TREE_CODE (t) == MEM_REF
	   && integer_zerop (TREE_OPERAND (t, 1)))
    return TREE_OPERAND (t, 0);
  else if (TREE_CODE (t) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
    return fold_binary (POINTER_PLUS_EXPR, ptrtype,
			TREE_OPERAND (t, 0),
			convert_to_ptrofftype (TREE_OPERAND (t, 1)));
  else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
    {
      t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));

      if (TREE_TYPE (t) != ptrtype)
	t = fold_convert_loc (loc, ptrtype, t);
    }
  else
    t = build1_loc (loc, ADDR_EXPR, ptrtype, t);

  return t;
}
/* Build an expression for the address of T.  */

tree
build_fold_addr_expr_loc (location_t loc, tree t)
{
  tree ptrtype = build_pointer_type (TREE_TYPE (t));

  return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
}

static bool vec_cst_ctor_to_array (tree, tree *);
7641 /* Fold a unary expression of code CODE and type TYPE with operand
7642 OP0. Return the folded expression if folding is successful.
7643 Otherwise, return NULL_TREE. */
7646 fold_unary_loc (location_t loc
, enum tree_code code
, tree type
, tree op0
)
7650 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
7652 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
7653 && TREE_CODE_LENGTH (code
) == 1);
7658 if (CONVERT_EXPR_CODE_P (code
)
7659 || code
== FLOAT_EXPR
|| code
== ABS_EXPR
|| code
== NEGATE_EXPR
)
7661 /* Don't use STRIP_NOPS, because signedness of argument type
7663 STRIP_SIGN_NOPS (arg0
);
7667 /* Strip any conversions that don't change the mode. This
7668 is safe for every expression, except for a comparison
7669 expression because its signedness is derived from its
7672 Note that this is done as an internal manipulation within
7673 the constant folder, in order to find the simplest
7674 representation of the arguments so that their form can be
7675 studied. In any cases, the appropriate type conversions
7676 should be put back in the tree that will get out of the
7682 if (TREE_CODE_CLASS (code
) == tcc_unary
)
7684 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
7685 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7686 fold_build1_loc (loc
, code
, type
,
7687 fold_convert_loc (loc
, TREE_TYPE (op0
),
7688 TREE_OPERAND (arg0
, 1))));
7689 else if (TREE_CODE (arg0
) == COND_EXPR
)
7691 tree arg01
= TREE_OPERAND (arg0
, 1);
7692 tree arg02
= TREE_OPERAND (arg0
, 2);
7693 if (! VOID_TYPE_P (TREE_TYPE (arg01
)))
7694 arg01
= fold_build1_loc (loc
, code
, type
,
7695 fold_convert_loc (loc
,
7696 TREE_TYPE (op0
), arg01
));
7697 if (! VOID_TYPE_P (TREE_TYPE (arg02
)))
7698 arg02
= fold_build1_loc (loc
, code
, type
,
7699 fold_convert_loc (loc
,
7700 TREE_TYPE (op0
), arg02
));
7701 tem
= fold_build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7704 /* If this was a conversion, and all we did was to move into
7705 inside the COND_EXPR, bring it back out. But leave it if
7706 it is a conversion from integer to integer and the
7707 result precision is no wider than a word since such a
7708 conversion is cheap and may be optimized away by combine,
7709 while it couldn't if it were outside the COND_EXPR. Then return
7710 so we don't get into an infinite recursion loop taking the
7711 conversion out and then back in. */
7713 if ((CONVERT_EXPR_CODE_P (code
)
7714 || code
== NON_LVALUE_EXPR
)
7715 && TREE_CODE (tem
) == COND_EXPR
7716 && TREE_CODE (TREE_OPERAND (tem
, 1)) == code
7717 && TREE_CODE (TREE_OPERAND (tem
, 2)) == code
7718 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 1))
7719 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 2))
7720 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))
7721 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)))
7722 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7724 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))))
7725 && TYPE_PRECISION (TREE_TYPE (tem
)) <= BITS_PER_WORD
)
7726 || flag_syntax_only
))
7727 tem
= build1_loc (loc
, code
, type
,
7729 TREE_TYPE (TREE_OPERAND
7730 (TREE_OPERAND (tem
, 1), 0)),
7731 TREE_OPERAND (tem
, 0),
7732 TREE_OPERAND (TREE_OPERAND (tem
, 1), 0),
7733 TREE_OPERAND (TREE_OPERAND (tem
, 2),
7742 /* Re-association barriers around constants and other re-association
7743 barriers can be removed. */
7744 if (CONSTANT_CLASS_P (op0
)
7745 || TREE_CODE (op0
) == PAREN_EXPR
)
7746 return fold_convert_loc (loc
, type
, op0
);
7751 case FIX_TRUNC_EXPR
:
7752 if (TREE_TYPE (op0
) == type
)
7755 if (COMPARISON_CLASS_P (op0
))
7757 /* If we have (type) (a CMP b) and type is an integral type, return
7758 new expression involving the new type. Canonicalize
7759 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7761 Do not fold the result as that would not simplify further, also
7762 folding again results in recursions. */
7763 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
7764 return build2_loc (loc
, TREE_CODE (op0
), type
,
7765 TREE_OPERAND (op0
, 0),
7766 TREE_OPERAND (op0
, 1));
7767 else if (!INTEGRAL_TYPE_P (type
) && !VOID_TYPE_P (type
)
7768 && TREE_CODE (type
) != VECTOR_TYPE
)
7769 return build3_loc (loc
, COND_EXPR
, type
, op0
,
7770 constant_boolean_node (true, type
),
7771 constant_boolean_node (false, type
));
7774 /* Handle cases of two conversions in a row. */
7775 if (CONVERT_EXPR_P (op0
))
7777 tree inside_type
= TREE_TYPE (TREE_OPERAND (op0
, 0));
7778 tree inter_type
= TREE_TYPE (op0
);
7779 int inside_int
= INTEGRAL_TYPE_P (inside_type
);
7780 int inside_ptr
= POINTER_TYPE_P (inside_type
);
7781 int inside_float
= FLOAT_TYPE_P (inside_type
);
7782 int inside_vec
= TREE_CODE (inside_type
) == VECTOR_TYPE
;
7783 unsigned int inside_prec
= TYPE_PRECISION (inside_type
);
7784 int inside_unsignedp
= TYPE_UNSIGNED (inside_type
);
7785 int inter_int
= INTEGRAL_TYPE_P (inter_type
);
7786 int inter_ptr
= POINTER_TYPE_P (inter_type
);
7787 int inter_float
= FLOAT_TYPE_P (inter_type
);
7788 int inter_vec
= TREE_CODE (inter_type
) == VECTOR_TYPE
;
7789 unsigned int inter_prec
= TYPE_PRECISION (inter_type
);
7790 int inter_unsignedp
= TYPE_UNSIGNED (inter_type
);
7791 int final_int
= INTEGRAL_TYPE_P (type
);
7792 int final_ptr
= POINTER_TYPE_P (type
);
7793 int final_float
= FLOAT_TYPE_P (type
);
7794 int final_vec
= TREE_CODE (type
) == VECTOR_TYPE
;
7795 unsigned int final_prec
= TYPE_PRECISION (type
);
7796 int final_unsignedp
= TYPE_UNSIGNED (type
);
7798 /* check for cases specific to UPC, involving pointer types */
7799 if (final_ptr
|| inter_ptr
|| inside_ptr
)
7801 int final_pts
= final_ptr
7802 && upc_shared_type_p (TREE_TYPE (type
));
7803 int inter_pts
= inter_ptr
7804 && upc_shared_type_p (TREE_TYPE (inter_type
));
7805 int inside_pts
= inside_ptr
7806 && upc_shared_type_p (TREE_TYPE (inside_type
));
7807 if (final_pts
|| inter_pts
|| inside_pts
)
7809 if (!((final_pts
&& inter_pts
)
7810 && TREE_TYPE (type
) == TREE_TYPE (inter_type
))
7811 || ((inter_pts
&& inside_pts
)
7812 && (TREE_TYPE (inter_type
)
7813 == TREE_TYPE (inside_type
))))
7818 /* In addition to the cases of two conversions in a row
7819 handled below, if we are converting something to its own
7820 type via an object of identical or wider precision, neither
7821 conversion is needed. */
7822 if (TYPE_MAIN_VARIANT (inside_type
) == TYPE_MAIN_VARIANT (type
)
7823 && (((inter_int
|| inter_ptr
) && final_int
)
7824 || (inter_float
&& final_float
))
7825 && inter_prec
>= final_prec
)
7826 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7828 /* Likewise, if the intermediate and initial types are either both
7829 float or both integer, we don't need the middle conversion if the
7830 former is wider than the latter and doesn't change the signedness
7831 (for integers). Avoid this if the final type is a pointer since
7832 then we sometimes need the middle conversion. Likewise if the
7833 final type has a precision not equal to the size of its mode. */
7834 if (((inter_int
&& inside_int
)
7835 || (inter_float
&& inside_float
)
7836 || (inter_vec
&& inside_vec
))
7837 && inter_prec
>= inside_prec
7838 && (inter_float
|| inter_vec
7839 || inter_unsignedp
== inside_unsignedp
)
7840 && ! (final_prec
!= GET_MODE_PRECISION (TYPE_MODE (type
))
7841 && TYPE_MODE (type
) == TYPE_MODE (inter_type
))
7843 && (! final_vec
|| inter_prec
== inside_prec
))
7844 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7846 /* If we have a sign-extension of a zero-extended value, we can
7847 replace that by a single zero-extension. Likewise if the
7848 final conversion does not change precision we can drop the
7849 intermediate conversion. */
7850 if (inside_int
&& inter_int
&& final_int
7851 && ((inside_prec
< inter_prec
&& inter_prec
< final_prec
7852 && inside_unsignedp
&& !inter_unsignedp
)
7853 || final_prec
== inter_prec
))
7854 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7856 /* Two conversions in a row are not needed unless:
7857 - some conversion is floating-point (overstrict for now), or
7858 - some conversion is a vector (overstrict for now), or
7859 - the intermediate type is narrower than both initial and
7861 - the intermediate type and innermost type differ in signedness,
7862 and the outermost type is wider than the intermediate, or
7863 - the initial type is a pointer type and the precisions of the
7864 intermediate and final types differ, or
7865 - the final type is a pointer type and the precisions of the
7866 initial and intermediate types differ. */
7867 if (! inside_float
&& ! inter_float
&& ! final_float
7868 && ! inside_vec
&& ! inter_vec
&& ! final_vec
7869 && (inter_prec
>= inside_prec
|| inter_prec
>= final_prec
)
7870 && ! (inside_int
&& inter_int
7871 && inter_unsignedp
!= inside_unsignedp
7872 && inter_prec
< final_prec
)
7873 && ((inter_unsignedp
&& inter_prec
> inside_prec
)
7874 == (final_unsignedp
&& final_prec
> inter_prec
))
7875 && ! (inside_ptr
&& inter_prec
!= final_prec
)
7876 && ! (final_ptr
&& inside_prec
!= inter_prec
)
7877 && ! (final_prec
!= GET_MODE_PRECISION (TYPE_MODE (type
))
7878 && TYPE_MODE (type
) == TYPE_MODE (inter_type
)))
7879 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7882 /* Handle (T *)&A.B.C for A being of type T and B and C
7883 living at offset zero. This occurs frequently in
7884 C++ upcasting and then accessing the base. */
7885 if (TREE_CODE (op0
) == ADDR_EXPR
7886 && POINTER_TYPE_P (type
)
7887 && handled_component_p (TREE_OPERAND (op0
, 0)))
7889 HOST_WIDE_INT bitsize
, bitpos
;
7891 enum machine_mode mode
;
7892 int unsignedp
, volatilep
;
7893 tree base
= TREE_OPERAND (op0
, 0);
7894 base
= get_inner_reference (base
, &bitsize
, &bitpos
, &offset
,
7895 &mode
, &unsignedp
, &volatilep
, false);
7896 /* If the reference was to a (constant) zero offset, we can use
7897 the address of the base if it has the same base type
7898 as the result type and the pointer type is unqualified. */
7899 if (! offset
&& bitpos
== 0
7900 && (TYPE_MAIN_VARIANT (TREE_TYPE (type
))
7901 == TYPE_MAIN_VARIANT (TREE_TYPE (base
)))
7902 && TYPE_QUALS (type
) == TYPE_UNQUALIFIED
)
7903 return fold_convert_loc (loc
, type
,
7904 build_fold_addr_expr_loc (loc
, base
));
7907 if (TREE_CODE (op0
) == MODIFY_EXPR
7908 && TREE_CONSTANT (TREE_OPERAND (op0
, 1))
7909 /* Detect assigning a bitfield. */
7910 && !(TREE_CODE (TREE_OPERAND (op0
, 0)) == COMPONENT_REF
7912 (TREE_OPERAND (TREE_OPERAND (op0
, 0), 1))))
7914 /* Don't leave an assignment inside a conversion
7915 unless assigning a bitfield. */
7916 tem
= fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 1));
7917 /* First do the assignment, then return converted constant. */
7918 tem
= build2_loc (loc
, COMPOUND_EXPR
, TREE_TYPE (tem
), op0
, tem
);
7919 TREE_NO_WARNING (tem
) = 1;
7920 TREE_USED (tem
) = 1;
7924 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7925 constants (if x has signed type, the sign bit cannot be set
7926 in c). This folds extension into the BIT_AND_EXPR.
7927 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7928 very likely don't have maximal range for their precision and this
7929 transformation effectively doesn't preserve non-maximal ranges. */
7930 if (TREE_CODE (type
) == INTEGER_TYPE
7931 && TREE_CODE (op0
) == BIT_AND_EXPR
7932 && TREE_CODE (TREE_OPERAND (op0
, 1)) == INTEGER_CST
)
7934 tree and_expr
= op0
;
7935 tree and0
= TREE_OPERAND (and_expr
, 0);
7936 tree and1
= TREE_OPERAND (and_expr
, 1);
7939 if (TYPE_UNSIGNED (TREE_TYPE (and_expr
))
7940 || (TYPE_PRECISION (type
)
7941 <= TYPE_PRECISION (TREE_TYPE (and_expr
))))
7943 else if (TYPE_PRECISION (TREE_TYPE (and1
))
7944 <= HOST_BITS_PER_WIDE_INT
7945 && host_integerp (and1
, 1))
7947 unsigned HOST_WIDE_INT cst
;
7949 cst
= tree_low_cst (and1
, 1);
7950 cst
&= (HOST_WIDE_INT
) -1
7951 << (TYPE_PRECISION (TREE_TYPE (and1
)) - 1);
7952 change
= (cst
== 0);
7953 #ifdef LOAD_EXTEND_OP
7955 && !flag_syntax_only
7956 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0
)))
7959 tree uns
= unsigned_type_for (TREE_TYPE (and0
));
7960 and0
= fold_convert_loc (loc
, uns
, and0
);
7961 and1
= fold_convert_loc (loc
, uns
, and1
);
7967 tem
= force_fit_type_double (type
, tree_to_double_int (and1
),
7968 0, TREE_OVERFLOW (and1
));
7969 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
7970 fold_convert_loc (loc
, type
, and0
), tem
);
7974 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7975 when one of the new casts will fold away. Conservatively we assume
7976 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7977 if (POINTER_TYPE_P (type
)
7978 && (!TYPE_RESTRICT (type
) || TYPE_RESTRICT (TREE_TYPE (arg0
)))
7979 && TREE_CODE (arg0
) == POINTER_PLUS_EXPR
7980 && !upc_shared_type_p (TREE_TYPE (type
))
7981 && !upc_shared_type_p (TREE_TYPE (
7982 TREE_TYPE (TREE_OPERAND (arg0
, 0))))
7983 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
7984 || TREE_CODE (TREE_OPERAND (arg0
, 0)) == NOP_EXPR
7985 || TREE_CODE (TREE_OPERAND (arg0
, 1)) == NOP_EXPR
))
7987 tree arg00
= TREE_OPERAND (arg0
, 0);
7988 tree arg01
= TREE_OPERAND (arg0
, 1);
7990 return fold_build_pointer_plus_loc
7991 (loc
, fold_convert_loc (loc
, type
, arg00
), arg01
);
7994 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7995 of the same precision, and X is an integer type not narrower than
7996 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7997 if (INTEGRAL_TYPE_P (type
)
7998 && TREE_CODE (op0
) == BIT_NOT_EXPR
7999 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
8000 && CONVERT_EXPR_P (TREE_OPERAND (op0
, 0))
8001 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (op0
)))
8003 tem
= TREE_OPERAND (TREE_OPERAND (op0
, 0), 0);
8004 if (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
8005 && TYPE_PRECISION (type
) <= TYPE_PRECISION (TREE_TYPE (tem
)))
8006 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
8007 fold_convert_loc (loc
, type
, tem
));
      /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
         type of X and Y (integer types only).  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (op0) == MULT_EXPR
          && INTEGRAL_TYPE_P (TREE_TYPE (op0))
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
        {
          /* Be careful not to introduce new overflows.  */
          tree mult_type;
          if (TYPE_OVERFLOW_WRAPS (type))
            mult_type = type;
          else
            mult_type = unsigned_type_for (type);

          if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
            {
              tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
                                     fold_convert_loc (loc, mult_type,
                                                       TREE_OPERAND (op0, 0)),
                                     fold_convert_loc (loc, mult_type,
                                                       TREE_OPERAND (op0, 1)));
              return fold_convert_loc (loc, type, tem);
            }
        }
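
      /* For example, (short) (x * y) with int operands can be computed as
         (short) ((unsigned short) x * (unsigned short) y); the unsigned
         intermediate type keeps the narrowed multiply from introducing
         new signed overflow.  */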
      tem = fold_convert_const (code, type, op0);
      return tem ? tem : NULL_TREE;
    case ADDR_SPACE_CONVERT_EXPR:
      if (integer_zerop (arg0))
        return fold_convert_const (code, type, arg0);
      return NULL_TREE;
    case FIXED_CONVERT_EXPR:
      tem = fold_convert_const (code, type, arg0);
      return tem ? tem : NULL_TREE;
    case VIEW_CONVERT_EXPR:
      if (TREE_TYPE (op0) == type)
        return op0;
      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
        return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
                                type, TREE_OPERAND (op0, 0));
      if (TREE_CODE (op0) == MEM_REF)
        return fold_build2_loc (loc, MEM_REF, type,
                                TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));

      /* For integral conversions with the same precision or pointer
         conversions use a NOP_EXPR instead.  */
      if ((INTEGRAL_TYPE_P (type)
           || (POINTER_TYPE_P (type)
               && !upc_shared_type_p (TREE_TYPE (type))))
          && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
              || (POINTER_TYPE_P (TREE_TYPE (op0))
                  && !upc_shared_type_p (TREE_TYPE (TREE_TYPE (op0)))))
          && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
        return fold_convert_loc (loc, type, op0);

      /* Strip inner integral conversions that do not change the precision.  */
      if (CONVERT_EXPR_P (op0)
          && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
              || (POINTER_TYPE_P (TREE_TYPE (op0))
                  && !upc_shared_type_p (TREE_TYPE (TREE_TYPE (op0)))))
          && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
              || (POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
                  && !upc_shared_type_p (TREE_TYPE (
                                         TREE_TYPE (TREE_OPERAND (op0, 0))))))
          && (TYPE_PRECISION (TREE_TYPE (op0))
              == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
        return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
                                type, TREE_OPERAND (op0, 0));

      return fold_view_convert_expr (type, op0);
    case NEGATE_EXPR:
      tem = fold_negate_expr (loc, arg0);
      if (tem)
        return fold_convert_loc (loc, type, tem);
      return NULL_TREE;
    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
        return fold_abs_const (arg0, type);
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
        return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
               && TREE_CODE (type) == REAL_TYPE)
        {
          tree targ0 = strip_float_extensions (arg0);
          if (targ0 != arg0)
            return fold_convert_loc (loc, type,
                                     fold_build1_loc (loc, ABS_EXPR,
                                                      TREE_TYPE (targ0),
                                                      targ0));
        }
      /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
      else if (TREE_CODE (arg0) == ABS_EXPR)
        return arg0;
      else if (tree_expr_nonnegative_p (arg0))
        return arg0;

      /* Strip sign ops from argument.  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = fold_strip_sign_ops (arg0);
          if (tem)
            return fold_build1_loc (loc, ABS_EXPR, type,
                                    fold_convert_loc (loc, type, tem));
        }
      return NULL_TREE;
    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
        {
          tree itype = TREE_TYPE (type);
          tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
          tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
                                  negate_expr (ipart));
        }
      if (TREE_CODE (arg0) == COMPLEX_CST)
        {
          tree itype = TREE_TYPE (type);
          tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
          tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
          return build_complex (type, rpart, negate_expr (ipart));
        }
      if (TREE_CODE (arg0) == CONJ_EXPR)
        return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      return NULL_TREE;
    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return fold_not_const (arg0, type);
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
        return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      /* Convert ~ (-A) to A - 1.  */
      else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)),
                                build_int_cst (type, 1));
      /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
      else if (INTEGRAL_TYPE_P (type)
               && ((TREE_CODE (arg0) == MINUS_EXPR
                    && integer_onep (TREE_OPERAND (arg0, 1)))
                   || (TREE_CODE (arg0) == PLUS_EXPR
                       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
        return fold_build1_loc (loc, NEGATE_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)));
      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
               && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
                                         fold_convert_loc (loc, type,
                                                           TREE_OPERAND (arg0, 0)))))
        return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
               && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
                                         fold_convert_loc (loc, type,
                                                           TREE_OPERAND (arg0, 1)))))
        return fold_build2_loc (loc, BIT_XOR_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)), tem);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
        {
          tree *elements;
          tree elem;
          unsigned count = VECTOR_CST_NELTS (arg0), i;

          elements = XALLOCAVEC (tree, count);
          for (i = 0; i < count; i++)
            {
              elem = VECTOR_CST_ELT (arg0, i);
              elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
              if (elem == NULL_TREE)
                break;
              elements[i] = elem;
            }
          if (i == count)
            return build_vector (type, elements);
        }
      return NULL_TREE;
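
      /* The identities used above follow from two's complement
         arithmetic: ~x == -x - 1, hence ~(-A) == A - 1 and
         ~(A - 1) == -A.  */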
    case TRUTH_NOT_EXPR:
      /* The argument to invert_truthvalue must have Boolean type.  */
      if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
        arg0 = fold_convert_loc (loc, boolean_type_node, arg0);

      /* Note that the operand of this must be an int
         and its values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language,
         but we don't handle values other than 1 correctly yet.)  */
      tem = fold_truth_not_expr (loc, arg0);
      if (!tem)
        return NULL_TREE;
      return fold_convert_loc (loc, type, tem);
    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
                                     TREE_OPERAND (arg0, 1));
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert_loc (loc, type, TREE_REALPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
                                 fold_build1_loc (loc, REALPART_EXPR, itype,
                                                  TREE_OPERAND (arg0, 0)),
                                 fold_build1_loc (loc, REALPART_EXPR, itype,
                                                  TREE_OPERAND (arg0, 1)));
          return fold_convert_loc (loc, type, tem);
        }
      if (TREE_CODE (arg0) == CONJ_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build1_loc (loc, REALPART_EXPR, itype,
                                 TREE_OPERAND (arg0, 0));
          return fold_convert_loc (loc, type, tem);
        }
      if (TREE_CODE (arg0) == CALL_EXPR)
        {
          tree fn = get_callee_fndecl (arg0);
          if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
            switch (DECL_FUNCTION_CODE (fn))
              {
              CASE_FLT_FN (BUILT_IN_CEXPI):
                fn = mathfn_built_in (type, BUILT_IN_COS);
                if (fn)
                  return build_call_expr_loc (loc, fn, 1,
                                              CALL_EXPR_ARG (arg0, 0));
                break;

              default:
                break;
              }
        }
      return NULL_TREE;
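
      /* The CEXPI rewrite uses Euler's formula: cexpi (x) is
         cos (x) + i*sin (x), so its real part is exactly cos (x) and
         its imaginary part (handled below) is sin (x).  */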
    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return build_zero_cst (type);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
                                     TREE_OPERAND (arg0, 0));
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
                                 fold_build1_loc (loc, IMAGPART_EXPR, itype,
                                                  TREE_OPERAND (arg0, 0)),
                                 fold_build1_loc (loc, IMAGPART_EXPR, itype,
                                                  TREE_OPERAND (arg0, 1)));
          return fold_convert_loc (loc, type, tem);
        }
      if (TREE_CODE (arg0) == CONJ_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build1_loc (loc, IMAGPART_EXPR, itype,
                                 TREE_OPERAND (arg0, 0));
          return fold_convert_loc (loc, type, negate_expr (tem));
        }
      if (TREE_CODE (arg0) == CALL_EXPR)
        {
          tree fn = get_callee_fndecl (arg0);
          if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
            switch (DECL_FUNCTION_CODE (fn))
              {
              CASE_FLT_FN (BUILT_IN_CEXPI):
                fn = mathfn_built_in (type, BUILT_IN_SIN);
                if (fn)
                  return build_call_expr_loc (loc, fn, 1,
                                              CALL_EXPR_ARG (arg0, 0));
                break;

              default:
                break;
              }
        }
      return NULL_TREE;
    case INDIRECT_REF:
      /* Fold *&X to X if X is an lvalue.  */
      if (TREE_CODE (op0) == ADDR_EXPR)
        {
          tree op00 = TREE_OPERAND (op0, 0);
          if ((TREE_CODE (op00) == VAR_DECL
               || TREE_CODE (op00) == PARM_DECL
               || TREE_CODE (op00) == RESULT_DECL)
              && !TREE_READONLY (op00))
            return op00;
        }
      return NULL_TREE;
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts;
        enum tree_code subcode;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
        if (TREE_CODE (arg0) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts * 2);
        if (!vec_cst_ctor_to_array (arg0, elts))
          return NULL_TREE;

        if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
                                   || code == VEC_UNPACK_FLOAT_LO_EXPR))
          elts += nelts;

        if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
          subcode = NOP_EXPR;
        else
          subcode = FLOAT_EXPR;

        for (i = 0; i < nelts; i++)
          {
            elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
            if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }
    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts;
        enum tree_code subcode;

        if (TREE_CODE (op0) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts);
        if (!vec_cst_ctor_to_array (op0, elts))
          return NULL_TREE;

        switch (code)
          {
          case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
          case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
          case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
          default: gcc_unreachable ();
          }

        for (i = 1; i < nelts; i++)
          {
            elts[0] = const_binop (subcode, elts[0], elts[i]);
            if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
              return NULL_TREE;
            elts[i] = build_zero_cst (TREE_TYPE (type));
          }

        return build_vector (type, elts);
      }

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* If the operation was a conversion do _not_ mark a resulting constant
   with TREE_OVERFLOW if the original constant was not.  These conversions
   have implementation defined behavior and retaining the TREE_OVERFLOW
   flag here would confuse later passes such as VRP.  */

tree
fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
                                tree type, tree op0)
{
  tree res = fold_unary_loc (loc, code, type, op0);
  if (res
      && TREE_CODE (res) == INTEGER_CST
      && TREE_CODE (op0) == INTEGER_CST
      && CONVERT_EXPR_CODE_P (code))
    TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);

  return res;
}
/* Fold a binary bitwise/truth expression of code CODE and type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting expression.
   ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_truth_andor (location_t loc, enum tree_code code, tree type,
                  tree arg0, tree arg1, tree op0, tree op1)
{
  tree tem;

  /* We only do these simplifications if we are optimizing.  */
  if (!optimize)
    return NULL_TREE;

  /* Check for things like (A || B) && (A || C).  We can convert this
     to A || (B && C).  Note that either operator can be any of the four
     truth and/or operations and the transformation will still be
     valid.  Also note that we only care about order for the
     ANDIF and ORIF operators.  If B contains side effects, this
     might change the truth-value of A.  */
  if (TREE_CODE (arg0) == TREE_CODE (arg1)
      && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
          || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
          || TREE_CODE (arg0) == TRUTH_AND_EXPR
          || TREE_CODE (arg0) == TRUTH_OR_EXPR)
      && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
    {
      tree a00 = TREE_OPERAND (arg0, 0);
      tree a01 = TREE_OPERAND (arg0, 1);
      tree a10 = TREE_OPERAND (arg1, 0);
      tree a11 = TREE_OPERAND (arg1, 1);
      int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
                          || TREE_CODE (arg0) == TRUTH_AND_EXPR)
                         && (code == TRUTH_AND_EXPR
                             || code == TRUTH_OR_EXPR));

      if (operand_equal_p (a00, a10, 0))
        return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
                                fold_build2_loc (loc, code, type, a01, a11));
      else if (commutative && operand_equal_p (a00, a11, 0))
        return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
                                fold_build2_loc (loc, code, type, a01, a10));
      else if (commutative && operand_equal_p (a01, a10, 0))
        return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
                                fold_build2_loc (loc, code, type, a00, a11));

      /* This case is tricky because we must either have commutative
         operators or else A10 must not have side-effects.  */

      else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
               && operand_equal_p (a01, a11, 0))
        return fold_build2_loc (loc, TREE_CODE (arg0), type,
                                fold_build2_loc (loc, code, type, a00, a10),
                                a01);
    }

  /* See if we can build a range comparison.  */
  if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
    return tem;

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
      if (tem)
        return fold_build2_loc (loc, code, type, tem, arg1);
    }

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
      if (tem)
        return fold_build2_loc (loc, code, type, arg0, tem);
    }

  /* Check for the possibility of merging component references.  If our
     lhs is another similar operation, try to merge its rhs with our
     rhs.  Then try to merge our lhs and rhs.  */
  if (TREE_CODE (arg0) == code
      && 0 != (tem = fold_truth_andor_1 (loc, code, type,
                                         TREE_OPERAND (arg0, 1), arg1)))
    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

  if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
    return tem;

  if (LOGICAL_OP_NON_SHORT_CIRCUIT
      && (code == TRUTH_AND_EXPR
          || code == TRUTH_ANDIF_EXPR
          || code == TRUTH_OR_EXPR
          || code == TRUTH_ORIF_EXPR))
    {
      enum tree_code ncode, icode;

      ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
              ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
      icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;

      /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
         or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
         We don't want to pack more than two leaves to a non-IF AND/OR
         expression.
         If tree-code of left-hand operand isn't an AND/OR-IF code and not
         equal to IF-CODE, then we don't want to add right-hand operand.
         If the inner right-hand side of left-hand operand has
         side-effects, or isn't simple, then we can't add to it,
         as otherwise we might destroy the if-sequence.  */
      if (TREE_CODE (arg0) == icode
          && simple_operand_p_2 (arg1)
          /* Needed for sequence points to handle trappings, and
             side-effects.  */
          && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
        {
          tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
                                 arg1);
          return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
                                  tem);
        }
      /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
         or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
      else if (TREE_CODE (arg1) == icode
               && simple_operand_p_2 (arg0)
               /* Needed for sequence points to handle trappings, and
                  side-effects.  */
               && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
        {
          tem = fold_build2_loc (loc, ncode, type,
                                 arg0, TREE_OPERAND (arg1, 0));
          return fold_build2_loc (loc, icode, type, tem,
                                  TREE_OPERAND (arg1, 1));
        }
      /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
         into (A OR B).
         For sequence point consistency, we need to check for trapping,
         and side-effects.  */
      else if (code == icode && simple_operand_p_2 (arg0)
               && simple_operand_p_2 (arg1))
        return fold_build2_loc (loc, ncode, type, arg0, arg1);
    }

  return NULL_TREE;
}
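
/* For example, (a || b) && (a || c) is rewritten above to a || (b && c),
   and on LOGICAL_OP_NON_SHORT_CIRCUIT targets a && b with simple,
   non-trapping operands is turned into the non-short-circuit
   TRUTH_AND_EXPR form, which exposes further folding opportunities.  */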
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code compl_code;

  if (code == MIN_EXPR)
    compl_code = MAX_EXPR;
  else if (code == MAX_EXPR)
    compl_code = MIN_EXPR;
  else
    gcc_unreachable ();

  /* MIN (MAX (a, b), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));

  /* MIN (MAX (b, a), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
      && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));

  /* MIN (a, MAX (a, b)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));

  /* MIN (a, MAX (b, a)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));

  return NULL_TREE;
}
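
/* For example, MIN (MAX (a, b), b) is always b: when a > b the MAX
   yields a and the MIN takes b back, otherwise both sides yield b.
   The other three patterns are the commuted and mirrored variants.  */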
/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
   valid if signed overflow is undefined.  */

static tree
maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code,
                                 tree type, tree arg0, tree arg1,
                                 bool *strict_overflow_p)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
  int sgn0;
  bool swap = false;

  /* Match A +- CST code arg1 and CST code arg1.  We can change the
     first form only if overflow is undefined.  */
  if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
         /* In principle pointers also have undefined overflow behavior,
            but that causes problems elsewhere.  */
         && !POINTER_TYPE_P (TREE_TYPE (arg0))
         && (code0 == MINUS_EXPR
             || code0 == PLUS_EXPR)
         && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        || code0 == INTEGER_CST))
    return NULL_TREE;

  /* Identify the constant in arg0 and its sign.  */
  if (code0 == INTEGER_CST)
    cst0 = arg0;
  else
    cst0 = TREE_OPERAND (arg0, 1);
  sgn0 = tree_int_cst_sgn (cst0);

  /* Overflowed constants and zero will cause problems.  */
  if (integer_zerop (cst0)
      || TREE_OVERFLOW (cst0))
    return NULL_TREE;

  /* See if we can reduce the magnitude of the constant in
     arg0 by changing the comparison code.  */
  if (code0 == INTEGER_CST)
    {
      /* CST <= arg1  ->  CST-1 < arg1.  */
      if (code == LE_EXPR && sgn0 == 1)
        code = LT_EXPR;
      /* -CST < arg1  ->  -CST-1 <= arg1.  */
      else if (code == LT_EXPR && sgn0 == -1)
        code = LE_EXPR;
      /* CST > arg1  ->  CST-1 >= arg1.  */
      else if (code == GT_EXPR && sgn0 == 1)
        code = GE_EXPR;
      /* -CST >= arg1  ->  -CST-1 > arg1.  */
      else if (code == GE_EXPR && sgn0 == -1)
        code = GT_EXPR;
      else
        return NULL_TREE;
      /* arg1 code' CST' might be more canonical.  */
      swap = true;
    }
  else
    {
      /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
      if (code == LT_EXPR
          && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
        code = LE_EXPR;
      /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
      else if (code == GT_EXPR
               && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
        code = GE_EXPR;
      /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
      else if (code == LE_EXPR
               && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
        code = LT_EXPR;
      /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
      else if (code == GE_EXPR
               && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
        code = GT_EXPR;
      else
        return NULL_TREE;
      *strict_overflow_p = true;
    }

  /* Now build the constant reduced in magnitude.  But not if that
     would produce one outside of its type's range.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
      && ((sgn0 == 1
           && TYPE_MIN_VALUE (TREE_TYPE (cst0))
           && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
          || (sgn0 == -1
              && TYPE_MAX_VALUE (TREE_TYPE (cst0))
              && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
    /* We cannot swap the comparison here as that would cause us to
       endlessly recurse.  */
    return NULL_TREE;

  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
                       cst0, build_int_cst (TREE_TYPE (cst0), 1));
  if (code0 != INTEGER_CST)
    t = fold_build2_loc (loc, code0, TREE_TYPE (arg0),
                         TREE_OPERAND (arg0, 0), t);
  t = fold_convert (TREE_TYPE (arg1), t);

  /* If swapping might yield a more canonical form, do so.  */
  if (swap)
    return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
  return fold_build2_loc (loc, code, type, t, arg1);
}
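
/* For example, x + 4 <= y becomes x + 3 < y (valid only when signed
   overflow is undefined, hence *STRICT_OVERFLOW_P), and the
   constant-only form 5 <= y becomes the swapped y > 4.  Each step
   reduces the magnitude of the constant by one, so the process
   terminates.  */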
/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
   overflow further.  Try to decrease the magnitude of constants involved
   by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
   and put sole constants at the second argument position.
   Returns the canonicalized tree if changed, otherwise NULL_TREE.  */

static tree
maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
                               tree arg0, tree arg1)
{
  tree t;
  bool strict_overflow_p;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
                                  "when reducing constant in comparison");

  /* Try canonicalization by simplifying arg0.  */
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
                                       &strict_overflow_p);
  if (t)
    {
      if (strict_overflow_p)
        fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
      return t;
    }

  /* Try canonicalization by simplifying arg1 using the swapped
     comparison.  */
  code = swap_tree_comparison (code);
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
                                       &strict_overflow_p);
  if (t && strict_overflow_p)
    fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
  return t;
}
/* Return whether BASE + OFFSET + BITPOS may wrap around the address
   space.  This is used to avoid issuing overflow warnings for
   expressions like &p->x which cannot wrap.  */

static bool
pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
{
  double_int di_offset, total;

  if (!POINTER_TYPE_P (TREE_TYPE (base)))
    return true;

  if (bitpos < 0)
    return true;

  if (offset == NULL_TREE)
    di_offset = double_int_zero;
  else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
    return true;
  else
    di_offset = TREE_INT_CST (offset);

  bool overflow;
  double_int units = double_int::from_uhwi (bitpos / BITS_PER_UNIT);
  total = di_offset.add_with_sign (units, true, &overflow);
  if (overflow)
    return true;

  if (total.high != 0)
    return true;

  HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
  if (size <= 0)
    return true;

  /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
     array.  */
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      HOST_WIDE_INT base_size;

      base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
      if (base_size > 0 && size < base_size)
        size = base_size;
    }

  return total.low > (unsigned HOST_WIDE_INT) size;
}
/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary should call fold_binary.  Fold a comparison with
   tree code CODE and type TYPE with operands OP0 and OP1.  Return
   the folded comparison or NULL_TREE.  */

static tree
fold_comparison (location_t loc, enum tree_code code, tree type,
                 tree op0, tree op1)
{
  tree arg0, arg1, tem;

  arg0 = op0;
  arg1 = op1;

  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  tem = fold_relational_const (code, type, arg0, arg1);
  if (tem != NULL_TREE)
    return tem;

  /* If one arg is a real or integer constant, put it last.  */
  if (tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);

  /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1.  */
  if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
          && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
      && (TREE_CODE (arg1) == INTEGER_CST
          && !TREE_OVERFLOW (arg1)))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;
      tree variable = TREE_OPERAND (arg0, 0);
      tree lhs;
      int lhs_add;
      lhs_add = TREE_CODE (arg0) != PLUS_EXPR;

      lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
                             TREE_TYPE (arg1), const2, const1);

      /* If the constant operation overflowed this can be
         simplified as a comparison against INT_MAX/INT_MIN.  */
      if (TREE_CODE (lhs) == INTEGER_CST
          && TREE_OVERFLOW (lhs))
        {
          int const1_sgn = tree_int_cst_sgn (const1);
          enum tree_code code2 = code;

          /* Get the sign of the constant on the lhs if the
             operation were VARIABLE + CONST1.  */
          if (TREE_CODE (arg0) == MINUS_EXPR)
            const1_sgn = -const1_sgn;

          /* The sign of the constant determines if we overflowed
             INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
             Canonicalize to the INT_MIN overflow by swapping the comparison
             if necessary.  */
          if (const1_sgn == -1)
            code2 = swap_tree_comparison (code);

          /* We now can look at the canonicalized case
               VARIABLE + 1  CODE2  INT_MIN
             and decide on the result.  */
          if (code2 == LT_EXPR
              || code2 == LE_EXPR
              || code2 == EQ_EXPR)
            return omit_one_operand_loc (loc, type,
                                         boolean_false_node, variable);
          else if (code2 == NE_EXPR
                   || code2 == GE_EXPR
                   || code2 == GT_EXPR)
            return omit_one_operand_loc (loc, type,
                                         boolean_true_node, variable);
        }

      if (TREE_CODE (lhs) == TREE_CODE (arg1)
          && (TREE_CODE (lhs) != INTEGER_CST
              || !TREE_OVERFLOW (lhs)))
        {
          if (code != EQ_EXPR && code != NE_EXPR)
            fold_overflow_warning ("assuming signed overflow does not occur "
                                   "when changing X +- C1 cmp C2 to "
                                   "X cmp C2 -+ C1",
                                   WARN_STRICT_OVERFLOW_COMPARISON);
          return fold_build2_loc (loc, code, type, variable, lhs);
        }
    }
  /* For comparisons of pointers we can decompose it to a compile time
     comparison of the base objects and the offsets into the object.
     This requires at least one operand being an ADDR_EXPR or a
     POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == ADDR_EXPR
          || TREE_CODE (arg1) == ADDR_EXPR
          || TREE_CODE (arg0) == POINTER_PLUS_EXPR
          || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
    {
      tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
      HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
      enum machine_mode mode;
      int volatilep, unsignedp;
      bool indirect_base0 = false, indirect_base1 = false;

      /* Get base and offset for the access.  Strip ADDR_EXPR for
         get_inner_reference, but put it back by stripping INDIRECT_REF
         off the base object if possible.  indirect_baseN will be true
         if baseN is not an address but refers to the object itself.  */
      base0 = arg0;
      if (TREE_CODE (arg0) == ADDR_EXPR)
        {
          base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
                                       &bitsize, &bitpos0, &offset0, &mode,
                                       &unsignedp, &volatilep, false);
          if (TREE_CODE (base0) == INDIRECT_REF)
            base0 = TREE_OPERAND (base0, 0);
          else
            indirect_base0 = true;
        }
      else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
        {
          base0 = TREE_OPERAND (arg0, 0);
          STRIP_SIGN_NOPS (base0);
          if (TREE_CODE (base0) == ADDR_EXPR)
            {
              base0 = TREE_OPERAND (base0, 0);
              indirect_base0 = true;
            }
          offset0 = TREE_OPERAND (arg0, 1);
          if (host_integerp (offset0, 0))
            {
              HOST_WIDE_INT off = size_low_cst (offset0);
              if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
                                   * BITS_PER_UNIT)
                  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
                {
                  bitpos0 = off * BITS_PER_UNIT;
                  offset0 = NULL_TREE;
                }
            }
        }

      base1 = arg1;
      if (TREE_CODE (arg1) == ADDR_EXPR)
        {
          base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
                                       &bitsize, &bitpos1, &offset1, &mode,
                                       &unsignedp, &volatilep, false);
          if (TREE_CODE (base1) == INDIRECT_REF)
            base1 = TREE_OPERAND (base1, 0);
          else
            indirect_base1 = true;
        }
      else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
        {
          base1 = TREE_OPERAND (arg1, 0);
          STRIP_SIGN_NOPS (base1);
          if (TREE_CODE (base1) == ADDR_EXPR)
            {
              base1 = TREE_OPERAND (base1, 0);
              indirect_base1 = true;
            }
          offset1 = TREE_OPERAND (arg1, 1);
          if (host_integerp (offset1, 0))
            {
              HOST_WIDE_INT off = size_low_cst (offset1);
              if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
                                   * BITS_PER_UNIT)
                  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
                {
                  bitpos1 = off * BITS_PER_UNIT;
                  offset1 = NULL_TREE;
                }
            }
        }

      /* A local variable can never be pointed to by
         the default SSA name of an incoming parameter.  */
      if ((TREE_CODE (arg0) == ADDR_EXPR
           && indirect_base0
           && TREE_CODE (base0) == VAR_DECL
           && auto_var_in_fn_p (base0, current_function_decl)
           && !indirect_base1
           && TREE_CODE (base1) == SSA_NAME
           && SSA_NAME_IS_DEFAULT_DEF (base1)
           && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
          || (TREE_CODE (arg1) == ADDR_EXPR
              && indirect_base1
              && TREE_CODE (base1) == VAR_DECL
              && auto_var_in_fn_p (base1, current_function_decl)
              && !indirect_base0
              && TREE_CODE (base0) == SSA_NAME
              && SSA_NAME_IS_DEFAULT_DEF (base0)
              && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
        {
          if (code == NE_EXPR)
            return constant_boolean_node (1, type);
          else if (code == EQ_EXPR)
            return constant_boolean_node (0, type);
        }
      /* If we have equivalent bases we might be able to simplify.  */
      else if (indirect_base0 == indirect_base1
               && operand_equal_p (base0, base1, 0))
        {
          /* We can fold this expression to a constant if the non-constant
             offset parts are equal.  */
          if ((offset0 == offset1
               || (offset0 && offset1
                   && operand_equal_p (offset0, offset1, 0)))
              && (code == EQ_EXPR
                  || code == NE_EXPR
                  || (indirect_base0 && DECL_P (base0))
                  || POINTER_TYPE_OVERFLOW_UNDEFINED))
            {
              if (code != EQ_EXPR
                  && code != NE_EXPR
                  && bitpos0 != bitpos1
                  && (pointer_may_wrap_p (base0, offset0, bitpos0)
                      || pointer_may_wrap_p (base1, offset1, bitpos1)))
                fold_overflow_warning (("assuming pointer wraparound does not "
                                        "occur when comparing P +- C1 with "
                                        "P +- C2"),
                                       WARN_STRICT_OVERFLOW_CONDITIONAL);

              switch (code)
                {
                case EQ_EXPR:
                  return constant_boolean_node (bitpos0 == bitpos1, type);
                case NE_EXPR:
                  return constant_boolean_node (bitpos0 != bitpos1, type);
                case LT_EXPR:
                  return constant_boolean_node (bitpos0 < bitpos1, type);
                case LE_EXPR:
                  return constant_boolean_node (bitpos0 <= bitpos1, type);
                case GE_EXPR:
                  return constant_boolean_node (bitpos0 >= bitpos1, type);
                case GT_EXPR:
                  return constant_boolean_node (bitpos0 > bitpos1, type);
                default:;
                }
            }
          /* We can simplify the comparison to a comparison of the variable
             offset parts if the constant offset parts are equal.
             Be careful to use signed sizetype here because otherwise we
             mess with array offsets in the wrong way.  This is possible
             because pointer arithmetic is restricted to retain within an
             object and overflow on pointer differences is undefined as of
             6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
          else if (bitpos0 == bitpos1
                   && ((code == EQ_EXPR || code == NE_EXPR)
                       || (indirect_base0 && DECL_P (base0))
                       || POINTER_TYPE_OVERFLOW_UNDEFINED))
            {
              /* By converting to signed sizetype we cover middle-end pointer
                 arithmetic which operates on unsigned pointer types of size
                 type size and ARRAY_REF offsets which are properly sign or
                 zero extended from their type in case it is narrower than
                 sizetype.  */
              if (offset0 == NULL_TREE)
                offset0 = build_int_cst (ssizetype, 0);
              else
                offset0 = fold_convert_loc (loc, ssizetype, offset0);
              if (offset1 == NULL_TREE)
                offset1 = build_int_cst (ssizetype, 0);
              else
                offset1 = fold_convert_loc (loc, ssizetype, offset1);

              if (code != EQ_EXPR
                  && code != NE_EXPR
                  && (pointer_may_wrap_p (base0, offset0, bitpos0)
                      || pointer_may_wrap_p (base1, offset1, bitpos1)))
                fold_overflow_warning (("assuming pointer wraparound does not "
                                        "occur when comparing P +- C1 with "
                                        "P +- C2"),
                                       WARN_STRICT_OVERFLOW_COMPARISON);

              return fold_build2_loc (loc, code, type, offset0, offset1);
            }
        }
      /* For non-equal bases we can simplify if they are addresses
         of local binding decls or constants.  */
      else if (indirect_base0 && indirect_base1
               /* We know that !operand_equal_p (base0, base1, 0)
                  because the if condition was false.  But make
                  sure two decls are not the same.  */
               && base0 != base1
               && TREE_CODE (arg0) == ADDR_EXPR
               && TREE_CODE (arg1) == ADDR_EXPR
               && (((TREE_CODE (base0) == VAR_DECL
                     || TREE_CODE (base0) == PARM_DECL)
                    && (targetm.binds_local_p (base0)
                        || CONSTANT_CLASS_P (base1)))
                   || CONSTANT_CLASS_P (base0))
               && (((TREE_CODE (base1) == VAR_DECL
                     || TREE_CODE (base1) == PARM_DECL)
                    && (targetm.binds_local_p (base1)
                        || CONSTANT_CLASS_P (base0)))
                   || CONSTANT_CLASS_P (base1)))
        {
          if (code == EQ_EXPR)
            return omit_two_operands_loc (loc, type, boolean_false_node,
                                          arg0, arg1);
          else if (code == NE_EXPR)
            return omit_two_operands_loc (loc, type, boolean_true_node,
                                          arg0, arg1);
        }
      /* For equal offsets we can simplify to a comparison of the
         base addresses.  */
      else if (bitpos0 == bitpos1
               && (indirect_base0
                   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
               && (indirect_base1
                   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
               && ((offset0 == offset1)
                   || (offset0 && offset1
                       && operand_equal_p (offset0, offset1, 0))))
        {
          if (indirect_base0)
            base0 = build_fold_addr_expr_loc (loc, base0);
          if (indirect_base1)
            base1 = build_fold_addr_expr_loc (loc, base1);
          return fold_build2_loc (loc, code, type, base0, base1);
        }
    }
  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = TREE_OPERAND (arg1, 1);
      tree variable1 = TREE_OPERAND (arg0, 0);
      tree variable2 = TREE_OPERAND (arg1, 0);
      tree cst;
      const char * const warnmsg = G_("assuming signed overflow does not "
                                      "occur when combining constants around "
                                      "a comparison");

      /* Put the constant on the side where it doesn't overflow and is
         of lower absolute value than before.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
                             ? MINUS_EXPR : PLUS_EXPR,
                             const2, const1);
      if (!TREE_OVERFLOW (cst)
          && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
        {
          fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
          return fold_build2_loc (loc, code, type,
                                  variable1,
                                  fold_build2_loc (loc,
                                                   TREE_CODE (arg1),
                                                   TREE_TYPE (arg1),
                                                   variable2, cst));
        }

      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
                             ? MINUS_EXPR : PLUS_EXPR,
                             const1, const2);
      if (!TREE_OVERFLOW (cst)
          && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
        {
          fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
          return fold_build2_loc (loc, code, type,
                                  fold_build2_loc (loc, TREE_CODE (arg0),
                                                   TREE_TYPE (arg0),
                                                   variable1, cst),
                                  variable2);
        }
    }
  /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
     signed arithmetic case.  That form is created by the compiler
     often enough for folding it to be of value.  One example is in
     computing loop trip counts after Operator Strength Reduction.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && TREE_CODE (arg0) == MULT_EXPR
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && integer_zerop (arg1))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;                       /* zero */
      tree variable1 = TREE_OPERAND (arg0, 0);
      enum tree_code cmp_code = code;

      /* Handle unfolded multiplication by zero.  */
      if (integer_zerop (const1))
        return fold_build2_loc (loc, cmp_code, type, const1, const2);

      fold_overflow_warning (("assuming signed overflow does not occur when "
                              "eliminating multiplication in comparison "
                              "with zero"),
                             WARN_STRICT_OVERFLOW_COMPARISON);

      /* If const1 is negative we swap the sense of the comparison.  */
      if (tree_int_cst_sgn (const1) < 0)
        cmp_code = swap_tree_comparison (cmp_code);

      return fold_build2_loc (loc, cmp_code, type, variable1, const2);
    }
  tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
  if (tem)
    return tem;
  if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
    {
      tree targ0 = strip_float_extensions (arg0);
      tree targ1 = strip_float_extensions (arg1);
      tree newtype = TREE_TYPE (targ0);

      if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
        newtype = TREE_TYPE (targ1);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
        return fold_build2_loc (loc, code, type,
                                fold_convert_loc (loc, newtype, targ0),
                                fold_convert_loc (loc, newtype, targ1));

      /* (-a) CMP (-b) -> b CMP a  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
                                TREE_OPERAND (arg0, 0));

      if (TREE_CODE (arg1) == REAL_CST)
        {
          REAL_VALUE_TYPE cst;
          cst = TREE_REAL_CST (arg1);

          /* (-a) CMP CST -> a swap(CMP) (-CST)  */
          if (TREE_CODE (arg0) == NEGATE_EXPR)
            return fold_build2_loc (loc, swap_tree_comparison (code), type,
                                    TREE_OPERAND (arg0, 0),
                                    build_real (TREE_TYPE (arg1),
                                                real_value_negate (&cst)));

          /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
          /* a CMP (-0) -> a CMP 0  */
          if (REAL_VALUE_MINUS_ZERO (cst))
            return fold_build2_loc (loc, code, type, arg0,
                                    build_real (TREE_TYPE (arg1), dconst0));

          /* x != NaN is always true, other ops are always false.  */
          if (REAL_VALUE_ISNAN (cst)
              && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
            {
              tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
              return omit_one_operand_loc (loc, type, tem, arg0);
            }

          /* Fold comparisons against infinity.  */
          if (REAL_VALUE_ISINF (cst)
              && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
            {
              tem = fold_inf_compare (loc, code, type, arg0, arg1);
              if (tem != NULL_TREE)
                return tem;
            }
        }

      /* If this is a comparison of a real constant with a PLUS_EXPR
         or a MINUS_EXPR of a real constant, we can convert it into a
         comparison with a revised real constant as long as no overflow
         occurs when unsafe_math_optimizations are enabled.  */
      if (flag_unsafe_math_optimizations
          && TREE_CODE (arg1) == REAL_CST
          && (TREE_CODE (arg0) == PLUS_EXPR
              || TREE_CODE (arg0) == MINUS_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
          && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
                                      ? MINUS_EXPR : PLUS_EXPR,
                                      arg1, TREE_OPERAND (arg0, 1)))
          && !TREE_OVERFLOW (tem))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Likewise, we can simplify a comparison of a real constant with
         a MINUS_EXPR whose first operand is also a real constant, i.e.
         (c1 - x) < c2 becomes x > c1-c2.  Reordering is allowed on
         floating-point types only if -fassociative-math is set.  */
      if (flag_associative_math
          && TREE_CODE (arg1) == REAL_CST
          && TREE_CODE (arg0) == MINUS_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
          && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
                                      arg1))
          && !TREE_OVERFLOW (tem))
        return fold_build2_loc (loc, swap_tree_comparison (code), type,
                                TREE_OPERAND (arg0, 1), tem);

      /* Fold comparisons against built-in math functions.  */
      if (TREE_CODE (arg1) == REAL_CST
          && flag_unsafe_math_optimizations
          && ! flag_errno_math)
        {
          enum built_in_function fcode = builtin_mathfn_code (arg0);

          if (fcode != END_BUILTINS)
            {
              tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
              if (tem != NULL_TREE)
                return tem;
            }
        }
    }
  if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
      && CONVERT_EXPR_P (arg0))
    {
      /* If we are widening one operand of an integer comparison,
         see if the other operand is similarly being widened.  Perhaps we
         can do the comparison in the narrower type.  */
      tem = fold_widened_comparison (loc, code, type, arg0, arg1);
      if (tem)
        return tem;

      /* Or if we are changing signedness.  */
      tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
      if (tem)
        return tem;
    }
  /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
     constant, we can simplify it.  */
  if (TREE_CODE (arg1) == INTEGER_CST
      && (TREE_CODE (arg0) == MIN_EXPR
          || TREE_CODE (arg0) == MAX_EXPR)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
    {
      tem = optimize_minmax_comparison (loc, code, type, op0, op1);
      if (tem)
        return tem;
    }
  /* Simplify comparison of something with itself.  (For IEEE
     floating-point, we can only do some of these simplifications.)  */
  if (operand_equal_p (arg0, arg1, 0))
    {
      switch (code)
        {
        case EQ_EXPR:
          if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
              || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
            return constant_boolean_node (1, type);
          break;

        case GE_EXPR:
        case LE_EXPR:
          if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
              || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
            return constant_boolean_node (1, type);
          return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);

        case NE_EXPR:
          /* For NE, we can only do this simplification if integer
             or we don't honor IEEE floating point NaNs.  */
          if (FLOAT_TYPE_P (TREE_TYPE (arg0))
              && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
            break;
          /* ... fall through ...  */
        case GT_EXPR:
        case LT_EXPR:
          return constant_boolean_node (0, type);
        default:
          gcc_unreachable ();
        }
    }
  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like  ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;
      int save_p = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
          /* Don't handle degenerate cases here; they should already
             have been handled anyway.  */
          && cval1 != 0 && cval2 != 0
          && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
          && TREE_TYPE (cval1) == TREE_TYPE (cval2)
          && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
          && TYPE_MAX_VALUE (TREE_TYPE (cval1))
          && TYPE_MAX_VALUE (TREE_TYPE (cval2))
          && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
                                TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
        {
          tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
          tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

          /* We can't just pass T to eval_subst in case cval1 or cval2
             was the same as ARG1.  */

          tree high_result
            = fold_build2_loc (loc, code, type,
                               eval_subst (loc, arg0, cval1, maxval,
                                           cval2, minval),
                               arg1);
          tree equal_result
            = fold_build2_loc (loc, code, type,
                               eval_subst (loc, arg0, cval1, maxval,
                                           cval2, maxval),
                               arg1);
          tree low_result
            = fold_build2_loc (loc, code, type,
                               eval_subst (loc, arg0, cval1, minval,
                                           cval2, maxval),
                               arg1);

          /* All three of these results should be 0 or 1.  Confirm they are.
             Then use those values to select the proper code to use.  */

          if (TREE_CODE (high_result) == INTEGER_CST
              && TREE_CODE (equal_result) == INTEGER_CST
              && TREE_CODE (low_result) == INTEGER_CST)
            {
              /* Make a 3-bit mask with the high-order bit being the
                 value for `>', the next for '=', and the low for '<'.  */
              switch ((integer_onep (high_result) * 4)
                      + (integer_onep (equal_result) * 2)
                      + integer_onep (low_result))
                {
                case 0:
                  /* Always false.  */
                  return omit_one_operand_loc (loc, type,
                                               integer_zero_node, arg0);
                case 1:
                  code = LT_EXPR;
                  break;
                case 2:
                  code = EQ_EXPR;
                  break;
                case 3:
                  code = LE_EXPR;
                  break;
                case 4:
                  code = GT_EXPR;
                  break;
                case 5:
                  code = NE_EXPR;
                  break;
                case 6:
                  code = GE_EXPR;
                  break;
                case 7:
                  /* Always true.  */
                  return omit_one_operand_loc (loc, type,
                                               integer_one_node, arg0);
                }

              if (save_p)
                {
                  tem = save_expr (build2 (code, type, cval1, cval2));
                  SET_EXPR_LOCATION (tem, loc);
                  return tem;
                }
              return fold_build2_loc (loc, code, type, cval1, cval2);
            }
        }
    }
  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
     into a single range test.  */
  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
      && TREE_CODE (arg1) == INTEGER_CST
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !integer_zerop (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (arg1))
    {
      tem = fold_div_compare (loc, code, type, arg0, arg1);
      if (tem != NULL_TREE)
        return tem;
    }
  /* Fold ~X op ~Y as Y op X.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == BIT_NOT_EXPR)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, code, type,
                              fold_convert_loc (loc, cmp_type,
                                                TREE_OPERAND (arg1, 0)),
                              TREE_OPERAND (arg0, 0));
    }

  /* Fold ~X op C as X op' ~C, where op' is the swapped comparison.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == INTEGER_CST)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, swap_tree_comparison (code), type,
                              TREE_OPERAND (arg0, 0),
                              fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
                                               fold_convert_loc (loc, cmp_type,
                                                                 arg1)));
    }

  return NULL_TREE;
}
/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */

static tree
fold_mult_zconjz (location_t loc, tree type, tree expr)
{
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      expr = save_expr (expr);
      rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
      ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
    }

  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2_loc (loc, PLUS_EXPR, itype,
                         fold_build2_loc (loc, MULT_EXPR, itype,
                                          rpart, rpart),
                         fold_build2_loc (loc, MULT_EXPR, itype,
                                          ipart, ipart));
  return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
                          build_zero_cst (itype));
}
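
/* This relies on (a + b*i) * (a - b*i) == a*a + b*b, a real value, so
   the imaginary part of the result is the zero constant built above.  */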
/* Subroutine of fold_binary.  If P is the value of EXPR, computes
   power-of-two M and (arbitrary) N such that M divides (P-N).  This condition
   guarantees that P and N have the same least significant log2(M) bits.
   N is not otherwise constrained.  In particular, N is not normalized to
   0 <= N < M as is common.  In general, the precise value of P is unknown.
   M is chosen as large as possible such that constant N can be determined.

   Returns M and sets *RESIDUE to N.

   If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
   account.  This is not always possible due to PR 35705.  */

static unsigned HOST_WIDE_INT
get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
                                 bool allow_func_align)
{
  enum tree_code code;

  *residue = 0;

  code = TREE_CODE (expr);
  if (code == ADDR_EXPR)
    {
      unsigned int bitalign;
      get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
      *residue /= BITS_PER_UNIT;
      return bitalign / BITS_PER_UNIT;
    }
  else if (code == POINTER_PLUS_EXPR)
    {
      tree op0, op1;
      unsigned HOST_WIDE_INT modulus;
      enum tree_code inner_code;

      op0 = TREE_OPERAND (expr, 0);
      STRIP_NOPS (op0);
      modulus = get_pointer_modulus_and_residue (op0, residue,
                                                 allow_func_align);

      op1 = TREE_OPERAND (expr, 1);
      STRIP_NOPS (op1);
      inner_code = TREE_CODE (op1);
      if (inner_code == INTEGER_CST)
        {
          *residue += TREE_INT_CST_LOW (op1);
          return modulus;
        }
      else if (inner_code == MULT_EXPR)
        {
          op1 = TREE_OPERAND (op1, 1);
          if (TREE_CODE (op1) == INTEGER_CST)
            {
              unsigned HOST_WIDE_INT align;

              /* Compute the greatest power-of-2 divisor of op1.  */
              align = TREE_INT_CST_LOW (op1);
              align &= -align;

              /* If align is non-zero and less than *modulus, replace
                 *modulus with align.  If align is 0, then either op1 is 0
                 or the greatest power-of-2 divisor of op1 doesn't fit in an
                 unsigned HOST_WIDE_INT.  In either case, no additional
                 constraint is imposed.  */
              if (align)
                modulus = MIN (modulus, align);

              return modulus;
            }
        }
    }

  /* If we get here, we were unable to determine anything useful about the
     expression.  */
  return 1;
}
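
/* For example, for &a p+ 12 where A is 8-byte aligned, the recursion
   returns modulus 8 with residue 12: the pointer value is known to be
   congruent to 12, i.e. to 4, modulo 8.  */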
/* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
   CONSTRUCTOR ARG into array ELTS and return true if successful.  */

static bool
vec_cst_ctor_to_array (tree arg, tree *elts)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;

  if (TREE_CODE (arg) == VECTOR_CST)
    {
      for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
        elts[i] = VECTOR_CST_ELT (arg, i);
    }
  else if (TREE_CODE (arg) == CONSTRUCTOR)
    {
      constructor_elt *elt;

      FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
        if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
          return false;
        else
          elts[i] = elt->value;
    }
  else
    return false;
  for (; i < nelts; i++)
    elts[i]
      = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
  return true;
}
/* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
   selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
   NULL_TREE otherwise.  */

static tree
fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
  tree *elts;
  bool need_ctor = false;

  gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
              && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
  if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
      || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
    return NULL_TREE;

  elts = XALLOCAVEC (tree, nelts * 3);
  if (!vec_cst_ctor_to_array (arg0, elts)
      || !vec_cst_ctor_to_array (arg1, elts + nelts))
    return NULL_TREE;

  for (i = 0; i < nelts; i++)
    {
      if (!CONSTANT_CLASS_P (elts[sel[i]]))
        need_ctor = true;
      elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
    }

  if (need_ctor)
    {
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, nelts);
      for (i = 0; i < nelts; i++)
        CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
      return build_constructor (type, v);
    }

  return build_vector (type, &elts[2 * nelts]);
}
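
/* For example, with four-element vectors ARG0 = {0,1,2,3} and
   ARG1 = {4,5,6,7}, SEL = {0,4,1,5} yields {0,4,1,5}: selector values
   below NELTS index ARG0 and the remaining values index ARG1.  */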
/* Try to fold a pointer difference of type TYPE of two address expressions
   of array references AREF0 and AREF1 using location LOC.  Return a
   simplified expression for the difference or NULL_TREE.  */

static tree
fold_addr_of_array_ref_difference (location_t loc, tree type,
                                   tree aref0, tree aref1)
{
  tree base0 = TREE_OPERAND (aref0, 0);
  tree base1 = TREE_OPERAND (aref1, 0);
  tree base_offset = build_int_cst (type, 0);

  /* If the bases are array references as well, recurse.  If the bases
     are pointer indirections compute the difference of the pointers.
     If the bases are equal, we are set.  */
  if ((TREE_CODE (base0) == ARRAY_REF
       && TREE_CODE (base1) == ARRAY_REF
       && (base_offset
           = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
      || (INDIRECT_REF_P (base0)
          && INDIRECT_REF_P (base1)
          && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
                                             TREE_OPERAND (base0, 0),
                                             TREE_OPERAND (base1, 0))))
      || operand_equal_p (base0, base1, 0))
    {
      tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
      tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
      tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
      tree diff = build2 (MINUS_EXPR, type, op0, op1);
      return fold_build2_loc (loc, PLUS_EXPR, type,
                              base_offset,
                              fold_build2_loc (loc, MULT_EXPR, type,
                                               diff, esz));
    }
  return NULL_TREE;
}
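
/* For example, &a[i] - &a[j] simplifies to (i - j) * sizeof (a[0]):
   the bases are equal, so BASE_OFFSET is zero and only the scaled
   difference of the indices remains.  */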
/* If the real or vector real constant CST of type TYPE has an exact
   inverse, return it, else return NULL.  */

static tree
exact_inverse (tree type, tree cst)
{
  REAL_VALUE_TYPE r;
  tree unit_type, *elts;
  enum machine_mode mode;
  unsigned vec_nelts, i;

  switch (TREE_CODE (cst))
    {
    case REAL_CST:
      r = TREE_REAL_CST (cst);

      if (exact_real_inverse (TYPE_MODE (type), &r))
        return build_real (type, r);

      return NULL_TREE;

    case VECTOR_CST:
      {
        vec_nelts = VECTOR_CST_NELTS (cst);
        elts = XALLOCAVEC (tree, vec_nelts);
        unit_type = TREE_TYPE (type);
        mode = TYPE_MODE (unit_type);

        for (i = 0; i < vec_nelts; i++)
          {
            r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
            if (!exact_real_inverse (mode, &r))
              return NULL_TREE;
            elts[i] = build_real (unit_type, r);
          }

        return build_vector (type, elts);
      }

    default:
      return NULL_TREE;
    }
}
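
/* For example, 0.25 has the exact binary inverse 4.0, so a division by
   0.25 can later be rewritten as a multiplication by 4.0, while 0.1
   has no exact inverse and NULL is returned.  */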
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  LOC is the location of the resulting expression.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

tree
fold_binary_loc (location_t loc,
	     enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree arg0, arg1, tem;
  tree t1 = NULL_TREE;
  bool strict_overflow_p;

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 2
	      && op0 != NULL_TREE
	      && op1 != NULL_TREE);

  arg0 = op0;
  arg1 = op1;

  /* Strip any conversions that don't change the mode.  This is
     safe for every expression, except for a comparison expression
     because its signedness is derived from its operands.  So, in
     the latter case, only strip conversions that don't change the
     signedness.  MIN_EXPR/MAX_EXPR also need signedness of arguments
     to be preserved.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation
     of the arguments so that their form can be studied.  In any
     case, the appropriate type conversions should be put back in
     the tree that will get out of the constant folder.  */

  if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
    {
      STRIP_SIGN_NOPS (arg0);
      STRIP_SIGN_NOPS (arg1);
    }
  else
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
    }

  /* Note that TREE_CONSTANT isn't enough: static var addresses are
     constant but we can't do arithmetic on them.  */
  if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
    {
      if (kind == tcc_binary)
	{
	  /* Make sure type and arg0 have the same saturating flag.  */
	  gcc_assert (TYPE_SATURATING (type)
		      == TYPE_SATURATING (TREE_TYPE (arg0)));
	  tem = const_binop (code, arg0, arg1);
	}
      else if (kind == tcc_comparison)
	tem = fold_relational_const (code, type, arg0, arg1);
      else
	tem = NULL_TREE;

      if (tem != NULL_TREE)
	{
	  if (TREE_TYPE (tem) != type)
	    tem = fold_convert_loc (loc, type, tem);
	  return tem;
	}
    }

  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, code, type, op1, op0);
  /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     one of the operands is a comparison and the other is a comparison, a
     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
     code below would make the expression more complex.  Change it to a
     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && TREE_CODE (type) != VECTOR_TYPE
      && ((truth_value_p (TREE_CODE (arg0))
	   && (truth_value_p (TREE_CODE (arg1))
	       || (TREE_CODE (arg1) == BIT_AND_EXPR
		   && integer_onep (TREE_OPERAND (arg1, 1)))))
	  || (truth_value_p (TREE_CODE (arg1))
	      && (truth_value_p (TREE_CODE (arg0))
		  || (TREE_CODE (arg0) == BIT_AND_EXPR
		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
			 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
			 : TRUTH_XOR_EXPR,
			 boolean_type_node,
			 fold_convert_loc (loc, boolean_type_node, arg0),
			 fold_convert_loc (loc, boolean_type_node, arg1));

      if (code == EQ_EXPR)
	tem = invert_truthvalue_loc (loc, tem);

      return fold_convert_loc (loc, type, tem);
    }
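
  /* Illustrative: with truth-valued operands, (a < b) & (c < d) is
     rewritten as a TRUTH_AND_EXPR, (a < b) != (c < d) as a
     TRUTH_XOR_EXPR, and (a < b) == (c < d) as the inversion of a
     TRUTH_XOR_EXPR, per the conversion above.  */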
  if (TREE_CODE_CLASS (code) == tcc_binary
      || TREE_CODE_CLASS (code) == tcc_comparison)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
	{
	  tem = fold_build2_loc (loc, code, type,
			     fold_convert_loc (loc, TREE_TYPE (op0),
					       TREE_OPERAND (arg0, 1)), op1);
	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
			     tem);
	}
      if (TREE_CODE (arg1) == COMPOUND_EXPR
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_build2_loc (loc, code, type, op0,
			     fold_convert_loc (loc, TREE_TYPE (op1),
					       TREE_OPERAND (arg1, 1)));
	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
			     tem);
	}

      if (TREE_CODE (arg0) == COND_EXPR
	  || TREE_CODE (arg0) == VEC_COND_EXPR
	  || COMPARISON_CLASS_P (arg0))
	{
	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
						     arg0, arg1,
						     /*cond_first_p=*/1);
	  if (tem != NULL_TREE)
	    return tem;
	}

      if (TREE_CODE (arg1) == COND_EXPR
	  || TREE_CODE (arg1) == VEC_COND_EXPR
	  || COMPARISON_CLASS_P (arg1))
	{
	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
						     arg1, arg0,
						     /*cond_first_p=*/0);
	  if (tem != NULL_TREE)
	    return tem;
	}
    }

  switch (code)
    {
    case MEM_REF:
      /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2].  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
	{
	  tree iref = TREE_OPERAND (arg0, 0);
	  return fold_build2 (MEM_REF, type,
			      TREE_OPERAND (iref, 0),
			      int_const_binop (PLUS_EXPR, arg1,
					       TREE_OPERAND (iref, 1)));
	}

      /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2].  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && handled_component_p (TREE_OPERAND (arg0, 0)))
	{
	  tree base;
	  HOST_WIDE_INT coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
						&coffset);
	  if (!base)
	    return NULL_TREE;
	  return fold_build2 (MEM_REF, type,
			      build_fold_addr_expr (base),
			      int_const_binop (PLUS_EXPR, arg1,
					       size_int (coffset)));
	}

      return NULL_TREE;
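
      /* Illustrative: MEM[&MEM[p, 4], 8] collapses to MEM[p, 12], and
	 MEM[&a.b, 8] becomes MEM[&a, offsetof (a, b) + 8], so nested
	 address arithmetic is kept as a single constant offset.  */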
    case POINTER_PLUS_EXPR:
      /* 0 +p index -> (type)index */
      if (integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

      /* PTR +p 0 -> PTR */
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this. */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
	  && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
	return fold_convert_loc (loc, type,
				 fold_build2_loc (loc, PLUS_EXPR, sizetype,
					      fold_convert_loc (loc, sizetype,
								arg1),
					      fold_convert_loc (loc, sizetype,
								arg0)));

      /* (PTR +p B) +p A -> PTR +p (B + A) */
      if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  && !upc_shared_type_p (TREE_TYPE (type)))
	{
	  tree inner;
	  tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
			       arg01, fold_convert_loc (loc, sizetype, arg1));
	  return fold_convert_loc (loc, type,
				   fold_build_pointer_plus_loc (loc,
								arg00, inner));
	}

      /* PTR_CST +p CST -> CST1 */
      if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
			    fold_convert_loc (loc, type, arg1));

      /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
	 of the array.  The loop optimizer sometimes produces this type
	 of expression.  */
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  tem = try_move_mult_to_index (loc, arg0,
					fold_convert_loc (loc,
							  ssizetype, arg1));
	  if (tem)
	    return fold_convert_loc (loc, type, tem);
	}

      return NULL_TREE;
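
      /* Illustrative: the POINTER_PLUS_EXPR folds above collapse
	 (p +p 4) +p 8 into p +p 12, rewrite 0 +p i as (type) i, and
	 drop the zero offset in p +p 0 altogether.  */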
    case PLUS_EXPR:
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, MINUS_EXPR, type,
			    fold_convert_loc (loc, type, arg0),
			    fold_convert_loc (loc, type,
					      TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
	return fold_build2_loc (loc, MINUS_EXPR, type,
			    fold_convert_loc (loc, type, arg1),
			    fold_convert_loc (loc, type,
					      TREE_OPERAND (arg0, 0)));

      /* Disable further optimizations involving UPC shared pointers,
	 because integers are not interoperable with shared pointers.  */
      if ((TREE_TYPE (arg0) && POINTER_TYPE_P (TREE_TYPE (arg0))
	   && upc_shared_type_p (TREE_TYPE (TREE_TYPE (arg0))))
	  || (TREE_TYPE (arg1) && POINTER_TYPE_P (TREE_TYPE (arg1))
	      && upc_shared_type_p (TREE_TYPE (TREE_TYPE (arg1)))))
	return NULL_TREE;

      if (INTEGRAL_TYPE_P (type))
	{
	  /* Convert ~A + 1 to -A.  */
	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
	      && integer_onep (arg1))
	    return fold_build1_loc (loc, NEGATE_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));

	  /* ~X + X is -1.  */
	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
	      && !TYPE_OVERFLOW_TRAPS (type))
	    {
	      tree tem = TREE_OPERAND (arg0, 0);

	      STRIP_NOPS (tem);
	      if (operand_equal_p (tem, arg1, 0))
		{
		  t1 = build_int_cst_type (type, -1);
		  return omit_one_operand_loc (loc, type, t1, arg1);
		}
	    }

	  /* X + ~X is -1.  */
	  if (TREE_CODE (arg1) == BIT_NOT_EXPR
	      && !TYPE_OVERFLOW_TRAPS (type))
	    {
	      tree tem = TREE_OPERAND (arg1, 0);

	      STRIP_NOPS (tem);
	      if (operand_equal_p (arg0, tem, 0))
		{
		  t1 = build_int_cst_type (type, -1);
		  return omit_one_operand_loc (loc, type, t1, arg0);
		}
	    }

	  /* X + (X / CST) * -CST is X % CST.  */
	  if (TREE_CODE (arg1) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
	      && operand_equal_p (arg0,
				  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
	    {
	      tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
	      tree cst1 = TREE_OPERAND (arg1, 1);
	      tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
				      cst1, cst0);
	      if (sum && integer_zerop (sum))
		return fold_convert_loc (loc, type,
					 fold_build2_loc (loc, TRUNC_MOD_EXPR,
						      TREE_TYPE (arg0), arg0,
						      cst0));
	    }
	}
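
      /* Worked instance (illustrative): with CST == 8 and x == 29,
	 x + (x / 8) * -8 is 29 + 3 * -8 == 5, which equals 29 % 8,
	 matching the TRUNC_MOD_EXPR produced by the fold above.  */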
      /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
	 one.  Make sure the type is not saturating and has the signedness of
	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
	   || TREE_CODE (arg1) == MULT_EXPR)
	  && !TYPE_SATURATING (type)
	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
	{
	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
	  if (tem)
	    return tem;
	}

      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
	     with a constant, and the two constants have no bits in common,
	     we should treat this as a BIT_IOR_EXPR since this may produce more
	     simplifications.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	      && integer_zerop (const_binop (BIT_AND_EXPR,
					     TREE_OPERAND (arg0, 1),
					     TREE_OPERAND (arg1, 1))))
	    {
	      code = BIT_IOR_EXPR;
	      goto bit_ior;
	    }

	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
	     (plus (plus (mult) (mult)) (foo)) so that we can
	     take advantage of the factoring cases below.  */
	  if (TYPE_OVERFLOW_WRAPS (type)
	      && (((TREE_CODE (arg0) == PLUS_EXPR
		    || TREE_CODE (arg0) == MINUS_EXPR)
		   && TREE_CODE (arg1) == MULT_EXPR)
		  || ((TREE_CODE (arg1) == PLUS_EXPR
		       || TREE_CODE (arg1) == MINUS_EXPR)
		      && TREE_CODE (arg0) == MULT_EXPR)))
	    {
	      tree parg0, parg1, parg, marg;
	      enum tree_code pcode;

	      if (TREE_CODE (arg1) == MULT_EXPR)
		parg = arg0, marg = arg1;
	      else
		parg = arg1, marg = arg0;
	      pcode = TREE_CODE (parg);
	      parg0 = TREE_OPERAND (parg, 0);
	      parg1 = TREE_OPERAND (parg, 1);
	      STRIP_NOPS (parg0);
	      STRIP_NOPS (parg1);

	      if (TREE_CODE (parg0) == MULT_EXPR
		  && TREE_CODE (parg1) != MULT_EXPR)
		return fold_build2_loc (loc, pcode, type,
				    fold_build2_loc (loc, PLUS_EXPR, type,
						 fold_convert_loc (loc, type,
								   parg0),
						 fold_convert_loc (loc, type,
								   marg)),
				    fold_convert_loc (loc, type, parg1));
	      if (TREE_CODE (parg0) != MULT_EXPR
		  && TREE_CODE (parg1) == MULT_EXPR)
		return
		  fold_build2_loc (loc, PLUS_EXPR, type,
			       fold_convert_loc (loc, type, parg0),
			       fold_build2_loc (loc, pcode, type,
					    fold_convert_loc (loc, type, marg),
					    fold_convert_loc (loc, type,
							      parg1)));
	    }
	}
      else
	{
	  /* See if ARG1 is zero and X + ARG1 reduces to X.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* Likewise if the operands are reversed.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

	  /* Convert X + -C into X - C.  */
	  if (TREE_CODE (arg1) == REAL_CST
	      && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
	    {
	      tem = fold_negate_const (arg1, type);
	      if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
		return fold_build2_loc (loc, MINUS_EXPR, type,
				    fold_convert_loc (loc, type, arg0),
				    fold_convert_loc (loc, type, tem));
	    }

	  /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
	     to __complex__ ( x, y ).  This is not the same for SNaNs or
	     if signed zeros are involved.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	    {
	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	      tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
	      tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
	      bool arg0rz = false, arg0iz = false;
	      if ((arg0r && (arg0rz = real_zerop (arg0r)))
		  || (arg0i && (arg0iz = real_zerop (arg0i))))
		{
		  tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
		  tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
		  if (arg0rz && arg1i && real_zerop (arg1i))
		    {
		      tree rp = arg1r ? arg1r
			: build1 (REALPART_EXPR, rtype, arg1);
		      tree ip = arg0i ? arg0i
			: build1 (IMAGPART_EXPR, rtype, arg0);
		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		    }
		  else if (arg0iz && arg1r && real_zerop (arg1r))
		    {
		      tree rp = arg0r ? arg0r
			: build1 (REALPART_EXPR, rtype, arg0);
		      tree ip = arg1i ? arg1i
			: build1 (IMAGPART_EXPR, rtype, arg1);
		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		    }
		}
	    }
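
	  /* Illustrative: __complex__ (x, 0.0) + __complex__ (0.0, y)
	     folds to __complex__ (x, y) above; signaling NaNs or signed
	     zeros suppress the transformation via the guards.  */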
	  if (flag_unsafe_math_optimizations
	      && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
	      && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
	      && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
	    return tem;

	  /* Convert x+x into x*2.0.  */
	  if (operand_equal_p (arg0, arg1, 0)
	      && SCALAR_FLOAT_TYPE_P (type))
	    return fold_build2_loc (loc, MULT_EXPR, type, arg0,
				build_real (type, dconst2));

	  /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
	     We associate floats only if the user has specified
	     -fassociative-math.  */
	  if (flag_associative_math
	      && TREE_CODE (arg1) == PLUS_EXPR
	      && TREE_CODE (arg0) != MULT_EXPR)
	    {
	      tree tree10 = TREE_OPERAND (arg1, 0);
	      tree tree11 = TREE_OPERAND (arg1, 1);
	      if (TREE_CODE (tree11) == MULT_EXPR
		  && TREE_CODE (tree10) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
		  return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
		}
	    }
	  /* Convert (b*c + d*e) + a into b*c + (d*e +a).
	     We associate floats only if the user has specified
	     -fassociative-math.  */
	  if (flag_associative_math
	      && TREE_CODE (arg0) == PLUS_EXPR
	      && TREE_CODE (arg1) != MULT_EXPR)
	    {
	      tree tree00 = TREE_OPERAND (arg0, 0);
	      tree tree01 = TREE_OPERAND (arg0, 1);
	      if (TREE_CODE (tree01) == MULT_EXPR
		  && TREE_CODE (tree00) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
		  return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
		}
	    }
	}

     bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
	 is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
	 is a rotate of A by B bits.  */
      {
	enum tree_code code0, code1;
	tree rtype;
	code0 = TREE_CODE (arg0);
	code1 = TREE_CODE (arg1);
	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
	    && operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 0), 0)
	    && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
		TYPE_UNSIGNED (rtype))
	    /* Only create rotates in complete modes.  Other cases are not
	       expanded properly.  */
	    && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
	  {
	    tree tree01, tree11;
	    enum tree_code code01, code11;

	    tree01 = TREE_OPERAND (arg0, 1);
	    tree11 = TREE_OPERAND (arg1, 1);
	    STRIP_NOPS (tree01);
	    STRIP_NOPS (tree11);
	    code01 = TREE_CODE (tree01);
	    code11 = TREE_CODE (tree11);
	    if (code01 == INTEGER_CST
		&& code11 == INTEGER_CST
		&& TREE_INT_CST_HIGH (tree01) == 0
		&& TREE_INT_CST_HIGH (tree11) == 0
		&& ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
		    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
	      {
		tem = build2_loc (loc, LROTATE_EXPR,
				  TREE_TYPE (TREE_OPERAND (arg0, 0)),
				  TREE_OPERAND (arg0, 0),
				  code0 == LSHIFT_EXPR ? tree01 : tree11);
		return fold_convert_loc (loc, type, tem);
	      }
	    else if (code11 == MINUS_EXPR)
	      {
		tree tree110, tree111;
		tree110 = TREE_OPERAND (tree11, 0);
		tree111 = TREE_OPERAND (tree11, 1);
		STRIP_NOPS (tree110);
		STRIP_NOPS (tree111);
		if (TREE_CODE (tree110) == INTEGER_CST
		    && 0 == compare_tree_int (tree110,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree01, tree111, 0))
		  return
		    fold_convert_loc (loc, type,
				      build2 ((code0 == LSHIFT_EXPR
					       ? LROTATE_EXPR
					       : RROTATE_EXPR),
					      TREE_TYPE (TREE_OPERAND (arg0, 0)),
					      TREE_OPERAND (arg0, 0), tree01));
	      }
	    else if (code01 == MINUS_EXPR)
	      {
		tree tree010, tree011;
		tree010 = TREE_OPERAND (tree01, 0);
		tree011 = TREE_OPERAND (tree01, 1);
		STRIP_NOPS (tree010);
		STRIP_NOPS (tree011);
		if (TREE_CODE (tree010) == INTEGER_CST
		    && 0 == compare_tree_int (tree010,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree11, tree011, 0))
		  return fold_convert_loc
		    (loc, type,
		     build2 ((code0 != LSHIFT_EXPR
			      ? LROTATE_EXPR
			      : RROTATE_EXPR),
			     TREE_TYPE (TREE_OPERAND (arg0, 0)),
			     TREE_OPERAND (arg0, 0), tree11));
	      }
	  }
      }
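
      /* Illustrative: for a 32-bit unsigned x, both
	 (x << 3) + (x >> 29) and (x << n) + (x >> (32 - n)) match the
	 patterns above and become a single left-rotate by 3 or n.  */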
    associate:
      /* In most languages, can't associate operations on floats through
	 parentheses.  Rather than remember where the parentheses were, we
	 don't associate floats at all, unless the user has specified
	 -fassociative-math.
	 And, we need to make sure type is not saturating.  */

      if ((! FLOAT_TYPE_P (type) || flag_associative_math)
	  && !TYPE_SATURATING (type))
	{
	  tree var0, con0, lit0, minus_lit0;
	  tree var1, con1, lit1, minus_lit1;
	  tree atype = type;
	  bool ok = true;

	  /* Split both trees into variables, constants, and literals.  Then
	     associate each group together, the constants with literals,
	     then the result with variables.  This increases the chances of
	     literals being recombined later and of generating relocatable
	     expressions for the sum of a constant and literal.  */
	  var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
	  var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
			     code == MINUS_EXPR);

	  /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
	  if (code == MINUS_EXPR)
	    code = PLUS_EXPR;

	  /* With undefined overflow prefer doing association in a type
	     which wraps on overflow, if that is one of the operand types.  */
	  if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
	      || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
	    {
	      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
		  && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
		atype = TREE_TYPE (arg0);
	      else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
		       && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
		atype = TREE_TYPE (arg1);
	      gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
	    }

	  /* With undefined overflow we can only associate constants with one
	     variable, and constants whose association doesn't overflow.  */
	  if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
	      || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
	    {
	      if (var0 && var1)
		{
		  tree tmp0 = var0;
		  tree tmp1 = var1;

		  if (TREE_CODE (tmp0) == NEGATE_EXPR)
		    tmp0 = TREE_OPERAND (tmp0, 0);
		  if (CONVERT_EXPR_P (tmp0)
		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
			  <= TYPE_PRECISION (atype)))
		    tmp0 = TREE_OPERAND (tmp0, 0);
		  if (TREE_CODE (tmp1) == NEGATE_EXPR)
		    tmp1 = TREE_OPERAND (tmp1, 0);
		  if (CONVERT_EXPR_P (tmp1)
		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
			  <= TYPE_PRECISION (atype)))
		    tmp1 = TREE_OPERAND (tmp1, 0);
		  /* The only case we can still associate with two variables
		     is if they are the same, modulo negation and bit-pattern
		     preserving conversions.  */
		  if (!operand_equal_p (tmp0, tmp1, 0))
		    ok = false;
		}
	    }

	  /* Only do something if we found more than two objects.  Otherwise,
	     nothing has changed and we risk infinite recursion.  */
	  if (ok
	      && (2 < ((var0 != 0) + (var1 != 0)
		       + (con0 != 0) + (con1 != 0)
		       + (lit0 != 0) + (lit1 != 0)
		       + (minus_lit0 != 0) + (minus_lit1 != 0))))
	    {
	      bool any_overflows = false;
	      if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
	      if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
	      if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
	      if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
	      var0 = associate_trees (loc, var0, var1, code, atype);
	      con0 = associate_trees (loc, con0, con1, code, atype);
	      lit0 = associate_trees (loc, lit0, lit1, code, atype);
	      minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
					    code, atype);

	      /* Preserve the MINUS_EXPR if the negative part of the literal is
		 greater than the positive part.  Otherwise, the multiplicative
		 folding code (i.e. extract_muldiv) may be fooled in case
		 unsigned constants are subtracted, like in the following
		 example: ((X*2 + 4) - 8U)/2.  */
	      if (minus_lit0 && lit0)
		{
		  if (TREE_CODE (lit0) == INTEGER_CST
		      && TREE_CODE (minus_lit0) == INTEGER_CST
		      && tree_int_cst_lt (lit0, minus_lit0))
		    {
		      minus_lit0 = associate_trees (loc, minus_lit0, lit0,
						    MINUS_EXPR, atype);
		      lit0 = 0;
		    }
		  else
		    {
		      lit0 = associate_trees (loc, lit0, minus_lit0,
					      MINUS_EXPR, atype);
		      minus_lit0 = 0;
		    }
		}

	      /* Don't introduce overflows through reassociation.  */
	      if (!any_overflows
		  && ((lit0 && TREE_OVERFLOW (lit0))
		      || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
		return NULL_TREE;

	      if (minus_lit0)
		{
		  if (con0 == 0)
		    return
		      fold_convert_loc (loc, type,
					associate_trees (loc, var0, minus_lit0,
							 MINUS_EXPR, atype));
		  else
		    {
		      con0 = associate_trees (loc, con0, minus_lit0,
					      MINUS_EXPR, atype);
		      return
			fold_convert_loc (loc, type,
					  associate_trees (loc, var0, con0,
							   PLUS_EXPR, atype));
		    }
		}

	      con0 = associate_trees (loc, con0, lit0, code, atype);
	      return
		fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
							      code, atype));
	    }
	}

      return NULL_TREE;
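
      /* Illustrative: association groups constants with literals, so
	 (x + 1) + 2 becomes x + 3 and (x - 5) + 7 becomes x + 2, but
	 only when the overflow checks above permit it.  */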
    case MINUS_EXPR:
      /* Pointer simplifications for subtraction, simple reassociations.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
	{
	  /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
	  if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
	      && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	    {
	      tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	      tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	      tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	      tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
	      return fold_build2_loc (loc, PLUS_EXPR, type,
				  fold_build2_loc (loc, MINUS_EXPR, type,
					       arg00, arg10),
				  fold_build2_loc (loc, MINUS_EXPR, type,
					       arg01, arg11));
	    }
	  /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
	  else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	    {
	      tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	      tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	      tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
				      fold_convert_loc (loc, type, arg1));
	      if (tmp)
		return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
	    }
	}
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, PLUS_EXPR, type, op0,
			    fold_convert_loc (loc, type,
					      TREE_OPERAND (arg1, 0)));

      /* Disable further optimizations involving UPC shared pointers,
	 because integers are not interoperable with shared pointers.
	 (The test below also detects pointer difference between
	 shared pointers, which cannot be folded.)  */
      if (TREE_TYPE (arg0) && POINTER_TYPE_P (TREE_TYPE (arg0))
	  && upc_shared_type_p (TREE_TYPE (TREE_TYPE (arg0))))
	return NULL_TREE;

      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && (FLOAT_TYPE_P (type)
	      || INTEGRAL_TYPE_P (type))
	  && negate_expr_p (arg1)
	  && reorder_operands_p (arg0, arg1))
	return fold_build2_loc (loc, MINUS_EXPR, type,
			    fold_convert_loc (loc, type,
					      negate_expr (arg1)),
			    fold_convert_loc (loc, type,
					      TREE_OPERAND (arg0, 0)));
      /* Convert -A - 1 to ~A.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (arg0) == NEGATE_EXPR
	  && integer_onep (arg1)
	  && !TYPE_OVERFLOW_TRAPS (type))
	return fold_build1_loc (loc, BIT_NOT_EXPR, type,
			    fold_convert_loc (loc, type,
					      TREE_OPERAND (arg0, 0)));
      /* Convert -1 - A to ~A.  */
      if (INTEGRAL_TYPE_P (type)
	  && integer_all_onesp (arg0))
	return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);

      /* X - (X / CST) * CST is X % CST.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (arg1) == MULT_EXPR
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
	  && operand_equal_p (arg0,
			      TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
			      TREE_OPERAND (arg1, 1), 0))
	return
	  fold_convert_loc (loc, type,
			    fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
					 arg0, TREE_OPERAND (arg1, 1)));

      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg0))
	    return negate_expr (fold_convert_loc (loc, type, arg1));
	  if (integer_zerop (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* Fold A - (A & B) into ~B & A.  */
	  if (!TREE_SIDE_EFFECTS (arg0)
	      && TREE_CODE (arg1) == BIT_AND_EXPR)
	    {
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
		{
		  tree arg10 = fold_convert_loc (loc, type,
						 TREE_OPERAND (arg1, 0));
		  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_build1_loc (loc, BIT_NOT_EXPR,
						   type, arg10),
				      fold_convert_loc (loc, type, arg0));
		}
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
		{
		  tree arg11 = fold_convert_loc (loc,
						 type, TREE_OPERAND (arg1, 1));
		  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_build1_loc (loc, BIT_NOT_EXPR,
						   type, arg11),
				      fold_convert_loc (loc, type, arg0));
		}
	    }

	  /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
	     any power of 2 minus 1.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0))
	    {
	      tree mask0 = TREE_OPERAND (arg0, 1);
	      tree mask1 = TREE_OPERAND (arg1, 1);
	      tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);

	      if (operand_equal_p (tem, mask1, 0))
		{
		  tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
				     TREE_OPERAND (arg0, 0), mask1);
		  return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
		}
	    }
	}
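
      /* Worked instance (illustrative): with B == 7, a power of 2 minus 1,
	 (a & ~7) - (a & 7) equals (a ^ 7) - 7 for every a, which is the
	 rewrite performed above.  */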
      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
	 ARG0 is zero and X + ARG0 reduces to X, since that would mean
	 (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	return negate_expr (fold_convert_loc (loc, type, arg1));

      /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
	 __complex__ ( x, -y ).  This is not the same for SNaNs or if
	 signed zeros are involved.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	{
	  tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	  tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
	  tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
	  bool arg0rz = false, arg0iz = false;
	  if ((arg0r && (arg0rz = real_zerop (arg0r)))
	      || (arg0i && (arg0iz = real_zerop (arg0i))))
	    {
	      tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
	      tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
	      if (arg0rz && arg1i && real_zerop (arg1i))
		{
		  tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
					 arg1r ? arg1r
					 : build1 (REALPART_EXPR, rtype, arg1));
		  tree ip = arg0i ? arg0i
		    : build1 (IMAGPART_EXPR, rtype, arg0);
		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		}
	      else if (arg0iz && arg1r && real_zerop (arg1r))
		{
		  tree rp = arg0r ? arg0r
		    : build1 (REALPART_EXPR, rtype, arg0);
		  tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
					 arg1i ? arg1i
					 : build1 (IMAGPART_EXPR, rtype, arg1));
		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		}
	    }
	}

      /* Fold &x - &x.  This can happen from &x.foo - &x.
	 This is unsafe for certain floats even in non-IEEE formats.
	 In IEEE, it is unsafe because it does wrong for NaNs.
	 Also note that operand_equal_p is always false if an operand
	 is volatile.  */

      if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
	  && operand_equal_p (arg0, arg1, 0))
	return build_zero_cst (type);

      /* A - B -> A + (-B) if B is easily negatable.  */
      if (negate_expr_p (arg1)
	  && ((FLOAT_TYPE_P (type)
	       /* Avoid this transformation if B is a positive REAL_CST.  */
	       && (TREE_CODE (arg1) != REAL_CST
		   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
	      || INTEGRAL_TYPE_P (type)))
	return fold_build2_loc (loc, PLUS_EXPR, type,
			    fold_convert_loc (loc, type, arg0),
			    fold_convert_loc (loc, type,
					      negate_expr (arg1)));

      /* Try folding difference of addresses.  */
      {
	HOST_WIDE_INT diff;

	if ((TREE_CODE (arg0) == ADDR_EXPR
	     || TREE_CODE (arg1) == ADDR_EXPR)
	    && ptr_difference_const (arg0, arg1, &diff))
	  return build_int_cst_type (type, diff);
      }

      /* Fold &a[i] - &a[j] to i-j.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
	{
	  tree tem = fold_addr_of_array_ref_difference (loc, type,
							TREE_OPERAND (arg0, 0),
							TREE_OPERAND (arg1, 0));
	  if (tem)
	    return tem;
	}

      if (FLOAT_TYPE_P (type)
	  && flag_unsafe_math_optimizations
	  && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
	  && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
	  && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
	return tem;

      /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
	 one.  Make sure the type is not saturating and has the signedness of
	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
	   || TREE_CODE (arg1) == MULT_EXPR)
	  && !TYPE_SATURATING (type)
	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
	{
	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
	  if (tem)
	    return tem;
	}

      goto associate;

    case MULT_EXPR:
      /* (-A) * (-B) -> A * B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold_build2_loc (loc, MULT_EXPR, type,
			    fold_convert_loc (loc, type,
					      TREE_OPERAND (arg0, 0)),
			    fold_convert_loc (loc, type,
					      negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold_build2_loc (loc, MULT_EXPR, type,
			    fold_convert_loc (loc, type,
					      negate_expr (arg0)),
			    fold_convert_loc (loc, type,
					      TREE_OPERAND (arg1, 0)));

      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return omit_one_operand_loc (loc, type, arg1, arg0);
	  if (integer_onep (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
	  /* Transform x * -1 into -x.  Make sure to do the negation
	     on the original operand with conversions not stripped
	     because we can only strip non-sign-changing conversions.  */
	  if (integer_all_onesp (arg1))
	    return fold_convert_loc (loc, type, negate_expr (op0));
	  /* Transform x * -C into -x * C if x is easily negatable.  */
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && tree_int_cst_sgn (arg1) == -1
	      && negate_expr_p (arg0)
	      && (tem = negate_expr (arg1)) != arg1
	      && !TREE_OVERFLOW (tem))
	    return fold_build2_loc (loc, MULT_EXPR, type,
				fold_convert_loc (loc, type,
						  negate_expr (arg0)),
				tem);

	  /* (a * (1 << b)) is (a << b)  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg1, 0)))
	    return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
				TREE_OPERAND (arg1, 1));
	  if (TREE_CODE (arg0) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg0, 0)))
	    return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
				TREE_OPERAND (arg0, 1));
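
	  /* Illustrative: x * -1 becomes -x, x * -4 becomes -x * 4 when
	     x is easily negated, and a * (1 << b) becomes a << b, per
	     the folds above.  */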
	  /* (A + A) * C -> A * 2 * C  */
	  if (TREE_CODE (arg0) == PLUS_EXPR
	      && TREE_CODE (arg1) == INTEGER_CST
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg0, 1), 0))
	    return fold_build2_loc (loc, MULT_EXPR, type,
				omit_one_operand_loc (loc, type,
						  TREE_OPERAND (arg0, 0),
						  TREE_OPERAND (arg0, 1)),
				fold_build2_loc (loc, MULT_EXPR, type,
					     build_int_cst (type, 2), arg1));

	  strict_overflow_p = false;
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					     &strict_overflow_p)))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying "
					"multiplication"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_convert_loc (loc, type, tem);
	    }

	  /* Optimize z * conj(z) for integer complex numbers.  */
	  if (TREE_CODE (arg0) == CONJ_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    return fold_mult_zconjz (loc, type, arg1);
	  if (TREE_CODE (arg1) == CONJ_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    return fold_mult_zconjz (loc, type, arg0);
	}
      else
	{
	  /* Maybe fold x * 0 to 0.  The expressions aren't the same
	     when x is NaN, since x * 0 is also NaN.  Nor are they the
	     same in modes with signed zeros, since multiplying a
	     negative value by 0 gives -0, not +0.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_zerop (arg1))
	    return omit_one_operand_loc (loc, type, arg1, arg0);
	  /* In IEEE floating point, x*1 is not equivalent to x for snans.
	     Likewise for complex arithmetic with signed zeros.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
		  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	      && real_onep (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* Transform x * -1.0 into -x.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
		  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	      && real_minus_onep (arg1))
	    return fold_convert_loc (loc, type, negate_expr (arg0));

	  /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may change
	     the result for floating point types due to rounding so it is
	     applied only if -fassociative-math was specified.  */
	  if (flag_associative_math
	      && TREE_CODE (arg0) == RDIV_EXPR
	      && TREE_CODE (arg1) == REAL_CST
	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
	    {
	      tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
				      arg1);
	      if (tem)
		return fold_build2_loc (loc, RDIV_EXPR, type, tem,
				    TREE_OPERAND (arg0, 1));
	    }

	  /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
	  if (operand_equal_p (arg0, arg1, 0))
	    {
	      tree tem = fold_strip_sign_ops (arg0);
	      if (tem != NULL_TREE)
		{
		  tem = fold_convert_loc (loc, type, tem);
		  return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
		}
	    }
	  /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
	     This is not the same for NaNs or if signed zeros are
	     involved.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && TREE_CODE (arg1) == COMPLEX_CST
	      && real_zerop (TREE_REALPART (arg1)))
	    {
	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	      if (real_onep (TREE_IMAGPART (arg1)))
		return
		  fold_build2_loc (loc, COMPLEX_EXPR, type,
			       negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
							 rtype, arg0)),
			       fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
	      else if (real_minus_onep (TREE_IMAGPART (arg1)))
		return
		  fold_build2_loc (loc, COMPLEX_EXPR, type,
			       fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
			       negate_expr (fold_build1_loc (loc, REALPART_EXPR,
							 rtype, arg0)));
	    }

	  /* Optimize z * conj(z) for floating point complex numbers.
	     Guarded by flag_unsafe_math_optimizations as non-finite
	     imaginary components don't produce scalar results.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == CONJ_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    return fold_mult_zconjz (loc, type, arg1);
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == CONJ_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    return fold_mult_zconjz (loc, type, arg0);

	  if (flag_unsafe_math_optimizations)
	    {
	      enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	      enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	      /* Optimizations of root(...)*root(...).  */
	      if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
		{
		  tree rootfn, arg;
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg10 = CALL_EXPR_ARG (arg1, 0);

		  /* Optimize sqrt(x)*sqrt(x) as x.  */
		  if (BUILTIN_SQRT_P (fcode0)
		      && operand_equal_p (arg00, arg10, 0)
		      && ! HONOR_SNANS (TYPE_MODE (type)))
		    return arg00;

		  /* Optimize root(x)*root(y) as root(x*y).  */
		  rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		  arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
		  return build_call_expr_loc (loc, rootfn, 1, arg);
		}

	      /* Optimize expN(x)*expN(y) as expN(x+y).  */
	      if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
		{
		  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		  tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
					  CALL_EXPR_ARG (arg0, 0),
					  CALL_EXPR_ARG (arg1, 0));
		  return build_call_expr_loc (loc, expfn, 1, arg);
		}

	      /* Optimizations of pow(...)*pow(...).  */
	      if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
		  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
		  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
		{
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg01 = CALL_EXPR_ARG (arg0, 1);
		  tree arg10 = CALL_EXPR_ARG (arg1, 0);
		  tree arg11 = CALL_EXPR_ARG (arg1, 1);

		  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
		  if (operand_equal_p (arg01, arg11, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		      tree arg = fold_build2_loc (loc, MULT_EXPR, type,
					      arg00, arg10);
		      return build_call_expr_loc (loc, powfn, 2, arg, arg01);
		    }

		  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
		  if (operand_equal_p (arg00, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		      tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
					      arg01, arg11);
		      return build_call_expr_loc (loc, powfn, 2, arg00, arg);
		    }
		}

	      /* Optimize tan(x)*cos(x) as sin(x).  */
	      if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
		   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
		   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
		   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
		   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
		   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
		  && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				      CALL_EXPR_ARG (arg1, 0), 0))
		{
		  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);

		  if (sinfn != NULL_TREE)
		    return build_call_expr_loc (loc, sinfn, 1,
					    CALL_EXPR_ARG (arg0, 0));
		}

	      /* Optimize x*pow(x,c) as pow(x,c+1).  */
	      if (fcode1 == BUILT_IN_POW
		  || fcode1 == BUILT_IN_POWF
		  || fcode1 == BUILT_IN_POWL)
		{
		  tree arg10 = CALL_EXPR_ARG (arg1, 0);
		  tree arg11 = CALL_EXPR_ARG (arg1, 1);
		  if (TREE_CODE (arg11) == REAL_CST
		      && !TREE_OVERFLOW (arg11)
		      && operand_equal_p (arg0, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
		      REAL_VALUE_TYPE c;
		      tree arg;

		      c = TREE_REAL_CST (arg11);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
		    }
		}

	      /* Optimize pow(x,c)*x as pow(x,c+1).  */
	      if (fcode0 == BUILT_IN_POW
		  || fcode0 == BUILT_IN_POWF
		  || fcode0 == BUILT_IN_POWL)
		{
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg01 = CALL_EXPR_ARG (arg0, 1);
		  if (TREE_CODE (arg01) == REAL_CST
		      && !TREE_OVERFLOW (arg01)
		      && operand_equal_p (arg1, arg00, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		      REAL_VALUE_TYPE c;
		      tree arg;

		      c = TREE_REAL_CST (arg01);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      return build_call_expr_loc (loc, powfn, 2, arg1, arg);
		    }
		}

	      /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x.  */
	      if (!in_gimple_form
		  && optimize
		  && operand_equal_p (arg0, arg1, 0))
		{
		  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

		  if (powfn)
		    {
		      tree arg = build_real (type, dconst2);
		      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
		    }
		}
	    }
	}
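
      /* Illustrative (requires -funsafe-math-optimizations): the folds
	 above rewrite sqrt(x)*sqrt(y) as sqrt(x*y), exp(x)*exp(y) as
	 exp(x+y), pow(x,y)*pow(x,z) as pow(x,y+z), and tan(x)*cos(x)
	 as sin(x).  */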
      goto associate;

    case BIT_IOR_EXPR:
    bit_ior:
      if (integer_all_onesp (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* ~X | X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  t1 = build_zero_cst (type);
	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
	  return omit_one_operand_loc (loc, type, t1, arg1);
	}

      /* X | ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  t1 = build_zero_cst (type);
	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
	  return omit_one_operand_loc (loc, type, t1, arg0);
	}

      /* Canonicalize (X & C1) | C2.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  double_int c1, c2, c3, msk;
	  int width = TYPE_PRECISION (type), w;
	  c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
	  c2 = tree_to_double_int (arg1);

	  /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
	  if ((c1 & c2) == c1)
	    return omit_one_operand_loc (loc, type, arg1,
					 TREE_OPERAND (arg0, 0));

	  msk = double_int::mask (width);

	  /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
	  if (msk.and_not (c1 | c2).is_zero ())
	    return fold_build2_loc (loc, BIT_IOR_EXPR, type,
				    TREE_OPERAND (arg0, 0), arg1);

	  /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
	     unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
	     mode which allows further optimizations.  */
	  c1 &= msk;
	  c2 &= msk;
	  c3 = c1.and_not (c2);
	  for (w = BITS_PER_UNIT;
	       w <= width && w <= HOST_BITS_PER_WIDE_INT;
	       w <<= 1)
	    {
	      unsigned HOST_WIDE_INT mask
		= (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
	      if (((c1.low | c2.low) & mask) == mask
		  && (c1.low & ~mask) == 0 && c1.high == 0)
		{
		  c3 = double_int::from_uhwi (mask);
		  break;
		}
	    }
	  if (c3 != c1)
	    return fold_build2_loc (loc, BIT_IOR_EXPR, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, type,
						     TREE_OPERAND (arg0, 0),
						     double_int_to_tree (type,
									 c3)),
				    arg1);
	}
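
      /* Worked bits (illustrative, low byte shown): for (x & 0x0f) | 0x2c,
	 C1 & ~C2 == 0x03, so the expression canonicalizes to
	 (x & 0x03) | 0x2c; and (x & 0x0c) | 0x0c folds to the constant
	 0x0c because (C1 & C2) == C1.  */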
      /* (X & Y) | Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
      /* (X & Y) | X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
      /* X | (X & Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
      /* X | (Y & X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));

      /* (X & ~Y) | (~X & Y) is X ^ Y */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR)
	{
	  tree a0, a1, l0, l1, n0, n1;

	  a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));

	  l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));

	  n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
	  n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);

	  if ((operand_equal_p (n0, a0, 0)
	       && operand_equal_p (n1, a1, 0))
	      || (operand_equal_p (n0, a1, 0)
		  && operand_equal_p (n1, a0, 0)))
	    return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
	}

      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;

      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

	 This results in more efficient code for machines without a NAND
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NAND instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return
	    fold_build1_loc (loc, BIT_NOT_EXPR, type,
			 build2 (BIT_AND_EXPR, type,
				 fold_convert_loc (loc, type,
						   TREE_OPERAND (arg0, 0)),
				 fold_convert_loc (loc, type,
						   TREE_OPERAND (arg1, 0))));
	}
      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_all_onesp (arg1))
	return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* ~X ^ X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  t1 = build_zero_cst (type);
	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
	  return omit_one_operand_loc (loc, type, t1, arg1);
	}

      /* X ^ ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  t1 = build_zero_cst (type);
	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
	  return omit_one_operand_loc (loc, type, t1, arg0);
	}

      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
	 with a constant, and the two constants have no bits in common,
	 we should treat this as a BIT_IOR_EXPR since this may produce more
	 simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (BIT_AND_EXPR,
					 TREE_OPERAND (arg0, 1),
					 TREE_OPERAND (arg1, 1))))
	{
	  code = BIT_IOR_EXPR;
	  goto bit_ior;
	}

      /* (X | Y) ^ X -> Y & ~ X*/
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  tree t2 = TREE_OPERAND (arg0, 1);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
			    arg1);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
			    fold_convert_loc (loc, type, t2),
			    fold_convert_loc (loc, type, t1));
	  return t1;
	}

      /* (Y | X) ^ X -> Y & ~ X*/
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tree t2 = TREE_OPERAND (arg0, 0);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
			    arg1);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
			    fold_convert_loc (loc, type, t2),
			    fold_convert_loc (loc, type, t1));
	  return t1;
	}

      /* X ^ (X | Y) -> Y & ~ X*/
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
	{
	  tree t2 = TREE_OPERAND (arg1, 1);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
			    arg0);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
			    fold_convert_loc (loc, type, t2),
			    fold_convert_loc (loc, type, t1));
	  return t1;
	}

      /* X ^ (Y | X) -> Y & ~ X*/
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
	{
	  tree t2 = TREE_OPERAND (arg1, 0);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
			    arg0);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
			    fold_convert_loc (loc, type, t2),
			    fold_convert_loc (loc, type, t1));
	  return t1;
	}

      /* Convert ~X ^ ~Y to X ^ Y.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	return fold_build2_loc (loc, code, type,
			    fold_convert_loc (loc, type,
					      TREE_OPERAND (arg0, 0)),
			    fold_convert_loc (loc, type,
					      TREE_OPERAND (arg1, 0)));

      /* Convert ~X ^ C to X ^ ~C.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2_loc (loc, code, type,
			    fold_convert_loc (loc, type,
					      TREE_OPERAND (arg0, 0)),
			    fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));

      /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_onep (arg1))
	return fold_build2_loc (loc, EQ_EXPR, type, arg0,
			    build_zero_cst (TREE_TYPE (arg0)));

      /* Fold (X & Y) ^ Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
			      fold_convert_loc (loc, type, arg1));
	}
      /* Fold (X & Y) ^ X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
			      fold_convert_loc (loc, type, arg1));
	}
      /* Fold X ^ (X & Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
			      fold_convert_loc (loc, type, arg0),
			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
	}
      /* Fold X ^ (Y & X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
			      fold_convert_loc (loc, type, arg0));
	}
11392 /* See if this can be simplified into a rotate first. If that
11393 is unsuccessful continue in the association code. */
11397 if (integer_all_onesp (arg1
))
11398 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11399 if (integer_zerop (arg1
))
11400 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
11401 if (operand_equal_p (arg0
, arg1
, 0))
11402 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11404 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11405 if ((TREE_CODE (arg0
) == BIT_NOT_EXPR
11406 || TREE_CODE (arg0
) == TRUTH_NOT_EXPR
11407 || (TREE_CODE (arg0
) == EQ_EXPR
11408 && integer_zerop (TREE_OPERAND (arg0
, 1))))
11409 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11410 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg1
);
11412 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11413 if ((TREE_CODE (arg1
) == BIT_NOT_EXPR
11414 || TREE_CODE (arg1
) == TRUTH_NOT_EXPR
11415 || (TREE_CODE (arg1
) == EQ_EXPR
11416 && integer_zerop (TREE_OPERAND (arg1
, 1))))
11417 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11418 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
11420 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11421 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
11422 && TREE_CODE (arg1
) == INTEGER_CST
11423 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11425 tree tmp1
= fold_convert_loc (loc
, type
, arg1
);
11426 tree tmp2
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11427 tree tmp3
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11428 tmp2
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
, tmp2
, tmp1
);
11429 tmp3
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
, tmp3
, tmp1
);
11431 fold_convert_loc (loc
, type
,
11432 fold_build2_loc (loc
, BIT_IOR_EXPR
,
11433 type
, tmp2
, tmp3
));
11436 /* (X | Y) & Y is (X, Y). */
11437 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
11438 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11439 return omit_one_operand_loc (loc
, type
, arg1
, TREE_OPERAND (arg0
, 0));
11440 /* (X | Y) & X is (Y, X). */
11441 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
11442 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
11443 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
11444 return omit_one_operand_loc (loc
, type
, arg1
, TREE_OPERAND (arg0
, 1));
11445 /* X & (X | Y) is (Y, X). */
11446 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
11447 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0)
11448 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 1)))
11449 return omit_one_operand_loc (loc
, type
, arg0
, TREE_OPERAND (arg1
, 1));
11450 /* X & (Y | X) is (Y, X). */
11451 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
11452 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
11453 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
11454 return omit_one_operand_loc (loc
, type
, arg0
, TREE_OPERAND (arg1
, 0));
	  /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
	  if (TREE_CODE (arg0) == BIT_XOR_EXPR
	      && integer_onep (TREE_OPERAND (arg0, 1))
	      && integer_onep (arg1))
	    {
	      tree tem2;
	      tem = TREE_OPERAND (arg0, 0);
	      tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
	      tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
				      tem, tem2);
	      return fold_build2_loc (loc, EQ_EXPR, type, tem2,
				      build_zero_cst (TREE_TYPE (tem)));
	    }
	  /* Fold ~X & 1 as (X & 1) == 0.  */
	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
	      && integer_onep (arg1))
	    {
	      tree tem2;
	      tem = TREE_OPERAND (arg0, 0);
	      tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
	      tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
				      tem, tem2);
	      return fold_build2_loc (loc, EQ_EXPR, type, tem2,
				      build_zero_cst (TREE_TYPE (tem)));
	    }
	  /* Fold !X & 1 as X == 0.  */
	  if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	      && integer_onep (arg1))
	    {
	      tem = TREE_OPERAND (arg0, 0);
	      return fold_build2_loc (loc, EQ_EXPR, type, tem,
				      build_zero_cst (TREE_TYPE (tem)));
	    }
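	  /* Illustrative examples for the three single-bit folds above:
	     (X ^ 1) & 1 and ~X & 1 both test that the low bit of X is
	     clear, so they become (X & 1) == 0, and !X & 1 simply becomes
	     X == 0.  */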
	  /* Fold (X ^ Y) & Y as ~X & Y.  */
	  if (TREE_CODE (arg0) == BIT_XOR_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	    {
	      tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				      fold_convert_loc (loc, type, arg1));
	    }
	  /* Fold (X ^ Y) & X as ~Y & X.  */
	  if (TREE_CODE (arg0) == BIT_XOR_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	      && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	    {
	      tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				      fold_convert_loc (loc, type, arg1));
	    }
	  /* Fold X & (X ^ Y) as X & ~Y.  */
	  if (TREE_CODE (arg1) == BIT_XOR_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    {
	      tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_convert_loc (loc, type, arg0),
				      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
	    }
	  /* Fold X & (Y ^ X) as ~Y & X.  */
	  if (TREE_CODE (arg1) == BIT_XOR_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	      && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	    {
	      tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				      fold_convert_loc (loc, type, arg0));
	    }
	  /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
	     multiple of 1 << CST.  */
	  if (TREE_CODE (arg1) == INTEGER_CST)
	    {
	      double_int cst1 = tree_to_double_int (arg1);
	      double_int ncst1 = (-cst1).ext (TYPE_PRECISION (TREE_TYPE (arg1)),
					      TYPE_UNSIGNED (TREE_TYPE (arg1)));
	      if ((cst1 & ncst1) == ncst1
		  && multiple_of_p (type, arg0,
				    double_int_to_tree (TREE_TYPE (arg1), ncst1)))
		return fold_convert_loc (loc, type, arg0);
	    }
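	  /* Example (illustrative): with CST == 3 the mask is -8, and
	     (X * 24) & -8 folds to X * 24, because 24 is a multiple of
	     1 << 3 == 8 and the low three bits of X * 24 are therefore
	     already zero.  */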
	  /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
	     bits from CST2.  */
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg0) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	    {
	      int arg1tz
		= tree_to_double_int (TREE_OPERAND (arg0, 1)).trailing_zeros ();
	      if (arg1tz > 0)
		{
		  double_int arg1mask, masked;
		  arg1mask = ~double_int::mask (arg1tz);
		  arg1mask = arg1mask.ext (TYPE_PRECISION (type),
					   TYPE_UNSIGNED (type));
		  masked = arg1mask & tree_to_double_int (arg1);
		  if (masked.is_zero ())
		    return omit_two_operands_loc (loc, type, build_zero_cst (type),
						  arg0, arg1);
		  else if (masked != tree_to_double_int (arg1))
		    return fold_build2_loc (loc, code, type, op0,
					    double_int_to_tree (type, masked));
		}
	    }
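	  /* Examples (illustrative): (X * 4) & 3 folds to 0, because the
	     two trailing zeros of 4 already clear every bit of the mask,
	     while (X * 4) & 7 folds to (X * 4) & 4, since only bit 2 of
	     the mask can survive.  */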
	  /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
	     ((A & N) + B) & M -> (A + B) & M
	     Similarly if (N & M) == 0,
	     ((A | N) + B) & M -> (A + B) & M
	     and for - instead of + (or unary - instead of +)
	     and/or ^ instead of |.
	     If B is constant and (B & M) == 0, fold into A & M.  */
	  if (host_integerp (arg1, 1))
	    {
	      unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
	      if (~cst1 && (cst1 & (cst1 + 1)) == 0
		  && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
		  && (TREE_CODE (arg0) == PLUS_EXPR
		      || TREE_CODE (arg0) == MINUS_EXPR
		      || TREE_CODE (arg0) == NEGATE_EXPR)
		  && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
		      || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
		{
		  tree pmop[2];
		  int which = 0;
		  unsigned HOST_WIDE_INT cst0;

		  /* Now we know that arg0 is (C + D) or (C - D) or
		     -C and arg1 (M) is == (1LL << cst) - 1.
		     Store C into PMOP[0] and D into PMOP[1].  */
		  pmop[0] = TREE_OPERAND (arg0, 0);
		  pmop[1] = NULL;
		  if (TREE_CODE (arg0) != NEGATE_EXPR)
		    {
		      pmop[1] = TREE_OPERAND (arg0, 1);
		      which = 1;
		    }

		  if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
		      || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
			  & cst1) != cst1)
		    which = -1;

		  for (; which >= 0; which--)
		    switch (TREE_CODE (pmop[which]))
		      {
		      case BIT_AND_EXPR:
		      case BIT_IOR_EXPR:
		      case BIT_XOR_EXPR:
			if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
			    != INTEGER_CST)
			  break;
			/* tree_low_cst not used, because we don't care about
			   the upper bits.  */
			cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
			cst0 &= cst1;
			if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
			  {
			    if (cst0 != cst1)
			      break;
			  }
			else if (cst0 != 0)
			  break;
			/* If C or D is of the form (A & N) where
			   (N & M) == M, or of the form (A | N) or
			   (A ^ N) where (N & M) == 0, replace it with A.  */
			pmop[which] = TREE_OPERAND (pmop[which], 0);
			break;
		      case INTEGER_CST:
			/* If C or D is a N where (N & M) == 0, it can be
			   omitted (assumed 0).  */
			if ((TREE_CODE (arg0) == PLUS_EXPR
			     || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
			    && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
			  pmop[which] = NULL;
			break;
		      default:
			break;
		      }

		  /* Only build anything new if we optimized one or both arguments
		     above.  */
		  if (pmop[0] != TREE_OPERAND (arg0, 0)
		      || (TREE_CODE (arg0) != NEGATE_EXPR
			  && pmop[1] != TREE_OPERAND (arg0, 1)))
		    {
		      tree utype = TREE_TYPE (arg0);
		      if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
			{
			  /* Perform the operations in a type that has defined
			     overflow behavior.  */
			  utype = unsigned_type_for (TREE_TYPE (arg0));
			  if (pmop[0] != NULL)
			    pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
			  if (pmop[1] != NULL)
			    pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
			}

		      if (TREE_CODE (arg0) == NEGATE_EXPR)
			tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
		      else if (TREE_CODE (arg0) == PLUS_EXPR)
			{
			  if (pmop[0] != NULL && pmop[1] != NULL)
			    tem = fold_build2_loc (loc, PLUS_EXPR, utype,
						   pmop[0], pmop[1]);
			  else if (pmop[0] != NULL)
			    tem = pmop[0];
			  else if (pmop[1] != NULL)
			    tem = pmop[1];
			  else
			    return build_int_cst (type, 0);
			}
		      else if (pmop[0] == NULL)
			tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
		      else
			tem = fold_build2_loc (loc, MINUS_EXPR, utype,
					       pmop[0], pmop[1]);
		      /* TEM is now the new binary +, - or unary - replacement.  */
		      tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
					     fold_convert_loc (loc, utype, arg1));
		      return fold_convert_loc (loc, type, tem);
		    }
		}
	    }
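	  /* Examples (illustrative) for M == 7: ((A & 15) + B) & 7 becomes
	     (A + B) & 7 since 15 & 7 == 7; ((A | 8) + B) & 7 becomes
	     (A + B) & 7 since 8 & 7 == 0; and ((A & 15) + 16) & 7 becomes
	     A & 7, since the constant 16 has no bits below the mask.  */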
	  t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
	  if (t1 != NULL_TREE)
	    return t1;
	  /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
	  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
	      && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	    {
	      unsigned int prec
		= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

	      if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
		  && (~TREE_INT_CST_LOW (arg1)
		      & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
		return
		  fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	    }

	  /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

	     This results in more efficient code for machines without a NOR
	     instruction.  Combine will canonicalize to the first form
	     which will allow use of NOR instructions provided by the
	     backend if they exist.  */
	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
	      && TREE_CODE (arg1) == BIT_NOT_EXPR)
	    {
	      return fold_build1_loc (loc, BIT_NOT_EXPR, type,
				      build2 (BIT_IOR_EXPR, type,
					      fold_convert_loc (loc, type,
								TREE_OPERAND (arg0, 0)),
					      fold_convert_loc (loc, type,
								TREE_OPERAND (arg1, 0))));
	    }
	  /* If arg0 is derived from the address of an object or function, we may
	     be able to fold this expression using the object or function's
	     alignment.  */
	  if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
	    {
	      unsigned HOST_WIDE_INT modulus, residue;
	      unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);

	      modulus = get_pointer_modulus_and_residue (arg0, &residue,
							 integer_onep (arg1));

	      /* This works because modulus is a power of 2.  If this weren't the
		 case, we'd have to replace it by its greatest power-of-2
		 divisor: modulus & -modulus.  */
	      if (low < modulus)
		return build_int_cst (type, residue & low);
	    }
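	  /* Example (illustrative): if ARG0 is the address of a variable
	     with known 4-byte alignment, the modulus is 4 and the residue
	     is 0, so ARG0 & 3 folds to the constant 0.  */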
	  /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
	     (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
	     if the new mask might be further optimized.  */
	  if ((TREE_CODE (arg0) == LSHIFT_EXPR
	       || TREE_CODE (arg0) == RSHIFT_EXPR)
	      && host_integerp (TREE_OPERAND (arg0, 1), 1)
	      && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
	      && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
		 < TYPE_PRECISION (TREE_TYPE (arg0))
	      && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
	      && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
	    {
	      unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
	      unsigned HOST_WIDE_INT mask
		= tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
	      unsigned HOST_WIDE_INT newmask, zerobits = 0;
	      tree shift_type = TREE_TYPE (arg0);

	      if (TREE_CODE (arg0) == LSHIFT_EXPR)
		zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
	      else if (TREE_CODE (arg0) == RSHIFT_EXPR
		       && TYPE_PRECISION (TREE_TYPE (arg0))
			  == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
		{
		  unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
		  tree arg00 = TREE_OPERAND (arg0, 0);
		  /* See if more bits can be proven as zero because of
		     zero extension.  */
		  if (TREE_CODE (arg00) == NOP_EXPR
		      && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
		    {
		      tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
		      if (TYPE_PRECISION (inner_type)
			  == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
			  && TYPE_PRECISION (inner_type) < prec)
			{
			  prec = TYPE_PRECISION (inner_type);
			  /* See if we can shorten the right shift.  */
			  if (shiftc < prec)
			    shift_type = inner_type;
			}
		    }
		  zerobits = ~(unsigned HOST_WIDE_INT) 0;
		  zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
		  zerobits <<= prec - shiftc;
		  /* For arithmetic shift if sign bit could be set, zerobits
		     can contain actually sign bits, so no transformation is
		     possible, unless MASK masks them all away.  In that
		     case the shift needs to be converted into logical shift.  */
		  if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
		      && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
		    {
		      if ((mask & zerobits) == 0)
			shift_type = unsigned_type_for (TREE_TYPE (arg0));
		      else
			zerobits = 0;
		    }
		}

	      /* ((X << 16) & 0xff00) is (X, 0).  */
	      if ((mask & zerobits) == mask)
		return omit_one_operand_loc (loc, type,
					     build_int_cst (type, 0), arg0);

	      newmask = mask | zerobits;
	      if (newmask != mask && (newmask & (newmask + 1)) == 0)
		{
		  unsigned int prec;

		  /* Only do the transformation if NEWMASK is some integer
		     mode's mask.  */
		  for (prec = BITS_PER_UNIT;
		       prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
		    if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
		      break;
		  if (prec < HOST_BITS_PER_WIDE_INT
		      || newmask == ~(unsigned HOST_WIDE_INT) 0)
		    {
		      tree newmaskt;

		      if (shift_type != TREE_TYPE (arg0))
			{
			  tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
						 fold_convert_loc (loc, shift_type,
								   TREE_OPERAND (arg0, 0)),
						 TREE_OPERAND (arg0, 1));
			  tem = fold_convert_loc (loc, type, tem);
			}
		      else
			tem = op0;
		      newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
		      if (!tree_int_cst_equal (newmaskt, arg1))
			return fold_build2_loc (loc, BIT_AND_EXPR, type,
						tem, newmaskt);
		    }
		}
	    }
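	  /* Example (illustrative, assuming a 64-bit HOST_WIDE_INT): for a
	     32-bit unsigned X, every bit above bit 7 of X >> 24 is known
	     zero, so in ((X >> 24) & 0xff) the mask widens to 0xffffffff;
	     the resulting BIT_AND is then trivially dropped by the
	     all-ones fold at the top of this case.  */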

	  goto associate;

	case RDIV_EXPR:
	  /* Don't touch a floating-point divide by zero unless the mode
	     of the constant can represent infinity.  */
	  if (TREE_CODE (arg1) == REAL_CST
	      && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
	      && real_zerop (arg1))
	    return NULL_TREE;

	  /* Optimize A / A to 1.0 if we don't care about
	     NaNs or Infinities.  Skip the transformation
	     for non-real operands.  */
	  if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
	      && operand_equal_p (arg0, arg1, 0))
	    {
	      tree r = build_real (TREE_TYPE (arg0), dconst1);

	      return omit_two_operands_loc (loc, type, r, arg0, arg1);
	    }

	  /* The complex version of the above A / A optimization.  */
	  if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && operand_equal_p (arg0, arg1, 0))
	    {
	      tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
	      if (! HONOR_NANS (TYPE_MODE (elem_type))
		  && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
		{
		  tree r = build_real (elem_type, dconst1);
		  /* omit_two_operands will call fold_convert for us.  */
		  return omit_two_operands_loc (loc, type, r, arg0, arg1);
		}
	    }

	  /* (-A) / (-B) -> A / B  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	    return fold_build2_loc (loc, RDIV_EXPR, type,
				    TREE_OPERAND (arg0, 0),
				    negate_expr (arg1));
	  if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	    return fold_build2_loc (loc, RDIV_EXPR, type,
				    negate_expr (arg0),
				    TREE_OPERAND (arg1, 0));
	  /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_onep (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_minus_onep (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type,
							  negate_expr (arg0)));

	  /* If ARG1 is a constant, we can convert this to a multiply by the
	     reciprocal.  This does not have the same rounding properties,
	     so only do this if -freciprocal-math.  We can actually
	     always safely do it if ARG1 is a power of two, but it's hard to
	     tell if it is or not in a portable manner.  */
	  if (optimize
	      && (TREE_CODE (arg1) == REAL_CST
		  || (TREE_CODE (arg1) == COMPLEX_CST
		      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
		  || (TREE_CODE (arg1) == VECTOR_CST
		      && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
	    {
	      if (flag_reciprocal_math
		  && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
		return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
	      /* Find the reciprocal if optimizing and the result is exact.
		 TODO: Complex reciprocal not implemented.  */
	      if (TREE_CODE (arg1) != COMPLEX_CST)
		{
		  tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);

		  if (inverse)
		    return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
		}
	    }
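	  /* Examples (illustrative): with -freciprocal-math, X / 10.0
	     becomes X * (1.0/10.0) even though the reciprocal is inexact;
	     without it, the exact_inverse path still turns X / 4.0 into
	     X * 0.25, since 0.25 is exactly representable.  */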
	  /* Convert A/B/C to A/(B*C).  */
	  if (flag_reciprocal_math
	      && TREE_CODE (arg0) == RDIV_EXPR)
	    return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
				    fold_build2_loc (loc, MULT_EXPR, type,
						     TREE_OPERAND (arg0, 1), arg1));

	  /* Convert A/(B/C) to (A/B)*C.  */
	  if (flag_reciprocal_math
	      && TREE_CODE (arg1) == RDIV_EXPR)
	    return fold_build2_loc (loc, MULT_EXPR, type,
				    fold_build2_loc (loc, RDIV_EXPR, type, arg0,
						     TREE_OPERAND (arg1, 0)),
				    TREE_OPERAND (arg1, 1));

	  /* Convert C1/(X*C2) into (C1/C2)/X.  */
	  if (flag_reciprocal_math
	      && TREE_CODE (arg1) == MULT_EXPR
	      && TREE_CODE (arg0) == REAL_CST
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
	    {
	      tree tem = const_binop (RDIV_EXPR, arg0,
				      TREE_OPERAND (arg1, 1));
	      if (tem)
		return fold_build2_loc (loc, RDIV_EXPR, type, tem,
					TREE_OPERAND (arg1, 0));
	    }
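	  /* Example (illustrative): a / b / c becomes a / (b * c) and
	     a / (b / c) becomes (a / b) * c, trading a second division for
	     a multiplication; C1/(X*C2) likewise becomes (C1/C2)/X so the
	     constant division folds at compile time.  */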
	  if (flag_unsafe_math_optimizations)
	    {
	      enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	      enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	      /* Optimize sin(x)/cos(x) as tan(x).  */
	      if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
		   || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
		   || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
		  && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				      CALL_EXPR_ARG (arg1, 0), 0))
		{
		  tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

		  if (tanfn != NULL_TREE)
		    return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
		}

	      /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
	      if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
		   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
		   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
		  && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				      CALL_EXPR_ARG (arg1, 0), 0))
		{
		  tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

		  if (tanfn != NULL_TREE)
		    {
		      tree tmp = build_call_expr_loc (loc, tanfn, 1,
						      CALL_EXPR_ARG (arg0, 0));
		      return fold_build2_loc (loc, RDIV_EXPR, type,
					      build_real (type, dconst1), tmp);
		    }
		}

	      /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
		 NaNs or Infinities.  */
	      if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
		   || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
		   || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
		{
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg01 = CALL_EXPR_ARG (arg1, 0);

		  if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
		      && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
		      && operand_equal_p (arg00, arg01, 0))
		    {
		      tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

		      if (cosfn != NULL_TREE)
			return build_call_expr_loc (loc, cosfn, 1, arg00);
		    }
		}

	      /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
		 NaNs or Infinities.  */
	      if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
		   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
		   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
		{
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg01 = CALL_EXPR_ARG (arg1, 0);

		  if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
		      && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
		      && operand_equal_p (arg00, arg01, 0))
		    {
		      tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

		      if (cosfn != NULL_TREE)
			{
			  tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
			  return fold_build2_loc (loc, RDIV_EXPR, type,
						  build_real (type, dconst1),
						  tmp);
			}
		    }
		}

	      /* Optimize pow(x,c)/x as pow(x,c-1).  */
	      if (fcode0 == BUILT_IN_POW
		  || fcode0 == BUILT_IN_POWF
		  || fcode0 == BUILT_IN_POWL)
		{
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg01 = CALL_EXPR_ARG (arg0, 1);
		  if (TREE_CODE (arg01) == REAL_CST
		      && !TREE_OVERFLOW (arg01)
		      && operand_equal_p (arg1, arg00, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		      REAL_VALUE_TYPE c;
		      tree arg;

		      c = TREE_REAL_CST (arg01);
		      real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      return build_call_expr_loc (loc, powfn, 2, arg1, arg);
		    }
		}
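	      /* Example (illustrative): pow (x, 3.0) / x becomes
		 pow (x, 2.0); real_arithmetic folds the exponent
		 adjustment c - 1 at compile time.  */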
	      /* Optimize a/root(b/c) into a*root(c/b).  */
	      if (BUILTIN_ROOT_P (fcode1))
		{
		  tree rootarg = CALL_EXPR_ARG (arg1, 0);

		  if (TREE_CODE (rootarg) == RDIV_EXPR)
		    {
		      tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
		      tree b = TREE_OPERAND (rootarg, 0);
		      tree c = TREE_OPERAND (rootarg, 1);

		      tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);

		      tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
		      return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
		    }
		}

	      /* Optimize x/expN(y) into x*expN(-y).  */
	      if (BUILTIN_EXPONENT_P (fcode1))
		{
		  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
		  tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
		  arg1 = build_call_expr_loc (loc,
					      expfn, 1,
					      fold_convert_loc (loc, type, arg));
		  return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
		}

	      /* Optimize x/pow(y,z) into x*pow(y,-z).  */
	      if (fcode1 == BUILT_IN_POW
		  || fcode1 == BUILT_IN_POWF
		  || fcode1 == BUILT_IN_POWL)
		{
		  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
		  tree arg10 = CALL_EXPR_ARG (arg1, 0);
		  tree arg11 = CALL_EXPR_ARG (arg1, 1);
		  tree neg11 = fold_convert_loc (loc, type,
						 negate_expr (arg11));
		  arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
		  return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
		}
	    }
	  return NULL_TREE;
	case TRUNC_DIV_EXPR:
	  /* Optimize (X & (-A)) / A where A is a power of 2,
	     to X >> log2(A).  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
	      && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
	    {
	      tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
					  arg1, TREE_OPERAND (arg0, 1));
	      if (sum && integer_zerop (sum))
		{
		  unsigned long pow2;

		  if (TREE_INT_CST_LOW (arg1))
		    pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
		  else
		    pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
			   + HOST_BITS_PER_WIDE_INT;

		  return fold_build2_loc (loc, RSHIFT_EXPR, type,
					  TREE_OPERAND (arg0, 0),
					  build_int_cst (integer_type_node, pow2));
		}
	    }
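	  /* Example (illustrative): for signed X, (X & -8) / 8 becomes
	     X >> 3; the BIT_AND has already cleared the three low bits
	     that the division would discard, so an arithmetic shift is
	     exact.  */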

	  /* Fall through */

	case FLOOR_DIV_EXPR:
	  /* Simplify A / (B << N) where A and B are positive and B is
	     a power of 2, to A >> (N + log2(B)).  */
	  strict_overflow_p = false;
	  if (TREE_CODE (arg1) == LSHIFT_EXPR
	      && (TYPE_UNSIGNED (type)
		  || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
	    {
	      tree sval = TREE_OPERAND (arg1, 0);
	      if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
		{
		  tree sh_cnt = TREE_OPERAND (arg1, 1);
		  unsigned long pow2;

		  if (TREE_INT_CST_LOW (sval))
		    pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
		  else
		    pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
			   + HOST_BITS_PER_WIDE_INT;

		  if (strict_overflow_p)
		    fold_overflow_warning (("assuming signed overflow does not "
					    "occur when simplifying A / (B << N)"),
					   WARN_STRICT_OVERFLOW_MISC);

		  sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
					    sh_cnt,
					    build_int_cst (TREE_TYPE (sh_cnt),
							   pow2));
		  return fold_build2_loc (loc, RSHIFT_EXPR, type,
					  fold_convert_loc (loc, type, arg0),
					  sh_cnt);
		}
	    }
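	  /* Example (illustrative): for unsigned A, A / (2 << N) becomes
	     A >> (N + 1), since log2 (2) == 1.  */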
	  /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
	     TRUNC_DIV_EXPR.  Rewrite into the latter in this case.  */
	  if (INTEGRAL_TYPE_P (type)
	      && TYPE_UNSIGNED (type)
	      && code == FLOOR_DIV_EXPR)
	    return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);

	  /* Fall through */

	case ROUND_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case EXACT_DIV_EXPR:
	  if (integer_onep (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
	  if (integer_zerop (arg1))
	    return NULL_TREE;
	  /* X / -1 is -X.  */
	  if (!TYPE_UNSIGNED (type)
	      && TREE_CODE (arg1) == INTEGER_CST
	      && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
	      && TREE_INT_CST_HIGH (arg1) == -1)
	    return fold_convert_loc (loc, type, negate_expr (arg0));

	  /* Convert -A / -B to A / B when the type is signed and overflow is
	     undefined.  */
	  if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	      && TREE_CODE (arg0) == NEGATE_EXPR
	      && negate_expr_p (arg1))
	    {
	      if (INTEGRAL_TYPE_P (type))
		fold_overflow_warning (("assuming signed overflow does not occur "
					"when distributing negation across "
					"division"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, code, type,
				      fold_convert_loc (loc, type,
							TREE_OPERAND (arg0, 0)),
				      fold_convert_loc (loc, type,
							negate_expr (arg1)));
	    }
	  if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	      && TREE_CODE (arg1) == NEGATE_EXPR
	      && negate_expr_p (arg0))
	    {
	      if (INTEGRAL_TYPE_P (type))
		fold_overflow_warning (("assuming signed overflow does not occur "
					"when distributing negation across "
					"division"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, code, type,
				      fold_convert_loc (loc, type,
							negate_expr (arg0)),
				      fold_convert_loc (loc, type,
							TREE_OPERAND (arg1, 0)));
	    }

	  /* If arg0 is a multiple of arg1, then rewrite to the fastest div
	     operation, EXACT_DIV_EXPR.

	     Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
	     At one time others generated faster code, it's not clear if they do
	     after the last round to changes to the DIV code in expmed.c.  */
	  if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
	      && multiple_of_p (type, arg0, arg1))
	    return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);

	  strict_overflow_p = false;
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					     &strict_overflow_p)))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not occur "
					"when simplifying division"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_convert_loc (loc, type, tem);
	    }

	  return NULL_TREE;
	case CEIL_MOD_EXPR:
	case FLOOR_MOD_EXPR:
	case ROUND_MOD_EXPR:
	case TRUNC_MOD_EXPR:
	  /* X % 1 is always zero, but be sure to preserve any side
	     effects in X.  */
	  if (integer_onep (arg1))
	    return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

	  /* X % 0, return X % 0 unchanged so that we can get the
	     proper warnings and errors.  */
	  if (integer_zerop (arg1))
	    return NULL_TREE;

	  /* 0 % X is always zero, but be sure to preserve any side
	     effects in X.  Place this after checking for X == 0.  */
	  if (integer_zerop (arg0))
	    return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

	  /* X % -1 is zero.  */
	  if (!TYPE_UNSIGNED (type)
	      && TREE_CODE (arg1) == INTEGER_CST
	      && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
	      && TREE_INT_CST_HIGH (arg1) == -1)
	    return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

	  /* X % -C is the same as X % C.  */
	  if (code == TRUNC_MOD_EXPR
	      && !TYPE_UNSIGNED (type)
	      && TREE_CODE (arg1) == INTEGER_CST
	      && !TREE_OVERFLOW (arg1)
	      && TREE_INT_CST_HIGH (arg1) < 0
	      && !TYPE_OVERFLOW_TRAPS (type)
	      /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
	      && !sign_bit_p (arg1, arg1))
	    return fold_build2_loc (loc, code, type,
				    fold_convert_loc (loc, type, arg0),
				    fold_convert_loc (loc, type,
						      negate_expr (arg1)));

	  /* X % -Y is the same as X % Y.  */
	  if (code == TRUNC_MOD_EXPR
	      && !TYPE_UNSIGNED (type)
	      && TREE_CODE (arg1) == NEGATE_EXPR
	      && !TYPE_OVERFLOW_TRAPS (type))
	    return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
				    fold_convert_loc (loc, type,
						      TREE_OPERAND (arg1, 0)));

	  strict_overflow_p = false;
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					     &strict_overflow_p)))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not occur "
					"when simplifying modulus"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_convert_loc (loc, type, tem);
	    }

	  /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
	     i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
	  if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
	      && (TYPE_UNSIGNED (type)
		  || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
	    {
	      tree c = arg1;
	      /* Also optimize A % (C << N) where C is a power of 2,
		 to A & ((C << N) - 1).  */
	      if (TREE_CODE (arg1) == LSHIFT_EXPR)
		c = TREE_OPERAND (arg1, 0);

	      if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
		{
		  tree mask
		    = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
				       build_int_cst (TREE_TYPE (arg1), 1));
		  if (strict_overflow_p)
		    fold_overflow_warning (("assuming signed overflow does not "
					    "occur when simplifying "
					    "X % (power of two)"),
					   WARN_STRICT_OVERFLOW_MISC);
		  return fold_build2_loc (loc, BIT_AND_EXPR, type,
					  fold_convert_loc (loc, type, arg0),
					  fold_convert_loc (loc, type, mask));
		}
	    }
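	  /* Examples (illustrative): for unsigned X, X % 16 becomes
	     X & 15, and X % (2 << N) becomes X & ((2 << N) - 1).  */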

	  return NULL_TREE;

	case LROTATE_EXPR:
	case RROTATE_EXPR:
	  if (integer_all_onesp (arg0))
	    return omit_one_operand_loc (loc, type, arg0, arg1);
	  goto shift;

	case RSHIFT_EXPR:
	  /* Optimize -1 >> x for arithmetic right shifts.  */
	  if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
	      && tree_expr_nonnegative_p (arg1))
	    return omit_one_operand_loc (loc, type, arg0, arg1);
	  /* ... fall through ...  */

	case LSHIFT_EXPR:
	shift:
	  if (integer_zerop (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
	  if (integer_zerop (arg0))
	    return omit_one_operand_loc (loc, type, arg0, arg1);

	  /* Since negative shift count is not well-defined,
	     don't try to compute it in the compiler.  */
	  if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
	    return NULL_TREE;

	  /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
	  if (TREE_CODE (op0) == code && host_integerp (arg1, false)
	      && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
	      && host_integerp (TREE_OPERAND (arg0, 1), false)
	      && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
	    {
	      HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
				   + TREE_INT_CST_LOW (arg1));

	      /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
		 being well defined.  */
	      if (low >= TYPE_PRECISION (type))
		{
		  if (code == LROTATE_EXPR || code == RROTATE_EXPR)
		    low = low % TYPE_PRECISION (type);
		  else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
		    return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
						 TREE_OPERAND (arg0, 0));
		  else
		    low = TYPE_PRECISION (type) - 1;
		}

	      return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				      build_int_cst (type, low));
	    }
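	  /* Examples (illustrative): (X >> 3) >> 5 becomes X >> 8; for a
	     32-bit unsigned X, (X >> 20) >> 20 folds to 0 outright, while
	     rotate counts are instead reduced modulo the precision.  */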
	  /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
	     into x & ((unsigned)-1 >> c) for unsigned types.  */
	  if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
	       || (TYPE_UNSIGNED (type)
		   && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
	      && host_integerp (arg1, false)
	      && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
	      && host_integerp (TREE_OPERAND (arg0, 1), false)
	      && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
	    {
	      HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
	      HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
	      tree lshift;
	      tree arg00;

	      if (low0 == low1)
		{
		  arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));

		  lshift = build_int_cst (type, -1);
		  lshift = int_const_binop (code, lshift, arg1);

		  return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
		}
	    }
	  /* Rewrite an LROTATE_EXPR by a constant into an
	     RROTATE_EXPR by a new constant.  */
	  if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
	    {
	      tree tem = build_int_cst (TREE_TYPE (arg1),
					TYPE_PRECISION (type));
	      tem = const_binop (MINUS_EXPR, tem, arg1);
	      return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
	    }
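	  /* Example (illustrative): in a 32-bit type, a rotate left by 8
	     becomes a rotate right by 24.  */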
	  /* If we have a rotate of a bit operation with the rotate count and
	     the second operand of the bit operation both constant,
	     permute the two operations.  */
	  if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	      && (TREE_CODE (arg0) == BIT_AND_EXPR
		  || TREE_CODE (arg0) == BIT_IOR_EXPR
		  || TREE_CODE (arg0) == BIT_XOR_EXPR)
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	    return fold_build2_loc (loc, TREE_CODE (arg0), type,
				    fold_build2_loc (loc, code, type,
						     TREE_OPERAND (arg0, 0), arg1),
				    fold_build2_loc (loc, code, type,
						     TREE_OPERAND (arg0, 1), arg1));

	  /* Two consecutive rotates adding up to the precision of the
	     type can be ignored.  */
	  if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg0) == RROTATE_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      && TREE_INT_CST_HIGH (arg1) == 0
	      && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
	      && ((TREE_INT_CST_LOW (arg1)
		   + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
		  == (unsigned int) TYPE_PRECISION (type)))
	    return TREE_OPERAND (arg0, 0);

	  /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
		  (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
	     if the latter can be further optimized.  */
	  if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
	      && TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	    {
	      tree mask = fold_build2_loc (loc, code, type,
					   fold_convert_loc (loc, type,
							     TREE_OPERAND (arg0, 1)),
					   arg1);
	      tree shift = fold_build2_loc (loc, code, type,
					    fold_convert_loc (loc, type,
							      TREE_OPERAND (arg0, 0)),
					    arg1);
	      tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
	      if (tem)
		return tem;
	    }

	  return NULL_TREE;

	case MIN_EXPR:
	  if (operand_equal_p (arg0, arg1, 0))
	    return omit_one_operand_loc (loc, type, arg0, arg1);
	  if (INTEGRAL_TYPE_P (type)
	      && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
	    return omit_one_operand_loc (loc, type, arg1, arg0);
	  tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
	  if (tem)
	    return tem;
	  goto associate;

	case MAX_EXPR:
	  if (operand_equal_p (arg0, arg1, 0))
	    return omit_one_operand_loc (loc, type, arg0, arg1);
	  if (INTEGRAL_TYPE_P (type)
	      && TYPE_MAX_VALUE (type)
	      && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
	    return omit_one_operand_loc (loc, type, arg1, arg0);
	  tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
	  if (tem)
	    return tem;
	  goto associate;

	case TRUTH_ANDIF_EXPR:
	  /* Note that the operands of this must be ints
	     and their values must be 0 or 1.
	     ("true" is a fixed value perhaps depending on the language.)  */
	  /* If first arg is constant zero, return it.  */
	  if (integer_zerop (arg0))
	    return fold_convert_loc (loc, type, arg0);
	case TRUTH_AND_EXPR:
	  /* If either arg is constant true, drop it.  */
	  if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
	  if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
	      /* Preserve sequence points.  */
	      && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
	  /* If second arg is constant zero, result is zero, but first arg
	     must be evaluated.  */
	  if (integer_zerop (arg1))
	    return omit_one_operand_loc (loc, type, arg1, arg0);
	  /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
	     case will be handled here.  */
	  if (integer_zerop (arg0))
	    return omit_one_operand_loc (loc, type, arg0, arg1);

	  /* !X && X is always false.  */
	  if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
	  /* X && !X is always false.  */
	  if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

	  /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
	     means A >= Y && A != MAX, but in this case we know that
	     A < X <= MAX.  */

	  if (!TREE_SIDE_EFFECTS (arg0)
	      && !TREE_SIDE_EFFECTS (arg1))
	    {
	      tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
	      if (tem && !operand_equal_p (tem, arg0, 0))
		return fold_build2_loc (loc, code, type, tem, arg1);

	      tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
	      if (tem && !operand_equal_p (tem, arg1, 0))
		return fold_build2_loc (loc, code, type, arg0, tem);
	    }

	  if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
	      != NULL_TREE)
	    return tem;

	  return NULL_TREE;
	case TRUTH_ORIF_EXPR:
	  /* Note that the operands of this must be ints
	     and their values must be 0 or true.
	     ("true" is a fixed value perhaps depending on the language.)  */
	  /* If first arg is constant true, return it.  */
	  if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	    return fold_convert_loc (loc, type, arg0);
	case TRUTH_OR_EXPR:
	  /* If either arg is constant zero, drop it.  */
	  if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
	  if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
	      /* Preserve sequence points.  */
	      && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
	  /* If second arg is constant true, result is true, but we must
	     evaluate first arg.  */
	  if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
	    return omit_one_operand_loc (loc, type, arg1, arg0);
	  /* Likewise for first arg, but note this only occurs here for
	     TRUTH_OR_EXPR.  */
	  if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	    return omit_one_operand_loc (loc, type, arg0, arg1);

	  /* !X || X is always true.  */
	  if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    return omit_one_operand_loc (loc, type, integer_one_node, arg1);
	  /* X || !X is always true.  */
	  if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    return omit_one_operand_loc (loc, type, integer_one_node, arg0);

	  /* (X && !Y) || (!X && Y) is X ^ Y */
	  if (TREE_CODE (arg0) == TRUTH_AND_EXPR
	      && TREE_CODE (arg1) == TRUTH_AND_EXPR)
	    {
	      tree a0, a1, l0, l1, n0, n1;

	      a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	      a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));

	      l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	      l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));

	      n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
	      n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);

	      if ((operand_equal_p (n0, a0, 0)
		   && operand_equal_p (n1, a1, 0))
		  || (operand_equal_p (n0, a1, 0)
		      && operand_equal_p (n1, a0, 0)))
		return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
	    }

	  if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
	      != NULL_TREE)
	    return tem;

	  return NULL_TREE;
	case TRUTH_XOR_EXPR:
	  /* If the second arg is constant zero, drop it.  */
	  if (integer_zerop (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
	  /* If the second arg is constant true, this is a logical inversion.  */
	  if (integer_onep (arg1))
	    {
	      /* Only call invert_truthvalue if operand is a truth value.  */
	      if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
		tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
	      else
		tem = invert_truthvalue_loc (loc, arg0);
	      return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	    }
	  /* Identical arguments cancel to zero.  */
	  if (operand_equal_p (arg0, arg1, 0))
	    return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

	  /* !X ^ X is always true.  */
	  if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    return omit_one_operand_loc (loc, type, integer_one_node, arg1);

	  /* X ^ !X is always true.  */
	  if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    return omit_one_operand_loc (loc, type, integer_one_node, arg0);

	  return NULL_TREE;
	case EQ_EXPR:
	case NE_EXPR:
	  STRIP_NOPS (arg0);
	  STRIP_NOPS (arg1);

	  tem = fold_comparison (loc, code, type, op0, op1);
	  if (tem != NULL_TREE)
	    return tem;

	  /* bool_var != 0 becomes bool_var.  */
	  if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	      && code == NE_EXPR)
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* bool_var == 1 becomes bool_var.  */
	  if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	      && code == EQ_EXPR)
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* bool_var != 1 becomes !bool_var.  */
	  if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	      && code == NE_EXPR)
	    return fold_convert_loc (loc, type,
				     fold_build1_loc (loc, TRUTH_NOT_EXPR,
						      TREE_TYPE (arg0), arg0));

	  /* bool_var == 0 becomes !bool_var.  */
	  if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	      && code == EQ_EXPR)
	    return fold_convert_loc (loc, type,
				     fold_build1_loc (loc, TRUTH_NOT_EXPR,
						      TREE_TYPE (arg0), arg0));

	  /* !exp != 0 becomes !exp */
	  if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
	      && code == NE_EXPR)
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* If this is an equality comparison of the address of two non-weak,
	     unaliased symbols neither of which are extern (since we do not
	     have access to attributes for externs), then we know the result.  */
	  if (TREE_CODE (arg0) == ADDR_EXPR
	      && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
	      && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
	      && ! lookup_attribute ("alias",
				     DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
	      && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
	      && TREE_CODE (arg1) == ADDR_EXPR
	      && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
	      && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
	      && ! lookup_attribute ("alias",
				     DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
	      && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
	    {
	      /* We know that we're looking at the address of two
		 non-weak, unaliased, static _DECL nodes.

		 It is both wasteful and incorrect to call operand_equal_p
		 to compare the two ADDR_EXPR nodes.  It is wasteful in that
		 all we need to do is test pointer equality for the arguments
		 to the two ADDR_EXPR nodes.  It is incorrect to use
		 operand_equal_p as that function is NOT equivalent to a
		 C equality test.  It can in fact return false for two
		 objects which would test as equal using the C equality
		 operator.  */
	      bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
	      return constant_boolean_node (equal
					    ? code == EQ_EXPR : code != EQ_EXPR,
					    type);
	    }
12702 a MINUS_EXPR of a constant, we can convert it into a comparison with
12703 a revised constant as long as no overflow occurs. */
12704 if (TREE_CODE (arg1
) == INTEGER_CST
12705 && (TREE_CODE (arg0
) == PLUS_EXPR
12706 || TREE_CODE (arg0
) == MINUS_EXPR
)
12707 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
12708 && 0 != (tem
= const_binop (TREE_CODE (arg0
) == PLUS_EXPR
12709 ? MINUS_EXPR
: PLUS_EXPR
,
12710 fold_convert_loc (loc
, TREE_TYPE (arg0
),
12712 TREE_OPERAND (arg0
, 1)))
12713 && !TREE_OVERFLOW (tem
))
12714 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
12716 /* Similarly for a NEGATE_EXPR. */
12717 if (TREE_CODE (arg0
) == NEGATE_EXPR
12718 && TREE_CODE (arg1
) == INTEGER_CST
12719 && 0 != (tem
= negate_expr (fold_convert_loc (loc
, TREE_TYPE (arg0
),
12721 && TREE_CODE (tem
) == INTEGER_CST
12722 && !TREE_OVERFLOW (tem
))
12723 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
12725 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12726 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12727 && TREE_CODE (arg1
) == INTEGER_CST
12728 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12729 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
12730 fold_build2_loc (loc
, BIT_XOR_EXPR
, TREE_TYPE (arg0
),
12731 fold_convert_loc (loc
,
12734 TREE_OPERAND (arg0
, 1)));
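	  /* Example (illustrative): (X ^ 5) == 3 becomes X == 6, since
	     5 ^ 3 == 6.  */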
	  /* Transform comparisons of the form X +- Y CMP X to Y CMP 0.  */
	  if ((TREE_CODE (arg0) == PLUS_EXPR
	       || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	       || TREE_CODE (arg0) == MINUS_EXPR)
	      && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
									    0)),
				  arg1, 0)
	      && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
		  || POINTER_TYPE_P (TREE_TYPE (arg0))))
	    {
	      tree val = TREE_OPERAND (arg0, 1);
	      return omit_two_operands_loc (loc, type,
					    fold_build2_loc (loc, code, type,
							     val,
							     build_int_cst (TREE_TYPE (val),
									    0)),
					    TREE_OPERAND (arg0, 0), arg1);
	    }

	  /* Transform comparisons of the form C - X CMP X if C % 2 == 1.  */
	  if (TREE_CODE (arg0) == MINUS_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
	      && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
									    1)),
				  arg1, 0)
	      && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
	    return omit_two_operands_loc (loc, type,
					  code == NE_EXPR
					  ? boolean_true_node : boolean_false_node,
					  TREE_OPERAND (arg0, 1), arg1);

	  /* If we have X - Y == 0, we can convert that to X == Y and similarly
	     for !=.  Don't do this for ordered comparisons due to overflow.  */
	  if (TREE_CODE (arg0) == MINUS_EXPR
	      && integer_zerop (arg1))
	    return fold_build2_loc (loc, code, type,
				    TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

	  /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
	  if (TREE_CODE (arg0) == ABS_EXPR
	      && (integer_zerop (arg1) || real_zerop (arg1)))
	    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);

	  /* If this is an EQ or NE comparison with zero and ARG0 is
	     (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
	     two operations, but the latter can be done in one less insn
	     on machines that have only two-operand insns or on which a
	     constant cannot be the first operand.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && integer_zerop (arg1))
	    {
	      tree arg00 = TREE_OPERAND (arg0, 0);
	      tree arg01 = TREE_OPERAND (arg0, 1);
	      if (TREE_CODE (arg00) == LSHIFT_EXPR
		  && integer_onep (TREE_OPERAND (arg00, 0)))
		{
		  tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
					      arg01, TREE_OPERAND (arg00, 1));
		  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
					 build_int_cst (TREE_TYPE (arg0), 1));
		  return fold_build2_loc (loc, code, type,
					  fold_convert_loc (loc, TREE_TYPE (arg1), tem),
					  arg1);
		}
	      else if (TREE_CODE (arg01) == LSHIFT_EXPR
		       && integer_onep (TREE_OPERAND (arg01, 0)))
		{
		  tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
					      arg00, TREE_OPERAND (arg01, 1));
		  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
					 build_int_cst (TREE_TYPE (arg0), 1));
		  return fold_build2_loc (loc, code, type,
					  fold_convert_loc (loc, TREE_TYPE (arg1), tem),
					  arg1);
		}
	    }
	  /* If this is an NE or EQ comparison of zero against the result of a
	     signed MOD operation whose second operand is a power of 2, make
	     the MOD operation unsigned since it is simpler and equivalent.  */
	  if (integer_zerop (arg1)
	      && !TYPE_UNSIGNED (TREE_TYPE (arg0))
	      && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
		  || TREE_CODE (arg0) == CEIL_MOD_EXPR
		  || TREE_CODE (arg0) == FLOOR_MOD_EXPR
		  || TREE_CODE (arg0) == ROUND_MOD_EXPR)
	      && integer_pow2p (TREE_OPERAND (arg0, 1)))
	    {
	      tree newtype = unsigned_type_for (TREE_TYPE (arg0));
	      tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
					     fold_convert_loc (loc, newtype,
							       TREE_OPERAND (arg0, 0)),
					     fold_convert_loc (loc, newtype,
							       TREE_OPERAND (arg0, 1)));

	      return fold_build2_loc (loc, code, type, newmod,
				      fold_convert_loc (loc, newtype, arg1));
	    }

	  /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
	     C1 is a valid shift constant, and C2 is a power of two, i.e.
	     a single bit.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
	      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
		 == INTEGER_CST
	      && integer_pow2p (TREE_OPERAND (arg0, 1))
	      && integer_zerop (arg1))
	    {
	      tree itype = TREE_TYPE (arg0);
	      unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
	      tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);

	      /* Check for a valid shift count.  */
	      if (TREE_INT_CST_HIGH (arg001) == 0
		  && TREE_INT_CST_LOW (arg001) < prec)
		{
		  tree arg01 = TREE_OPERAND (arg0, 1);
		  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		  unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
		  /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
		     can be rewritten as (X & (C2 << C1)) != 0.  */
		  if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
		    {
		      tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
		      tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
		      return fold_build2_loc (loc, code, type, tem,
					      fold_convert_loc (loc, itype, arg1));
		    }
		  /* Otherwise, for signed (arithmetic) shifts,
		     ((X >> C1) & C2) != 0 is rewritten as X < 0, and
		     ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
		  else if (!TYPE_UNSIGNED (itype))
		    return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
					    arg000, build_int_cst (itype, 0));
		  /* Otherwise, of unsigned (logical) shifts,
		     ((X >> C1) & C2) != 0 is rewritten as (X,false), and
		     ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
		  else
		    return omit_one_operand_loc (loc, type,
						 code == EQ_EXPR ? integer_one_node
								 : integer_zero_node,
						 arg000);
		}
	    }
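	  /* Examples (illustrative): ((X >> 2) & 4) != 0 becomes
	     (X & 16) != 0, since 4 << 2 does not overflow; for a signed
	     32-bit X, ((X >> 30) & 4) != 0 would overflow the shift and
	     is rewritten as X < 0 instead.  */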
	  /* If we have (A & C) == C where C is a power of 2, convert this into
	     (A & C) != 0.  Similarly for NE_EXPR.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && integer_pow2p (TREE_OPERAND (arg0, 1))
	      && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	    return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
				    arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
							    integer_zero_node));
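	  /* Example (illustrative): (A & 4) == 4 becomes (A & 4) != 0,
	     which downstream single-bit tests handle more cheaply.  */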
	  /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
	     bit, then fold the expression into A < 0 or A >= 0.  */
	  tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
	  if (tem)
	    return tem;

	  /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
	     Similarly for NE_EXPR.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	    {
	      tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
					   TREE_TYPE (TREE_OPERAND (arg0, 1)),
					   TREE_OPERAND (arg0, 1));
	      tree dandnotc
		= fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
				   fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
				   notc);
	      tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	      if (integer_nonzerop (dandnotc))
		return omit_one_operand_loc (loc, type, rslt, arg0);
	    }

	  /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
	     Similarly for NE_EXPR.  */
	  if (TREE_CODE (arg0) == BIT_IOR_EXPR
	      && TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	    {
	      tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
	      tree candnotd
		= fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
				   TREE_OPERAND (arg0, 1),
				   fold_convert_loc (loc, TREE_TYPE (arg0), notd));
	      tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	      if (integer_nonzerop (candnotd))
		return omit_one_operand_loc (loc, type, rslt, arg0);
	    }
	  /* If this is a comparison of a field, we may be able to simplify it.  */
	  if ((TREE_CODE (arg0) == COMPONENT_REF
	       || TREE_CODE (arg0) == BIT_FIELD_REF)
	      /* Handle the constant case even without -O
		 to make sure the warnings are given.  */
	      && (optimize || TREE_CODE (arg1) == INTEGER_CST))
	    {
	      t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
	      if (t1)
		return t1;
	    }

	  /* Optimize comparisons of strlen vs zero to a compare of the
	     first character of the string vs zero.  To wit,
	     strlen(ptr) == 0 => *ptr == 0
	     strlen(ptr) != 0 => *ptr != 0
	     Other cases should reduce to one of these two (or a constant)
	     due to the return value of strlen being unsigned.  */
	  if (TREE_CODE (arg0) == CALL_EXPR
	      && integer_zerop (arg1))
	    {
	      tree fndecl = get_callee_fndecl (arg0);

	      if (fndecl
		  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
		  && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
		  && call_expr_nargs (arg0) == 1
		  && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
		{
		  tree iref = build_fold_indirect_ref_loc (loc,
							   CALL_EXPR_ARG (arg0, 0));
		  return fold_build2_loc (loc, code, type, iref,
					  build_int_cst (TREE_TYPE (iref), 0));
		}
	    }

	  /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
	     of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
	  if (TREE_CODE (arg0) == RSHIFT_EXPR
	      && integer_zerop (arg1)
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	    {
	      tree arg00 = TREE_OPERAND (arg0, 0);
	      tree arg01 = TREE_OPERAND (arg0, 1);
	      tree itype = TREE_TYPE (arg00);
	      if (TREE_INT_CST_HIGH (arg01) == 0
		  && TREE_INT_CST_LOW (arg01)
		     == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
		{
		  if (TYPE_UNSIGNED (itype))
		    {
		      itype = signed_type_for (itype);
		      arg00 = fold_convert_loc (loc, itype, arg00);
		    }
		  return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
					  type, arg00, build_zero_cst (itype));
		}
	    }
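	  /* Example (illustrative): for a 32-bit int X, (X >> 31) != 0
	     becomes X < 0 and (X >> 31) == 0 becomes X >= 0.  */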
	  /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
	  if (integer_zerop (arg1)
	      && TREE_CODE (arg0) == BIT_XOR_EXPR)
	    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				    TREE_OPERAND (arg0, 1));

	  /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
	  if (TREE_CODE (arg0) == BIT_XOR_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				    build_zero_cst (TREE_TYPE (arg0)));
	  /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
	  if (TREE_CODE (arg0) == BIT_XOR_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	      && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
				    build_zero_cst (TREE_TYPE (arg0)));

	  /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
	  if (TREE_CODE (arg0) == BIT_XOR_EXPR
	      && TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				    fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
						     TREE_OPERAND (arg0, 1), arg1));
13018 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
13019 (X & C) == 0 when C is a single bit. */
13020 if (TREE_CODE (arg0
) == BIT_AND_EXPR
13021 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_NOT_EXPR
13022 && integer_zerop (arg1
)
13023 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
13025 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
13026 TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0),
13027 TREE_OPERAND (arg0
, 1));
13028 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
,
13030 fold_convert_loc (loc
, TREE_TYPE (arg0
),
      /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
	 constant C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type, arg00,
				  build_int_cst (TREE_TYPE (arg00), 0));
	}
      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
	 when C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
				 arg000, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type, tem,
				  build_int_cst (TREE_TYPE (tem), 0));
	}
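      /* Both single-bit folds use the same observation: when C has exactly
	 one bit set, X & C is either 0 or C, and XOR-ing with C toggles
	 between those two values.  Hence "(X & C) ^ C is zero" is the same
	 claim as "X & C is nonzero", with the equality code flipped.  */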
      if (integer_zerop (arg1)
	  && tree_expr_nonzero_p (arg0))
	{
	  tree res = constant_boolean_node (code == NE_EXPR, type);
	  return omit_one_operand_loc (loc, type, res, arg0);
	}
      /* Fold -X op -Y as X op Y, where op is eq/ne.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, code, type,
				TREE_OPERAND (arg0, 0),
				fold_convert_loc (loc, TREE_TYPE (arg0),
						  TREE_OPERAND (arg1, 0)));
      /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0, and symmetries.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
							  BIT_XOR_EXPR, itype,
							  arg00, arg10),
						     arg01),
				    build_zero_cst (itype));

	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
							  BIT_XOR_EXPR, itype,
							  arg00, arg11),
						     arg01),
				    build_zero_cst (itype));

	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
							  BIT_XOR_EXPR, itype,
							  arg01, arg10),
						     arg00),
				    build_zero_cst (itype));

	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
							  BIT_XOR_EXPR, itype,
							  arg01, arg11),
						     arg00),
				    build_zero_cst (itype));
	}
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == BIT_XOR_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
	     operand_equal_p guarantees no side-effects so we don't need
	     to use omit_one_operand on Z.  */
	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2_loc (loc, code, type, arg00,
				    fold_convert_loc (loc, TREE_TYPE (arg00),
						      arg10));
	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2_loc (loc, code, type, arg00,
				    fold_convert_loc (loc, TREE_TYPE (arg00),
						      arg11));
	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2_loc (loc, code, type, arg01,
				    fold_convert_loc (loc, TREE_TYPE (arg01),
						      arg10));
	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2_loc (loc, code, type, arg01,
				    fold_convert_loc (loc, TREE_TYPE (arg01),
						      arg11));

	  /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
	  if (TREE_CODE (arg01) == INTEGER_CST
	      && TREE_CODE (arg11) == INTEGER_CST)
	    {
	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
				     fold_convert_loc (loc, itype, arg11));
	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
	      return fold_build2_loc (loc, code, type, tem,
				      fold_convert_loc (loc, itype, arg10));
	    }
	}
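      /* XOR with a common operand cancels: (X ^ Z) == (Y ^ Z) holds
	 exactly when X == Y, because XOR-ing both sides with Z is a
	 bijection.  The constant variant simply pre-computes C1 ^ C2
	 so that only one XOR remains at run time.  */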
      /* Attempt to simplify equality/inequality comparisons of complex
	 values.  Only lower the comparison if the result is known or
	 can be simplified to a single scalar comparison.  */
      if ((TREE_CODE (arg0) == COMPLEX_EXPR
	   || TREE_CODE (arg0) == COMPLEX_CST)
	  && (TREE_CODE (arg1) == COMPLEX_EXPR
	      || TREE_CODE (arg1) == COMPLEX_CST))
	{
	  tree real0, imag0, real1, imag1;
	  tree rcond, icond;

	  if (TREE_CODE (arg0) == COMPLEX_EXPR)
	    {
	      real0 = TREE_OPERAND (arg0, 0);
	      imag0 = TREE_OPERAND (arg0, 1);
	    }
	  else
	    {
	      real0 = TREE_REALPART (arg0);
	      imag0 = TREE_IMAGPART (arg0);
	    }

	  if (TREE_CODE (arg1) == COMPLEX_EXPR)
	    {
	      real1 = TREE_OPERAND (arg1, 0);
	      imag1 = TREE_OPERAND (arg1, 1);
	    }
	  else
	    {
	      real1 = TREE_REALPART (arg1);
	      imag1 = TREE_IMAGPART (arg1);
	    }

	  rcond = fold_binary_loc (loc, code, type, real0, real1);
	  if (rcond && TREE_CODE (rcond) == INTEGER_CST)
	    {
	      if (integer_zerop (rcond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_false_node,
						  imag0, imag1);
		  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_true_node,
						  imag0, imag1);
		  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
		}
	    }

	  icond = fold_binary_loc (loc, code, type, imag0, imag1);
	  if (icond && TREE_CODE (icond) == INTEGER_CST)
	    {
	      if (integer_zerop (icond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_false_node,
						  real0, real1);
		  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_true_node,
						  real0, real1);
		  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
		}
	    }
	}

      return NULL_TREE;

    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;
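      /* fold_comparison implements the simplifications common to all
	 comparison codes; everything below handles patterns that are
	 specific to the ordering operators <, <=, > and >=.  */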
      /* Transform comparisons of the form X +- C CMP X.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	       && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
	      || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  enum tree_code code0 = TREE_CODE (arg0);
	  int is_positive;

	  if (TREE_CODE (arg01) == REAL_CST)
	    is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
	  else
	    is_positive = tree_int_cst_sgn (arg01);

	  /* (X - c) > X becomes false.  */
	  if (code == GT_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that (X - c) > X "
					"is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Likewise (X + c) < X becomes false.  */
	  if (code == LT_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) < X is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Convert (X - c) <= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == LE_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X - c) <= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  /* Convert (X + c) >= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == GE_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) >= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  if (TREE_CODE (arg01) == INTEGER_CST)
	    {
	      /* Convert X + c > X and X - c < X to true for integers.  */
	      if (code == GT_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) > X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      if (code == LT_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) < X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      /* Convert X + c <= X and X - c >= X to false for integers.  */
	      if (code == LE_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) <= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}

	      if (code == GE_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) >= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}
	    }
	}
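      /* These folds are only valid because signed overflow is assumed
	 not to happen: with wrapping arithmetic, x + 1 > x is false for
	 x == INT_MAX.  That is why each transformation emits a
	 -Wstrict-overflow note before relying on the assumption.  */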
      /* Comparisons with the highest or lowest possible integer of
	 the specified precision will have known values.  */
      {
	tree arg1_type = TREE_TYPE (arg1);
	unsigned int width = TYPE_PRECISION (arg1_type);

	if (TREE_CODE (arg1) == INTEGER_CST
	    && width <= HOST_BITS_PER_DOUBLE_INT
	    && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
	  {
	    HOST_WIDE_INT signed_max_hi;
	    unsigned HOST_WIDE_INT signed_max_lo;
	    unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;

	    if (width <= HOST_BITS_PER_WIDE_INT)
	      {
		signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				- 1;
		signed_max_hi = 0;
		max_hi = 0;

		if (TYPE_UNSIGNED (arg1_type))
		  {
		    max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		    min_lo = 0;
		    min_hi = 0;
		  }
		else
		  {
		    max_lo = signed_max_lo;
		    min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
		    min_hi = -1;
		  }
	      }
	    else
	      {
		width -= HOST_BITS_PER_WIDE_INT;
		signed_max_lo = -1;
		signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				- 1;
		max_lo = -1;
		min_lo = 0;

		if (TYPE_UNSIGNED (arg1_type))
		  {
		    max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		    min_hi = 0;
		  }
		else
		  {
		    max_hi = signed_max_hi;
		    min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
		  }
	      }

	    if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
		&& TREE_INT_CST_LOW (arg1) == max_lo)
	      switch (code)
		{
		case GT_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_zero_node, arg0);

		case GE_EXPR:
		  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

		case LE_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_one_node, arg0);

		case LT_EXPR:
		  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

		/* The GE_EXPR and LT_EXPR cases above are not normally
		   reached because of previous transformations.  */

		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == max_hi
		     && TREE_INT_CST_LOW (arg1) == max_lo - 1)
	      switch (code)
		{
		case GT_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1));
		  return fold_build2_loc (loc, EQ_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		case LE_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1));
		  return fold_build2_loc (loc, NE_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == min_hi
		     && TREE_INT_CST_LOW (arg1) == min_lo)
	      switch (code)
		{
		case LT_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_zero_node, arg0);

		case LE_EXPR:
		  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

		case GE_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_one_node, arg0);

		case GT_EXPR:
		  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == min_hi
		     && TREE_INT_CST_LOW (arg1) == min_lo + 1)
	      switch (code)
		{
		case GE_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
		  return fold_build2_loc (loc, NE_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		case LT_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
		  return fold_build2_loc (loc, EQ_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		default:
		  break;
		}

	    else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
		     && TREE_INT_CST_LOW (arg1) == signed_max_lo
		     && TYPE_UNSIGNED (arg1_type)
		     /* We will flip the signedness of the comparison operator
			associated with the mode of arg1, so the sign bit is
			specified by this mode.  Check that arg1 is the signed
			max associated with this sign bit.  */
		     && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
		     /* signed_type does not work on pointer types.  */
		     && INTEGRAL_TYPE_P (arg1_type))
	      {
		/* The following case also applies to X < signed_max+1
		   and X >= signed_max+1 because previous transformations.  */
		if (code == LE_EXPR || code == GT_EXPR)
		  {
		    tree st;
		    st = signed_type_for (TREE_TYPE (arg1));
		    return fold_build2_loc (loc,
					    code == LE_EXPR
					    ? GE_EXPR : LT_EXPR,
					    type,
					    fold_convert_loc (loc, st, arg0),
					    build_int_cst (st, 0));
		  }
	      }
	  }
      }
      /* If we are comparing an ABS_EXPR with a constant, we can
	 convert all the cases into explicit comparisons, but they may
	 well not be faster than doing the ABS and one comparison.
	 But ABS (X) <= C is a range comparison, which becomes a subtraction
	 and a comparison, and is probably faster.  */
      if (code == LE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == ABS_EXPR
	  && ! TREE_SIDE_EFFECTS (arg0)
	  && (0 != (tem = negate_expr (arg1)))
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				build2 (GE_EXPR, type,
					TREE_OPERAND (arg0, 0), tem),
				build2 (LE_EXPR, type,
					TREE_OPERAND (arg0, 0), arg1));
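      /* That is, ABS (X) <= C becomes -C <= X && X <= C, a range check
	 that the later range-check machinery can typically implement as
	 a single subtraction followed by one unsigned comparison.  */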
      /* Convert ABS_EXPR<x> >= 0 to true.  */
      strict_overflow_p = false;
      if (code == GE_EXPR
	  && (integer_zerop (arg1)
	      || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
		  && real_zerop (arg1)))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand_loc (loc, type,
				       constant_boolean_node (true, type),
				       arg0);
	}

      /* Convert ABS_EXPR<x> < 0 to false.  */
      strict_overflow_p = false;
      if (code == LT_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand_loc (loc, type,
				       constant_boolean_node (false, type),
				       arg0);
	}
      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
	 and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TREE_CODE (arg1) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (arg1, 0)))
	return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
			   build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
				   TREE_OPERAND (arg1, 1)),
			   build_zero_cst (TREE_TYPE (arg0)));

      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && CONVERT_EXPR_P (arg1)
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
	{
	  tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
			TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
	  return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
			     fold_convert_loc (loc, TREE_TYPE (arg0), tem),
			     build_zero_cst (TREE_TYPE (arg0)));
	}

      return NULL_TREE;
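      /* For unsigned X, X < (1 << Y) says exactly that X has no bits set
	 at or above position Y, which is what X >> Y == 0 tests without
	 having to materialize the shifted constant.  */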
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
	{
	  t1 = fold_relational_const (code, type, arg0, arg1);
	  if (t1 != NULL_TREE)
	    return t1;
	}

      /* If the first operand is NaN, the result is constant.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand_loc (loc, type, t1, arg1);
	}

      /* If the second operand is NaN, the result is constant.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand_loc (loc, type, t1, arg0);
	}

      /* Simplify unordered comparison of something with itself.  */
      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (1, type);

      if (code == LTGT_EXPR
	  && !flag_trapping_math
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (0, type);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
	tree targ0 = strip_float_extensions (arg0);
	tree targ1 = strip_float_extensions (arg1);
	tree newtype = TREE_TYPE (targ0);

	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	  newtype = TREE_TYPE (targ1);

	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, newtype, targ0),
				  fold_convert_loc (loc, newtype, targ1));
      }

      return NULL_TREE;
    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
	 nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
	return NULL_TREE;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
				 : fold_convert_loc (loc, type, arg1);
      return pedantic_non_lvalue_loc (loc, tem);
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg0) == REAL_CST
	   && TREE_CODE (arg1) == REAL_CST)
	  || (TREE_CODE (arg0) == INTEGER_CST
	      && TREE_CODE (arg1) == INTEGER_CST))
	return build_complex (type, arg0, arg1);
      if (TREE_CODE (arg0) == REALPART_EXPR
	  && TREE_CODE (arg1) == IMAGPART_EXPR
	  && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
	  && operand_equal_p (TREE_OPERAND (arg0, 0),
			      TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
				     TREE_OPERAND (arg1, 0));
      return NULL_TREE;
    case ASSERT_EXPR:
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
      gcc_unreachable ();
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
	if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg0, elts)
	    || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
	  return NULL_TREE;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
					  ? NOP_EXPR : FIX_TRUNC_EXPR,
					  TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
	unsigned int out, ofs, scale;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
	if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 4);
	if (!vec_cst_ctor_to_array (arg0, elts)
	    || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
	  return NULL_TREE;

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	for (out = 0; out < nelts; out++)
	  {
	    unsigned int in1 = (out << scale) + ofs;
	    unsigned int in2 = in1 + nelts * 2;
	    tree t1, t2;

	    t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
	    t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    elts[out] = const_binop (MULT_EXPR, t1, t2);
	    if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    default:
      return NULL_TREE;
    } /* switch (code) */
}
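/* End of fold_binary_loc.  Note on the vector cases above: the
   VEC_WIDEN_MULT LO/HI variants select a contiguous element range
   (respecting BYTES_BIG_ENDIAN) while the EVEN/ODD variants interleave,
   which is why the input index is computed as (out << scale) + ofs.  */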
/* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
   a LABEL_EXPR; otherwise return NULL_TREE.  Do not check the subtrees
   of GOTO_EXPR.  */

static tree
contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      return *tp;

    case GOTO_EXPR:
      *walk_subtrees = 0;

      /* ... fall through ...  */

    default:
      return NULL_TREE;
    }
}

/* Return whether the sub-tree ST contains a label which is accessible from
   outside the sub-tree.  */

static bool
contains_label_p (tree st)
{
  return
   (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
}
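/* contains_label_p guards the COND_EXPR folding below: a dead arm that
   still contains a LABEL_EXPR cannot simply be discarded, because a goto
   from outside the expression could still jump into it.  */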
/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_ternary_loc (location_t loc, enum tree_code code, tree type,
		  tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 3);

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any cases,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */
  if (op0)
    {
      arg0 = op0;
      STRIP_NOPS (arg0);
    }

  if (op1)
    {
      arg1 = op1;
      STRIP_NOPS (arg1);
    }

  if (op2)
    {
      arg2 = op2;
      STRIP_NOPS (arg2);
    }

  switch (code)
    {
    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
	{
	  unsigned HOST_WIDE_INT idx;
	  tree field, value;
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
	    if (field == arg1)
	      return value;
	}
      return NULL_TREE;

    case COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
	 so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
	  tem = integer_zerop (arg0) ? op2 : op1;
	  /* Only optimize constant conditions when the selected branch
	     has the same type as the COND_EXPR.  This avoids optimizing
	     away "c ? x : throw", where the throw has a void type.
	     Avoid throwing away that operand which contains label.  */
	  if ((!TREE_SIDE_EFFECTS (unused_op)
	       || !contains_label_p (unused_op))
	      && (! VOID_TYPE_P (TREE_TYPE (tem))
		  || VOID_TYPE_P (type)))
	    return pedantic_non_lvalue_loc (loc, tem);
	  return NULL_TREE;
	}

      if (operand_equal_p (arg1, op2, 0))
	return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);

      /* If we have A op B ? A : C, we may be able to convert this to a
	 simpler expression, depending on the operation and the values
	 of B and C.  Signed zeros prevent all of these transformations,
	 for reasons given above each one.

	 Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     arg1, TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
	{
	  tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
	  if (tem)
	    return tem;
	}

      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     op2,
					     TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  tem = fold_truth_not_expr (loc0, arg0);
	  if (tem && COMPARISON_CLASS_P (tem))
	    {
	      tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
	      if (tem)
		return tem;
	    }
	}

      /* If the second operand is simpler than the third, swap them
	 since that produces better jump optimization results.  */
      if (truth_value_p (TREE_CODE (arg0))
	  && tree_swap_operands_p (op1, op2, false))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* See if this can be inverted.  If it can't, possibly because
	     it was a floating-point inequality comparison, don't do
	     anything.  */
	  tem = fold_truth_not_expr (loc0, arg0);
	  if (tem)
	    return fold_build3_loc (loc, code, type, tem, op2, op1);
	}

      /* Convert A ? 1 : 0 to simply A.  */
      if (integer_onep (op1)
	  && integer_zerop (op2)
	  /* If we try to convert OP0 to our type, the
	     call to fold will try to move the conversion inside
	     a COND, which will recurse.  In that case, the COND_EXPR
	     is probably the best choice, so leave it alone.  */
	  && type == TREE_TYPE (arg0))
	return pedantic_non_lvalue_loc (loc, arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
	 over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
	  && integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0)))
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
					    invert_truthvalue_loc (loc,
								   arg0)));

      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
      if (TREE_CODE (arg0) == LT_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
	{
	  /* sign_bit_p only checks ARG1 bits within A's precision.
	     If <sign bit of A> has wider type than A, bits outside
	     of A's precision in <sign bit of A> need to be checked.
	     If they are all 0, this optimization needs to be done
	     in unsigned A's type, if they are all 1 in signed A's type,
	     otherwise this can't be done.  */
	  if (TYPE_PRECISION (TREE_TYPE (tem))
	      < TYPE_PRECISION (TREE_TYPE (arg1))
	      && TYPE_PRECISION (TREE_TYPE (tem))
		 < TYPE_PRECISION (type))
	    {
	      unsigned HOST_WIDE_INT mask_lo;
	      HOST_WIDE_INT mask_hi;
	      int inner_width, outer_width;
	      tree tem_type;

	      inner_width = TYPE_PRECISION (TREE_TYPE (tem));
	      outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
	      if (outer_width > TYPE_PRECISION (type))
		outer_width = TYPE_PRECISION (type);

	      if (outer_width > HOST_BITS_PER_WIDE_INT)
		{
		  mask_hi = ((unsigned HOST_WIDE_INT) -1
			     >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
		  mask_lo = -1;
		}
	      else
		{
		  mask_hi = 0;
		  mask_lo = ((unsigned HOST_WIDE_INT) -1
			     >> (HOST_BITS_PER_WIDE_INT - outer_width));
		}
	      if (inner_width > HOST_BITS_PER_WIDE_INT)
		{
		  mask_hi &= ~((unsigned HOST_WIDE_INT) -1
			       >> (HOST_BITS_PER_WIDE_INT - inner_width));
		  mask_lo = 0;
		}
	      else
		mask_lo &= ~((unsigned HOST_WIDE_INT) -1
			     >> (HOST_BITS_PER_WIDE_INT - inner_width));

	      if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
		  && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
		{
		  tem_type = signed_type_for (TREE_TYPE (tem));
		  tem = fold_convert_loc (loc, tem_type, tem);
		}
	      else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
		       && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
		{
		  tem_type = unsigned_type_for (TREE_TYPE (tem));
		  tem = fold_convert_loc (loc, tem_type, tem);
		}
	      else
		tem = NULL;
	    }

	  if (tem)
	    return
	      fold_convert_loc (loc, type,
				fold_build2_loc (loc, BIT_AND_EXPR,
						 TREE_TYPE (tem), tem,
						 fold_convert_loc (loc,
							TREE_TYPE (tem),
							arg1)));
	}

      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
	 already handled above.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && integer_pow2p (arg1))
	{
	  tree tem = TREE_OPERAND (arg0, 0);
	  STRIP_NOPS (tem);
	  if (TREE_CODE (tem) == RSHIFT_EXPR
	      && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
	      && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
		 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
	    return fold_build2_loc (loc, BIT_AND_EXPR, type,
				    TREE_OPERAND (tem, 0), arg1);
	}

      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
	 is probably obsolete because the first operand should be a
	 truth value (that's why we have the two cases above), but let's
	 leave it in until we can confirm this for all front-ends.  */
      if (integer_zerop (op2)
	  && TREE_CODE (arg0) == NE_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_pow2p (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      arg1, OEP_ONLY_CONST))
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
						TREE_OPERAND (arg0, 0)));

      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				fold_convert_loc (loc, type, arg0),
				arg1);

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_truth_not_expr (loc0, arg0);
	  if (tem)
	    return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
				    fold_convert_loc (loc, type, tem),
				    arg1);
	}

      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
      if (integer_zerop (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2)))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_truth_not_expr (loc0, arg0);
	  if (tem)
	    return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				    fold_convert_loc (loc, type, tem),
				    op2);
	}

      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if (integer_onep (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2)))
	return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
				fold_convert_loc (loc, type, arg0),
				op2);

      return NULL_TREE;
    case VEC_COND_EXPR:
      if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  if (integer_all_onesp (arg0) && !TREE_SIDE_EFFECTS (op2))
	    return pedantic_non_lvalue_loc (loc, op1);
	  if (integer_zerop (arg0) && !TREE_SIDE_EFFECTS (op1))
	    return pedantic_non_lvalue_loc (loc, op2);
	}
      return NULL_TREE;

    case CALL_EXPR:
      /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
	 of fold_ternary on them.  */
      gcc_unreachable ();
    case BIT_FIELD_REF:
      if ((TREE_CODE (arg0) == VECTOR_CST
	   || (TREE_CODE (arg0) == CONSTRUCTOR
	       && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
	  && (type == TREE_TYPE (TREE_TYPE (arg0))
	      || (TREE_CODE (type) == VECTOR_TYPE
		  && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
	{
	  tree eltype = TREE_TYPE (TREE_TYPE (arg0));
	  unsigned HOST_WIDE_INT width = tree_low_cst (TYPE_SIZE (eltype), 1);
	  unsigned HOST_WIDE_INT n = tree_low_cst (arg1, 1);
	  unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);

	  if (n != 0
	      && (idx % width) == 0
	      && (n % width) == 0
	      && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
	    {
	      idx = idx / width;
	      n = n / width;

	      if (TREE_CODE (arg0) == VECTOR_CST)
		{
		  if (n == 1)
		    return VECTOR_CST_ELT (arg0, idx);

		  tree *vals = XALLOCAVEC (tree, n);
		  for (unsigned i = 0; i < n; ++i)
		    vals[i] = VECTOR_CST_ELT (arg0, idx + i);
		  return build_vector (type, vals);
		}

	      /* Constructor elements can be subvectors.  */
	      unsigned HOST_WIDE_INT k = 1;
	      if (CONSTRUCTOR_NELTS (arg0) != 0)
		{
		  tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
		  if (TREE_CODE (cons_elem) == VECTOR_TYPE)
		    k = TYPE_VECTOR_SUBPARTS (cons_elem);
		}

	      /* We keep an exact subset of the constructor elements.  */
	      if ((idx % k) == 0 && (n % k) == 0)
		{
		  if (CONSTRUCTOR_NELTS (arg0) == 0)
		    return build_constructor (type, NULL);
		  idx /= k;
		  n /= k;
		  if (n == 1)
		    {
		      if (idx < CONSTRUCTOR_NELTS (arg0))
			return CONSTRUCTOR_ELT (arg0, idx)->value;
		      return build_zero_cst (type);
		    }

		  vec<constructor_elt, va_gc> *vals;
		  vec_alloc (vals, n);
		  for (unsigned i = 0;
		       i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
		       ++i)
		    CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
					    CONSTRUCTOR_ELT
					      (arg0, idx + i)->value);
		  return build_constructor (type, vals);
		}
	      /* The bitfield references a single constructor element.  */
	      else if (idx + n <= (idx / k + 1) * k)
		{
		  if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
		    return build_zero_cst (type);
		  else if (n == k)
		    return CONSTRUCTOR_ELT (arg0, idx / k)->value;
		  else
		    return fold_build3_loc (loc, code, type,
		      CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
		      build_int_cst (TREE_TYPE (op2), (idx % k) * width));
		}
	    }
	}
      /* A bit-field-ref that referenced the full argument can be stripped.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	  && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
	  && integer_zerop (op2))
	return fold_convert_loc (loc, type, arg0);

      /* On constants we can use native encode/interpret to constant
	 fold (nearly) all BIT_FIELD_REFs.  */
      if (CONSTANT_CLASS_P (arg0)
	  && can_native_interpret_type_p (type)
	  && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1)
	  /* This limitation should not be necessary, we just need to
	     round this up to mode size.  */
	  && tree_low_cst (op1, 1) % BITS_PER_UNIT == 0
	  /* Need bit-shifting of the buffer to relax the following.  */
	  && tree_low_cst (op2, 1) % BITS_PER_UNIT == 0)
	{
	  unsigned HOST_WIDE_INT bitpos = tree_low_cst (op2, 1);
	  unsigned HOST_WIDE_INT bitsize = tree_low_cst (op1, 1);
	  unsigned HOST_WIDE_INT clen;
	  clen = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1);
	  /* ??? We cannot tell native_encode_expr to start at
	     some random byte only.  So limit us to a reasonable amount
	     of work.  */
	  if (clen <= 4096)
	    {
	      unsigned char *b = XALLOCAVEC (unsigned char, clen);
	      unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
	      if (len > 0
		  && len * BITS_PER_UNIT >= bitpos + bitsize)
		{
		  tree v = native_interpret_expr (type,
						  b + bitpos / BITS_PER_UNIT,
						  bitsize / BITS_PER_UNIT);
		  if (v)
		    return v;
		}
	    }
	}

      return NULL_TREE;

    case FMA_EXPR:
      /* For integers we can decompose the FMA if possible.  */
      if (TREE_CODE (arg0) == INTEGER_CST
	  && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2_loc (loc, PLUS_EXPR, type,
				const_binop (MULT_EXPR, arg0, arg1), arg2);
      if (integer_zerop (arg2))
	return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);

      return fold_fma (loc, type, arg0, arg1, arg2);
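      /* FMA_EXPR computes arg0 * arg1 + arg2 as one operation.  For
	 integer constants the fused form buys nothing, so it is split
	 back into a multiply and an add that const_binop can evaluate;
	 a zero addend likewise degenerates to a plain multiply.  */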
    case VEC_PERM_EXPR:
      if (TREE_CODE (arg2) == VECTOR_CST)
	{
	  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
	  unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
	  tree t;
	  bool need_mask_canon = false;
	  bool all_in_vec0 = true;
	  bool all_in_vec1 = true;
	  bool maybe_identity = true;
	  bool single_arg = (op0 == op1);
	  bool changed = false;

	  mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
	  gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
	  for (i = 0; i < nelts; i++)
	    {
	      tree val = VECTOR_CST_ELT (arg2, i);
	      if (TREE_CODE (val) != INTEGER_CST)
		return NULL_TREE;

	      sel[i] = TREE_INT_CST_LOW (val) & mask;
	      if (TREE_INT_CST_HIGH (val)
		  || ((unsigned HOST_WIDE_INT)
		      TREE_INT_CST_LOW (val) != sel[i]))
		need_mask_canon = true;

	      if (sel[i] < nelts)
		all_in_vec1 = false;
	      else
		all_in_vec0 = false;

	      if ((sel[i] & (nelts-1)) != i)
		maybe_identity = false;
	    }

	  if (maybe_identity)
	    {
	      if (all_in_vec0)
		return op0;
	      if (all_in_vec1)
		return op1;
	    }

	  if (all_in_vec0)
	    op1 = op0;
	  else if (all_in_vec1)
	    {
	      op0 = op1;
	      for (i = 0; i < nelts; i++)
		sel[i] -= nelts;
	      need_mask_canon = true;
	    }

	  if ((TREE_CODE (op0) == VECTOR_CST
	       || TREE_CODE (op0) == CONSTRUCTOR)
	      && (TREE_CODE (op1) == VECTOR_CST
		  || TREE_CODE (op1) == CONSTRUCTOR))
	    {
	      t = fold_vec_perm (type, op0, op1, sel);
	      if (t != NULL_TREE)
		return t;
	    }

	  if (op0 == op1 && !single_arg)
	    changed = true;

	  if (need_mask_canon && arg2 == op2)
	    {
	      tree *tsel = XALLOCAVEC (tree, nelts);
	      tree eltype = TREE_TYPE (TREE_TYPE (arg2));
	      for (i = 0; i < nelts; i++)
		tsel[i] = build_int_cst (eltype, sel[i]);
	      op2 = build_vector (TREE_TYPE (arg2), tsel);
	      changed = true;
	    }

	  if (changed)
	    return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;
  location_t loc = EXPR_LOCATION (expr);

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  /* CALL_EXPR-like objects with variable numbers of operands are
     treated specially.  */
  if (kind == tcc_vl_exp)
    {
      if (code == CALL_EXPR)
	{
	  tem = fold_call_expr (loc, expr, false);
	  return tem ? tem : expr;
	}
      return expr;
    }

  if (IS_EXPR_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);
      tree op0, op1, op2;

      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  op0 = TREE_OPERAND (t, 0);
	  tem = fold_unary_loc (loc, code, type, op0);
	  return tem ? tem : expr;
	case 2:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  tem = fold_binary_loc (loc, code, type, op0, op1);
	  return tem ? tem : expr;
	case 3:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  op2 = TREE_OPERAND (t, 2);
	  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
	  return tem ? tem : expr;
	default:
	  break;
	}
    }

  switch (code)
    {
    case ARRAY_REF:
      {
	tree op0 = TREE_OPERAND (t, 0);
	tree op1 = TREE_OPERAND (t, 1);

	if (TREE_CODE (op1) == INTEGER_CST
	    && TREE_CODE (op0) == CONSTRUCTOR
	    && ! type_contains_placeholder_p (TREE_TYPE (op0)))
	  {
	    vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
	    unsigned HOST_WIDE_INT end = vec_safe_length (elts);
	    unsigned HOST_WIDE_INT begin = 0;

	    /* Find a matching index by means of a binary search.  */
	    while (begin != end)
	      {
		unsigned HOST_WIDE_INT middle = (begin + end) / 2;
		tree index = (*elts)[middle].index;

		if (TREE_CODE (index) == INTEGER_CST
		    && tree_int_cst_lt (index, op1))
		  begin = middle + 1;
		else if (TREE_CODE (index) == INTEGER_CST
			 && tree_int_cst_lt (op1, index))
		  end = middle;
		else if (TREE_CODE (index) == RANGE_EXPR
			 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
		  begin = middle + 1;
		else if (TREE_CODE (index) == RANGE_EXPR
			 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
		  end = middle;
		else
		  return (*elts)[middle].value;
	      }
	  }

	return t;
      }

      /* Return a VECTOR_CST if possible.  */
    case CONSTRUCTOR:
      {
	tree type = TREE_TYPE (t);
	if (TREE_CODE (type) != VECTOR_TYPE)
	  return t;

	tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
	unsigned HOST_WIDE_INT idx, pos = 0;
	tree value;

	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
	  {
	    if (!CONSTANT_CLASS_P (value))
	      return t;
	    if (TREE_CODE (value) == VECTOR_CST)
	      {
		for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
		  vec[pos++] = VECTOR_CST_ELT (value, i);
	      }
	    else
	      vec[pos++] = value;
	  }
	for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
	  vec[pos] = build_zero_cst (TREE_TYPE (type));

	return build_vector (type, vec);
      }

    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    default:
      return t;
    } /* switch (code) */
}
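/* fold is the main entry point: constants are returned unchanged,
   CALL_EXPRs go through the builtin folder, and fixed-arity expressions
   dispatch on their operand count to fold_unary_loc, fold_binary_loc or
   fold_ternary_loc, each of which returns NULL_TREE when it has nothing
   to contribute.  */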
#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (const_tree, struct md5_ctx *,
				hash_table <pointer_hash <tree_node> >);
static void fold_check_failed (const_tree, const_tree);
void print_fold_checksum (const_tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */
tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  hash_table <pointer_hash <tree_node> > ht;

  ht.create (32);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  ht.empty ();

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  ht.dispose ();

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}
void
print_fold_checksum (const_tree expr)
{
  struct md5_ctx ctx;
  unsigned char checksum[16], cnt;
  hash_table <pointer_hash <tree_node> > ht;

  ht.create (32);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  ht.dispose ();
  for (cnt = 0; cnt < 16; ++cnt)
    fprintf (stderr, "%02x", checksum[cnt]);
  putc ('\n', stderr);
}
static void
fold_check_failed (const_tree expr ATTRIBUTE_UNUSED,
		   const_tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}
static void
fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
		    hash_table <pointer_hash <tree_node> > ht)
{
  tree_node **slot;
  enum tree_code code;
  union tree_node buf;
  int i, len;

 recursive_label:
  if (expr == NULL)
    return;
  slot = ht.find_slot (expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = CONST_CAST_TREE (expr);
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
      expr = (tree) &buf;
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
	   && (TYPE_POINTER_TO (expr)
	       || TYPE_REFERENCE_TO (expr)
	       || TYPE_CACHED_VALUES_P (expr)
	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
	       || TYPE_NEXT_VARIANT (expr)))
    {
      /* Allow these fields to be modified.  */
      tree tmp;
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = tmp = (tree) &buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
      TYPE_POINTER_TO (tmp) = NULL;
      TYPE_REFERENCE_TO (tmp) = NULL;
      TYPE_NEXT_VARIANT (tmp) = NULL;
      if (TYPE_CACHED_VALUES_P (tmp))
	{
	  TYPE_CACHED_VALUES_P (tmp) = 0;
	  TYPE_CACHED_VALUES (tmp) = NULL;
	}
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST
      && code != SSA_NAME
      && CODE_CONTAINS_STRUCT (code, TS_COMMON))
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
	{
	case STRING_CST:
	  md5_process_bytes (TREE_STRING_POINTER (expr),
			     TREE_STRING_LENGTH (expr), ctx);
	  break;
	case COMPLEX_CST:
	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
	  break;
	case VECTOR_CST:
	  for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
	    fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_exceptional:
      switch (code)
	{
	case TREE_LIST:
	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
	  expr = TREE_CHAIN (expr);
	  goto recursive_label;
	  break;
	case TREE_VEC:
	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
    case tcc_vl_exp:
      len = TREE_OPERAND_LENGTH (expr);
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
	{
	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
	}
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
	fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);

      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
	{
	  fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
	  fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
	}
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
	  || SCALAR_FLOAT_TYPE_P (expr))
	{
	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
	}
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
	  || TREE_CODE (expr) == UNION_TYPE
	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}
/* Helper function for outputting the checksum of a tree T.  When
   debugging with gdb, you can "define mynext" to be "next" followed
   by "call debug_fold_checksum (op0)", then just trace down till the
   outputs differ.  */

DEBUG_FUNCTION void
debug_fold_checksum (const_tree t)
{
  int i;
  unsigned char checksum[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <tree_node> > ht;
  ht.create (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (t, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  ht.empty ();

  for (i = 0; i < 16; i++)
    fprintf (stderr, "%d ", checksum[i]);

  fprintf (stderr, "\n");
}

#endif
/* Fold a unary tree expression with code CODE of type TYPE with an
   operand OP0.  LOC is the location of the resulting expression.
   Return a folded expression if successful.  Otherwise, return a tree
   expression with code CODE of type TYPE with an operand OP0.  */

tree
fold_build1_stat_loc (location_t loc,
		      enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before[16], checksum_after[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <tree_node> > ht;

  ht.create (32);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  ht.empty ();
#endif

  tem = fold_unary_loc (loc, code, type, op0);
  if (!tem)
    tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  ht.dispose ();

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (op0, tem);
#endif
  return tem;
}
/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting
   expression.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with operands
   OP0 and OP1.  */

tree
fold_build2_stat_loc (location_t loc,
		      enum tree_code code, tree type, tree op0, tree op1
		      MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
		checksum_before_op1[16],
		checksum_after_op0[16],
		checksum_after_op1[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <tree_node> > ht;

  ht.create (32);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  ht.empty ();
#endif

  tem = fold_binary_loc (loc, code, type, op0, op1);
  if (!tem)
    tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  ht.empty ();

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  ht.dispose ();

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
#endif
  return tem;
}
/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */

tree
fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
		      tree op0, tree op1, tree op2 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
		checksum_before_op1[16],
		checksum_before_op2[16],
		checksum_after_op0[16],
		checksum_after_op1[16],
		checksum_after_op2[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <tree_node> > ht;

  ht.create (32);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  ht.empty ();
#endif

  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
  if (!tem)
    tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  ht.empty ();

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  ht.empty ();

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op2);
  ht.dispose ();

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}
/* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
   arguments in ARGARRAY, and a null static chain.
   Return a folded expression if successful.  Otherwise, return a CALL_EXPR
   of type TYPE from the given operands as constructed by build_call_array.  */

tree
fold_build_call_array_loc (location_t loc, tree type, tree fn,
			   int nargs, tree *argarray)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_fn[16],
		checksum_before_arglist[16],
		checksum_after_fn[16],
		checksum_after_arglist[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <tree_node> > ht;
  int i;

  ht.create (32);
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_fn);
  ht.empty ();

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_arglist);
  ht.empty ();
#endif

  tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_fn);
  ht.empty ();

  if (memcmp (checksum_before_fn, checksum_after_fn, 16))
    fold_check_failed (fn, tem);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_arglist);
  ht.dispose ();

  if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
    fold_check_failed (NULL_TREE, tem);
#endif
  return tem;
}
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;
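/* The save/clear/restore pattern above lets initializer folding be
   maximally aggressive: a static initializer is evaluated at translation
   time, so the flags that exist only to preserve run-time trap and
   rounding behaviour are temporarily switched off around the
   fold_buildN call and restored afterwards.  */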
14984 fold_build1_initializer_loc (location_t loc
, enum tree_code code
,
14985 tree type
, tree op
)
14990 result
= fold_build1_loc (loc
, code
, type
, op
);
14997 fold_build2_initializer_loc (location_t loc
, enum tree_code code
,
14998 tree type
, tree op0
, tree op1
)
15003 result
= fold_build2_loc (loc
, code
, type
, op0
, op1
);
15010 fold_build3_initializer_loc (location_t loc
, enum tree_code code
,
15011 tree type
, tree op0
, tree op1
, tree op2
)
15016 result
= fold_build3_loc (loc
, code
, type
, op0
, op1
, op2
);
15023 fold_build_call_array_initializer_loc (location_t loc
, tree type
, tree fn
,
15024 int nargs
, tree
*argarray
)
15029 result
= fold_build_call_array_loc (loc
, type
, fn
, nargs
, argarray
);
15035 #undef START_FOLD_INIT
15036 #undef END_FOLD_INIT
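/* Illustrative sketch (editor's addition, not part of the original source):
   a front end that must fold "1 << 10" inside a static initializer can use
   the _initializer variants above so that -ftrapv and the floating-point
   trap flags cannot block the folding.  The helper name is hypothetical;
   the calls are existing entry points of this file and tree.c:

     static tree
     example_fold_shift_initializer (location_t loc)
     {
       tree ten = build_int_cst (integer_type_node, 10);
       return fold_build2_initializer_loc (loc, LSHIFT_EXPR,
                                           integer_type_node,
                                           integer_one_node, ten);
     }

   The call folds to the INTEGER_CST 1024 even when the trapping flags are
   active, because folding_initializer is set for its duration.  */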
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */
static int
multiple_of_p (tree type, const_tree top, const_tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
         a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
        return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
        {
          tree op1, t1;

          op1 = TREE_OPERAND (top, 1);
          /* const_binop may not detect overflow correctly,
             so check for it explicitly here.  */
          if (TYPE_PRECISION (TREE_TYPE (size_one_node))
              > TREE_INT_CST_LOW (op1)
              && TREE_INT_CST_HIGH (op1) == 0
              && 0 != (t1 = fold_convert (type,
                                          const_binop (LSHIFT_EXPR,
                                                       size_one_node,
                                                       op1)))
              && !TREE_OVERFLOW (t1))
            return multiple_of_p (type, t1, bottom);
        }
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
          || (TYPE_PRECISION (type)
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
        return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case COND_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
              && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
          || integer_zerop (bottom)
          || (TYPE_UNSIGNED (type)
              && (tree_int_cst_sgn (top) < 0
                  || tree_int_cst_sgn (bottom) < 0)))
        return 0;
      return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
                                             top, bottom));

    default:
      return 0;
    }
}
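/* Illustrative sketch (editor's addition, not part of the original source):
   a hypothetical caller verifying that a sizetype expression SIZE is a
   whole number of 8-byte units before emitting an EXACT_DIV_EXPR:

     static bool
     example_size_is_multiple_of_8 (tree size)
     {
       tree eight = build_int_cst (TREE_TYPE (size), 8);
       return multiple_of_p (TREE_TYPE (size), size, eight) != 0;
     }

   For SIZE = N * 24 this answers true through the MULT_EXPR case above;
   a false answer only means the property could not be shown cheaply.  */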
/* Return true if CODE or TYPE is known to be non-negative.  */

static bool
tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
{
  if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
      && truth_value_p (code))
    /* Truth values evaluate to 0 or 1, which is nonnegative unless we
       have a signed:1 type (where the value is -1 and 0).  */
    return true;

  return false;
}
/* Return true if (CODE OP0) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
                                bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
         ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!INTEGRAL_TYPE_P (type))
        return true;
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          *strict_overflow_p = true;
          return true;
        }
      break;

    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
                                            strict_overflow_p);

    CASE_CONVERT:
      {
        tree inner_type = TREE_TYPE (op0);
        tree outer_type = type;

        if (TREE_CODE (outer_type) == REAL_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_warnv_p (op0,
                                                    strict_overflow_p);
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              {
                if (TYPE_UNSIGNED (inner_type))
                  return true;
                return tree_expr_nonnegative_warnv_p (op0,
                                                      strict_overflow_p);
              }
          }
        else if (TREE_CODE (outer_type) == INTEGER_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_warnv_p (op0,
                                                    strict_overflow_p);
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
                     && TYPE_UNSIGNED (inner_type);
          }
      }
      break;

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
                                 tree op1, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type))
        return (tree_expr_nonnegative_warnv_p (op0,
                                               strict_overflow_p)
                && tree_expr_nonnegative_warnv_p (op1,
                                                  strict_overflow_p));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
         both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
          && TREE_CODE (op0) == NOP_EXPR
          && TREE_CODE (op1) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
            {
              unsigned int prec = MAX (TYPE_PRECISION (inner1),
                                       TYPE_PRECISION (inner2)) + 1;
              return prec < TYPE_PRECISION (type);
            }
        }
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (type))
        {
          /* x * x for floating point x is always non-negative.  */
          if (operand_equal_p (op0, op1, 0))
            return true;
          return (tree_expr_nonnegative_warnv_p (op0,
                                                 strict_overflow_p)
                  && tree_expr_nonnegative_warnv_p (op1,
                                                    strict_overflow_p));
        }

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
         both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
          && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
          && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
        {
          tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
            ? TREE_TYPE (TREE_OPERAND (op0, 0))
            : TREE_TYPE (op0);
          tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
            ? TREE_TYPE (TREE_OPERAND (op1, 0))
            : TREE_TYPE (op1);

          bool unsigned0 = TYPE_UNSIGNED (inner0);
          bool unsigned1 = TYPE_UNSIGNED (inner1);

          if (TREE_CODE (op0) == INTEGER_CST)
            unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;

          if (TREE_CODE (op1) == INTEGER_CST)
            unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;

          if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
              && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
            {
              unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
                ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
                : TYPE_PRECISION (inner0);

              unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
                ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
                : TYPE_PRECISION (inner1);

              return precision0 + precision1 < TYPE_PRECISION (type);
            }
        }
      return false;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
                                             strict_overflow_p)
              || tree_expr_nonnegative_warnv_p (op1,
                                                strict_overflow_p));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
                                             strict_overflow_p)
              && tree_expr_nonnegative_warnv_p (op1,
                                                strict_overflow_p));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
                                            strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
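/* Worked example for the zero_extend rules above (illustrative, not from
   the original source): with two unsigned 8-bit values X and Y widened to
   32-bit int,

     (int) X + (int) Y  <=  255 + 255 = 510 < 2^31,

   so the PLUS_EXPR case requires the wider inner precision plus one
   (8 + 1 = 9) to stay below the outer precision (32), while

     (int) X * (int) Y  <=  255 * 255 < 2^16,

   so the MULT_EXPR case only requires the sum of the inner precisions
   (8 + 8 = 16) to stay below the outer precision.  */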
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case FIXED_CST:
      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));

    case COND_EXPR:
      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                             strict_overflow_p)
              && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
                                                strict_overflow_p));

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t));
    }
  /* We don't know sign of `t', so be conservative and return false.  */
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_call_nonnegative_warnv_p (tree type, tree fndecl,
                               tree arg0, tree arg1, bool *strict_overflow_p)
{
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
        CASE_FLT_FN (BUILT_IN_ACOS):
        CASE_FLT_FN (BUILT_IN_ACOSH):
        CASE_FLT_FN (BUILT_IN_CABS):
        CASE_FLT_FN (BUILT_IN_COSH):
        CASE_FLT_FN (BUILT_IN_ERFC):
        CASE_FLT_FN (BUILT_IN_EXP):
        CASE_FLT_FN (BUILT_IN_EXP10):
        CASE_FLT_FN (BUILT_IN_EXP2):
        CASE_FLT_FN (BUILT_IN_FABS):
        CASE_FLT_FN (BUILT_IN_FDIM):
        CASE_FLT_FN (BUILT_IN_HYPOT):
        CASE_FLT_FN (BUILT_IN_POW10):
        CASE_INT_FN (BUILT_IN_FFS):
        CASE_INT_FN (BUILT_IN_PARITY):
        CASE_INT_FN (BUILT_IN_POPCOUNT):
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
        /* Always true.  */
        return true;

        CASE_FLT_FN (BUILT_IN_SQRT):
        /* sqrt(-0.0) is -0.0.  */
        if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
          return true;
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_ASINH):
        CASE_FLT_FN (BUILT_IN_ATAN):
        CASE_FLT_FN (BUILT_IN_ATANH):
        CASE_FLT_FN (BUILT_IN_CBRT):
        CASE_FLT_FN (BUILT_IN_CEIL):
        CASE_FLT_FN (BUILT_IN_ERF):
        CASE_FLT_FN (BUILT_IN_EXPM1):
        CASE_FLT_FN (BUILT_IN_FLOOR):
        CASE_FLT_FN (BUILT_IN_FMOD):
        CASE_FLT_FN (BUILT_IN_FREXP):
        CASE_FLT_FN (BUILT_IN_ICEIL):
        CASE_FLT_FN (BUILT_IN_IFLOOR):
        CASE_FLT_FN (BUILT_IN_IRINT):
        CASE_FLT_FN (BUILT_IN_IROUND):
        CASE_FLT_FN (BUILT_IN_LCEIL):
        CASE_FLT_FN (BUILT_IN_LDEXP):
        CASE_FLT_FN (BUILT_IN_LFLOOR):
        CASE_FLT_FN (BUILT_IN_LLCEIL):
        CASE_FLT_FN (BUILT_IN_LLFLOOR):
        CASE_FLT_FN (BUILT_IN_LLRINT):
        CASE_FLT_FN (BUILT_IN_LLROUND):
        CASE_FLT_FN (BUILT_IN_LRINT):
        CASE_FLT_FN (BUILT_IN_LROUND):
        CASE_FLT_FN (BUILT_IN_MODF):
        CASE_FLT_FN (BUILT_IN_NEARBYINT):
        CASE_FLT_FN (BUILT_IN_RINT):
        CASE_FLT_FN (BUILT_IN_ROUND):
        CASE_FLT_FN (BUILT_IN_SCALB):
        CASE_FLT_FN (BUILT_IN_SCALBLN):
        CASE_FLT_FN (BUILT_IN_SCALBN):
        CASE_FLT_FN (BUILT_IN_SIGNBIT):
        CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
        CASE_FLT_FN (BUILT_IN_SINH):
        CASE_FLT_FN (BUILT_IN_TANH):
        CASE_FLT_FN (BUILT_IN_TRUNC):
        /* True if the 1st argument is nonnegative.  */
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_FMAX):
        /* True if the 1st OR 2nd arguments are nonnegative.  */
        return (tree_expr_nonnegative_warnv_p (arg0,
                                               strict_overflow_p)
                || (tree_expr_nonnegative_warnv_p (arg1,
                                                   strict_overflow_p)));

        CASE_FLT_FN (BUILT_IN_FMIN):
        /* True if the 1st AND 2nd arguments are nonnegative.  */
        return (tree_expr_nonnegative_warnv_p (arg0,
                                               strict_overflow_p)
                && (tree_expr_nonnegative_warnv_p (arg1,
                                                   strict_overflow_p)));

        CASE_FLT_FN (BUILT_IN_COPYSIGN):
        /* True if the 2nd argument is nonnegative.  */
        return tree_expr_nonnegative_warnv_p (arg1,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_POWI):
        /* True if the 1st argument is nonnegative or the second
           argument is an even integer.  */
        if (TREE_CODE (arg1) == INTEGER_CST
            && (TREE_INT_CST_LOW (arg1) & 1) == 0)
          return true;
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_POW):
        /* True if the 1st argument is nonnegative or the second
           argument is an even integer valued real.  */
        if (TREE_CODE (arg1) == REAL_CST)
          {
            REAL_VALUE_TYPE c;
            HOST_WIDE_INT n;

            c = TREE_REAL_CST (arg1);
            n = real_to_integer (&c);
            if ((n & 1) == 0)
              {
                REAL_VALUE_TYPE cint;
                real_from_integer (&cint, VOIDmode, n,
                                   n < 0 ? -1 : 0, 0);
                if (real_identical (&c, &cint))
                  return true;
              }
          }
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

      default:
        break;
      }
  return tree_simple_nonnegative_warnv_p (CALL_EXPR,
                                          type);
}
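/* Worked example for the BUILT_IN_POW case above (illustrative, not from
   the original source): for pow (x, 2.0) the exponent is a REAL_CST;
   real_to_integer yields n = 2, (n & 1) == 0 holds, and
   real_from_integer/real_identical confirm that 2.0 is exactly integral,
   so the result is reported non-negative regardless of the sign of x.
   For pow (x, 2.5) the real_identical check fails, and the result is
   non-negative only if x is.  */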
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

static bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code = TREE_CODE (t);

  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (code)
    {
    case TARGET_EXPR:
      {
        tree temp = TARGET_EXPR_SLOT (t);
        t = TARGET_EXPR_INITIAL (t);

        /* If the initializer is non-void, then it's a normal expression
           that will be assigned to the slot.  */
        if (!VOID_TYPE_P (t))
          return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);

        /* Otherwise, the initializer sets the slot in some way.  One common
           way is an assignment statement at the end of the initializer.  */
        while (1)
          {
            if (TREE_CODE (t) == BIND_EXPR)
              t = expr_last (BIND_EXPR_BODY (t));
            else if (TREE_CODE (t) == TRY_FINALLY_EXPR
                     || TREE_CODE (t) == TRY_CATCH_EXPR)
              t = expr_last (TREE_OPERAND (t, 0));
            else if (TREE_CODE (t) == STATEMENT_LIST)
              t = expr_last (t);
            else
              break;
          }
        if (TREE_CODE (t) == MODIFY_EXPR
            && TREE_OPERAND (t, 0) == temp)
          return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                                strict_overflow_p);

        return false;
      }

    case CALL_EXPR:
      {
        tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
        tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;

        return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
                                              get_callee_fndecl (t),
                                              arg0, arg1,
                                              strict_overflow_p);
      }

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                            strict_overflow_p);

    case BIND_EXPR:
      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
                                            strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
                                            strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code;

  if (t == error_mark_node)
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              TREE_OPERAND (t, 1),
                                              strict_overflow_p);

    case tcc_unary:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
                                             TREE_TYPE (t),
                                             TREE_OPERAND (t, 0),
                                             strict_overflow_p);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              TREE_OPERAND (t, 1),
                                              strict_overflow_p);
    case TRUTH_NOT_EXPR:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
                                             TREE_TYPE (t),
                                             TREE_OPERAND (t, 0),
                                             strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
    }
}
/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */

bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
                            "determining that expression is always "
                            "non-negative"),
                           WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
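/* Illustrative sketch (hypothetical caller, not from the original source):
   a pass that wants to prove a sign test redundant can simply ask

     if (tree_expr_nonnegative_p (expr))
       ...  /+ e.g. fold (expr >= 0) to true +/

   Any strict-overflow assumption made while answering is reported through
   fold_overflow_warning on the caller's behalf.  */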
/* Return true when (CODE OP0) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
                            bool *strict_overflow_p)
{
  switch (code)
    {
    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
                                        strict_overflow_p);

    CASE_CONVERT:
      {
        tree inner_type = TREE_TYPE (op0);
        tree outer_type = type;

        return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
                && tree_expr_nonzero_warnv_p (op0,
                                              strict_overflow_p));
      }
      break;

    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
                                        strict_overflow_p);

    default:
      break;
    }

  return false;
}
/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonzero_warnv_p (enum tree_code code,
                             tree type,
                             tree op0,
                             tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          /* With the presence of negative values it is hard
             to say something.  */
          sub_strict_overflow_p = false;
          if (!tree_expr_nonnegative_warnv_p (op0,
                                              &sub_strict_overflow_p)
              || !tree_expr_nonnegative_warnv_p (op1,
                                                 &sub_strict_overflow_p))
            return false;
          /* One of operands must be positive and the other non-negative.  */
          /* We don't set *STRICT_OVERFLOW_P here: even if this value
             overflows, on a twos-complement machine the sum of two
             nonnegative numbers can never be zero.  */
          return (tree_expr_nonzero_warnv_p (op0,
                                             strict_overflow_p)
                  || tree_expr_nonzero_warnv_p (op1,
                                                strict_overflow_p));
        }
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          if (tree_expr_nonzero_warnv_p (op0,
                                         strict_overflow_p)
              && tree_expr_nonzero_warnv_p (op1,
                                            strict_overflow_p))
            {
              *strict_overflow_p = true;
              return true;
            }
        }
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
                                     &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (op1,
                                        &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
                                     &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;

          /* When both operands are nonzero, then MAX must be too.  */
          if (tree_expr_nonzero_warnv_p (op1,
                                         strict_overflow_p))
            return true;

          /* MAX where operand 0 is positive is positive.  */
          return tree_expr_nonnegative_warnv_p (op0,
                                                strict_overflow_p);
        }
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1,
                                          &sub_strict_overflow_p)
               && tree_expr_nonnegative_warnv_p (op1,
                                                 &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (op1,
                                         strict_overflow_p)
              || tree_expr_nonzero_warnv_p (op0,
                                            strict_overflow_p));

    default:
      break;
    }

  return false;
}
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
        tree base = TREE_OPERAND (t, 0);
        if (!DECL_P (base))
          base = get_base_address (base);

        if (!base)
          return false;

        /* Weak declarations may link to NULL.  Other things may also be NULL
           so protect with -fdelete-null-pointer-checks; but not variables
           allocated on the stack.  */
        if (DECL_P (base)
            && (flag_delete_null_pointer_checks
                || (DECL_CONTEXT (base)
                    && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
                    && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
          return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);

        /* Constants are never weak.  */
        if (CONSTANT_CLASS_P (base))
          return true;

        return false;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                     &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
                                        &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    default:
      break;
    }

  return false;
}
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
                                         strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
                                          TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1),
                                          strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
                                         strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
                                          TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1),
                                          strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                        strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
                                        strict_overflow_p);

    case CALL_EXPR:
      return alloca_call_p (t);

    default:
      break;
    }
  return false;
}
/* Return true when T is an address and is known to be nonzero.
   Handle warnings about undefined signed overflow.  */

bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
                            "determining that expression is always "
                            "non-zero"),
                           WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
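/* Illustrative sketch (hypothetical caller, not from the original source):
   because only fully constant results are returned, the routine above can
   be used as a yes/no oracle:

     tree sum = fold_binary_to_constant (PLUS_EXPR, itype, c1, c2);

   For c1 = 2 and c2 = 3 this yields the INTEGER_CST 5; for non-constant
   operands it yields NULL_TREE rather than a partially simplified tree.  */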
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;
      location_t loc = EXPR_LOCATION (exp);

      if (TREE_CODE (exp) == INDIRECT_REF)
        string = string_constant (exp1, &index);
      else
        {
          tree low_bound = array_ref_low_bound (exp);
          index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));

          /* Optimize the special-case of a zero lower bound.

             We convert the low_bound to sizetype to avoid some problems
             with constant folding.  (E.g. suppose the lower bound is 1,
             and its mode is QI.  Without the conversion, (ARRAY
             +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
             +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
          if (! integer_zerop (low_bound))
            index = size_diffop_loc (loc, index,
                                     fold_convert_loc (loc, sizetype,
                                                       low_bound));

          string = exp1;
        }

      if (string
          && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
          && TREE_CODE (string) == STRING_CST
          && TREE_CODE (index) == INTEGER_CST
          && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
          && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
              == MODE_INT)
          && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
        return build_int_cst_type (TREE_TYPE (exp),
                                   (TREE_STRING_POINTER (string)
                                    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}
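/* Example (illustrative, not from the original source): for the C
   expression "abc"[1], EXP is an ARRAY_REF of a STRING_CST with a zero
   lower bound, and the routine above returns the character constant 'b'
   via build_int_cst_type.  An index of 4 would fail the compare_tree_int
   check, since TREE_STRING_LENGTH counts the terminating NUL (4 for
   "abc").  */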
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        double_int val = tree_to_double_int (arg0);
        bool overflow;
        val = val.neg_with_overflow (&overflow);
        t = force_fit_type_double (type, val, 1,
                                   (overflow | TREE_OVERFLOW (arg0))
                                   && !TYPE_UNSIGNED (type));
        break;
      }

    case REAL_CST:
      t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
        FIXED_VALUE_TYPE f;
        bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
                                            &(TREE_FIXED_CST (arg0)), NULL,
                                            TYPE_SATURATING (type));
        t = build_fixed (type, f);
        /* Propagate overflow flags.  */
        if (overflow_p | TREE_OVERFLOW (arg0))
          TREE_OVERFLOW (t) = 1;
        break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        double_int val = tree_to_double_int (arg0);

        /* If the value is unsigned or non-negative, then the absolute value
           is the same as the ordinary value.  */
        if (TYPE_UNSIGNED (type)
            || !val.is_negative ())
          t = arg0;

        /* If the value is negative, then the absolute value is
           its negation.  */
        else
          {
            bool overflow;
            val = val.neg_with_overflow (&overflow);
            t = force_fit_type_double (type, val, -1,
                                       overflow | TREE_OVERFLOW (arg0));
          }
      }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
        t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      else
        t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (const_tree arg0, tree type)
{
  double_int val;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  val = ~tree_to_double_int (arg0);
  return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
}
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
        {
          switch (code)
            {
            case EQ_EXPR:
            case ORDERED_EXPR:
              result = 0;
              break;

            case NE_EXPR:
            case UNORDERED_EXPR:
            case UNLT_EXPR:
            case UNLE_EXPR:
            case UNGT_EXPR:
            case UNGE_EXPR:
            case UNEQ_EXPR:
              result = 1;
              break;

            case LT_EXPR:
            case LE_EXPR:
            case GT_EXPR:
            case GE_EXPR:
            case LTGT_EXPR:
              if (flag_trapping_math)
                return NULL_TREE;
              result = 0;
              break;

            default:
              gcc_unreachable ();
            }

          return constant_boolean_node (result, type);
        }

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
                                          TREE_REALPART (op0),
                                          TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
                                          TREE_IMAGPART (op0),
                                          TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
        return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
        return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
        return NULL_TREE;
    }

  if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
    {
      unsigned count = VECTOR_CST_NELTS (op0);
      tree *elts = XALLOCAVEC (tree, count);
      gcc_assert (VECTOR_CST_NELTS (op1) == count
                  && TYPE_VECTOR_SUBPARTS (type) == count);

      for (unsigned i = 0; i < count; i++)
        {
          tree elem_type = TREE_TYPE (type);
          tree elem0 = VECTOR_CST_ELT (op0, i);
          tree elem1 = VECTOR_CST_ELT (op1, i);

          tree tem = fold_relational_const (code, elem_type,
                                            elem0, elem1);

          if (tem == NULL_TREE)
            return NULL_TREE;

          elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
        }

      return build_vector (type, elts);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     Otherwise return T.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
        result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
        result = INT_CST_LT_UNSIGNED (op0, op1);
      else
        result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
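/* Example (illustrative, not from the original source): for
   fold_relational_const (GE_EXPR, type, 2, 3) the code above rewrites GE
   to LT with INVERT set, computes INT_CST_LT (2, 3) = 1, and inverts it
   to 0, yielding constant_boolean_node (0, type), i.e. false.  With a NaN
   operand, LT under -ftrapping-math instead returns NULL_TREE, because
   the comparison could raise an exception at run time.  */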
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check whether the expression inside the
     return, or the right-hand side of the modify expression inside the
     return, has side effects.  If it has none, we don't need to wrap the
     expression in a cleanup point expression.  Note we don't check the
     left-hand side of the modify because it should always be a return
     decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
        return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
        return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
        return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
        {
          tree fop = fold_read_from_constant_string (op);
          if (fop)
            return fop;
          else
            return op;
        }
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype)
               && (!in_gimple_form
                   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          if (in_gimple_form
              && TREE_CODE (min_val) != INTEGER_CST)
            return NULL_TREE;
          return build4_loc (loc, ARRAY_REF, type, op, min_val,
                             NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && type == TREE_TYPE (optype))
        return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && type == TREE_TYPE (optype))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3_loc (loc, BIT_FIELD_REF, type, op,
                                  part_width, index);
        }
    }

  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
        {
          tree op00type;
          op00 = TREE_OPERAND (op00, 0);
          op00type = TREE_TYPE (op00);

          /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
          if (TREE_CODE (op00type) == VECTOR_TYPE
              && type == TREE_TYPE (op00type))
            {
              HOST_WIDE_INT offset = tree_low_cst (op01, 0);
              tree part_width = TYPE_SIZE (type);
              unsigned HOST_WIDE_INT part_widthi
                = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
              unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
              tree index = bitsize_int (indexi);

              if (offset / part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
                return fold_build3_loc (loc,
                                        BIT_FIELD_REF, type, op00,
                                        part_width, index);
            }
          /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
          else if (TREE_CODE (op00type) == COMPLEX_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree size = TYPE_SIZE_UNIT (type);
              if (tree_int_cst_equal (size, op01))
                return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
            }
          /* ((foo *)&fooarray)[1] => fooarray[1] */
          else if (TREE_CODE (op00type) == ARRAY_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree type_domain = TYPE_DOMAIN (op00type);
              tree min_val = size_zero_node;
              if (type_domain && TYPE_MIN_VALUE (type_domain))
                min_val = TYPE_MIN_VALUE (type_domain);
              op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
                                     TYPE_SIZE_UNIT (type));
              op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
              return build4_loc (loc, ARRAY_REF, type, op00, op01,
                                 NULL_TREE, NULL_TREE);
            }
        }
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
          || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
          && TREE_CODE (min_val) != INTEGER_CST)
        return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
                         NULL_TREE);
    }

  return NULL_TREE;
}
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
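/* Example (illustrative, not from the original source): with "int a[4];",
   folding *(int *)&a takes the ADDR_EXPR arm of fold_indirect_ref_1 above
   and yields the ARRAY_REF a[0], while *((int *)&a + 1) is a
   POINTER_PLUS_EXPR with byte offset 4, so the EXACT_DIV_EXPR of 4 by
   TYPE_SIZE_UNIT (int) = 4 yields a[1].  */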
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
        {
          double_int val = tree_to_double_int (value);
          bool overflow_p;

          if ((val.low & (divisor - 1)) == 0)
            return value;

          overflow_p = TREE_OVERFLOW (value);
          val.low &= ~(divisor - 1);
          val.low += divisor;
          if (val.low == 0)
            {
              val.high++;
              if (val.high == 0)
                overflow_p = true;
            }

          return force_fit_type_double (TREE_TYPE (value), val,
                                        -1, overflow_p);
        }
      else
        {
          tree t;

          t = build_int_cst (TREE_TYPE (value), divisor - 1);
          value = size_binop_loc (loc, PLUS_EXPR, value, t);
          t = build_int_cst (TREE_TYPE (value), -divisor);
          value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
        }
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
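/* Worked example for the power-of-two path above (illustrative, not from
   the original source): round_up_loc (loc, value, 8) on a non-constant
   sizetype VALUE emits

     (VALUE + 7) & -8

   so e.g. 13 becomes (13 + 7) & ~7 = 20 & ~7 = 16, whereas a
   non-power-of-two divisor such as 12 takes the CEIL_DIV_EXPR followed
   by MULT_EXPR fallback.  */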
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
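/* Example (illustrative, not from the original source): with "int a[8];",
   ptr_difference_const (&a[3], &a[1], &diff) finds the common core &a,
   constant bit positions 96 and 32, and stores diff = (96 - 32) / 8 = 8
   bytes.  Against &a[i] it returns false, because only one of the two
   offsets is constant.  */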
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
                                arg0 ? arg0 : TREE_OPERAND (exp, 0),
                                arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp),
                                arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3_loc (loc,
                                COND_EXPR, TREE_TYPE (exp),
                                TREE_OPERAND (exp, 0),
                                arg0 ? arg0 : TREE_OPERAND (exp, 1),
                                arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_COPYSIGN):
            /* Strip copysign function call, return the 1st argument.  */
            arg0 = CALL_EXPR_ARG (exp, 0);
            arg1 = CALL_EXPR_ARG (exp, 1);
            return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

          default:
            /* Strip sign ops from the argument of "odd" math functions.  */
            if (negate_mathfn_p (fcode))
              {
                arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
                if (arg0)
                  return build_call_expr_loc (loc, get_callee_fndecl (exp),
                                              1, arg0);
              }
            break;
          }
      }
      break;

    default:
      break;
    }
  return NULL;
}