/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-GIMPLE code as well as
   GIMPLE code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
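/* Usage sketch (added for exposition; not part of the original source):
   a middle-end caller combining two size expressions might write

     tree sum = size_binop (PLUS_EXPR, size_int (4), size_int (8));

   which folds to the sizetype constant 12.  size_binop asserts that
   both operands have equivalent integer types, in the sense of
   int_binop_types_match_p below.  */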
45 #include "coretypes.h"
54 #include "diagnostic-core.h"
57 #include "hash-table.h"
58 #include "langhooks.h"
61 #include "tree-flow.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree, tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
/* Similar to protected_set_expr_location, but never modify x in place;
   if location can and needs to be set, unshare it.  */

static tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is taken from operand two, which
     does the correct thing for POINTER_PLUS_EXPR where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));

  quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
                                          uns, code, &rem);

  if (rem.is_zero ())
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
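/* Usage sketch (exposition only): callers that may discard fold()
   results bracket the calls, e.g.

     fold_defer_overflow_warnings ();
     tree res = fold_binary (PLUS_EXPR, type, op0, op1);
     fold_undefer_overflow_warnings (res != NULL_TREE, stmt, 0);

   where stmt stands in for caller-specific context; a deferred
   -Wstrict-overflow warning is only emitted when ISSUE is true.  */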
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}
/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
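/* Worked example (exposition only): for 32-bit int the only constant
   the check above rejects is INT_MIN (-2147483648), whose negation is
   not representable; there val ends up equal to 1 << (prec - 1) and
   the function returns false, while every other constant may be
   negated safely.  */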
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */
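/* Example (exposition only): splitting IN = a - 5 with CODE ==
   PLUS_EXPR returns var == a and stores the literal 5 in *MINUS_LITP,
   since the literal was subtracted; the caller can then rebuild the
   expression with associate_trees below.  */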
static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */
static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
                   int overflowable)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case RSHIFT_EXPR:
      res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      res = op1.add_with_sign (op2, false, &overflow);
      break;

    case MINUS_EXPR:
      res = op1.sub_with_overflow (op2, &overflow);
      break;

    case MULT_EXPR:
      res = op1.mul_with_sign (op2, false, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
        {
          bool dummy_overflow;
          if (TYPE_PRECISION (type) != 2 * HOST_BITS_PER_WIDE_INT)
            return NULL_TREE;
          op1.wide_mul_with_sign (op2, uns, &res, &dummy_overflow);
        }
      else
        {
          bool dummy_overflow;
          /* MULT_HIGHPART_EXPR can't ever overflow, as the multiplication
             is performed in twice the precision of arguments.  */
          tmp = op1.mul_with_sign (op2, false, &dummy_overflow);
          res = tmp.rshift (TYPE_PRECISION (type),
                            2 * TYPE_PRECISION (type), !uns);
        }
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            op1.low += op2.low - 1;

          res.low = op1.low / op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      if (op2.is_one ())
        {
          res = op1;
          break;
        }
      if (op1 == op2 && !op1.is_zero ())
        {
          res = double_int_one;
          break;
        }
      res = op1.divmod_with_overflow (op2, uns, code, &tmp, &overflow);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            op1.low += op2.low - 1;
          res.low = op1.low % op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      tmp = op1.divmod_with_overflow (op2, uns, code, &res, &overflow);
      break;

    case MIN_EXPR:
      res = op1.min (op2, uns);
      break;

    case MAX_EXPR:
      res = op1.max (op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
                             (!uns && overflow)
                             | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
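/* Example (exposition only): folding 7 + 9 in int arrives here as

     int_const_binop (PLUS_EXPR,
                      build_int_cst (integer_type_node, 7),
                      build_int_cst (integer_type_node, 9));

   and yields an INTEGER_CST of value 16 with TREE_OVERFLOW clear.  */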
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ... */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);

          elts[i] = const_binop (code, elem1, arg2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  return NULL_TREE;
}
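/* Worked example for the straight complex division above (exposition
   only): with a = 1 + 2i and b = 3 + 4i, t = 3*3 + 4*4 = 25,
   tr = (1*3 + 2*4)/25 = 11/25 and ti = (2*3 - 1*4)/25 = 2/25, i.e.
   a/b = (11 + 2i)/25.  The wide-range variant computes the same value
   but scales by ratio = br/bi (or bi/br) first so that the divisor
   cannot overflow.  */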
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, a la int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
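/* Example (exposition only): with both operands of sizetype, the
   result is of ssizetype, so

     size_diffop_loc (loc, size_int (4), size_int (8))

   folds to the ssizetype constant -4, computed as -(8 - 4) because the
   subtraction is always done in the direction that cannot wrap.  */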
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = true;
          val = tree_to_double_int (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = true;
              val = tree_to_double_int (ut);
            }
        }
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
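/* Example of the saturating (Java-style) semantics above (exposition
   only): converting the REAL_CST 1e30 to 32-bit int yields INT_MAX
   with TREE_OVERFLOW set, -1e30 yields INT_MIN, and a NaN yields 0,
   likewise flagged as an overflow.  */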
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  enum machine_mode mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
                          HOST_BITS_PER_DOUBLE_INT,
                          SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     We do this by adding 1 to temp when the discarded fractional bits
     are not all zero.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
                             (temp.is_negative ()
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
                                       TREE_INT_CST (arg1),
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}
/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
          && (TYPE_ADDR_SPACE (TREE_TYPE (type))
              != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
        return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, REALPART_EXPR,
                                                  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == FIXED_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1_loc (loc, NOP_EXPR, type, arg);

        case FIXED_POINT_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
          || TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            goto fold_convert_exit;
        }

      switch (TREE_CODE (orig))
        {
        case FIXED_POINT_TYPE:
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case BOOLEAN_TYPE:
        case REAL_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
        case FIXED_POINT_TYPE:
          return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                  fold_convert_loc (loc, TREE_TYPE (type), arg),
                                  fold_convert_loc (loc, TREE_TYPE (type),
                                                    integer_zero_node));
        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 0));
                ipart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 1));
                return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
              }

            arg = save_expr (arg);
            rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
            ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
            rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
            ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
            return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
          }

        default:
          gcc_unreachable ();
        }

    case VECTOR_TYPE:
      if (integer_zerop (arg))
        return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
                  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      gcc_unreachable ();
    }
 fold_convert_exit:
  return protected_set_expr_location_unshare (tem, loc);
}
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (const_tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case LABEL_DECL:
    case FUNCTION_DECL:
    case SSA_NAME:

    case COMPONENT_REF:
    case MEM_REF:
    case INDIRECT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
    case OBJ_TYPE_REF:

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case SAVE_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case TARGET_EXPR:
    case COND_EXPR:
    case BIND_EXPR:
      break;

    default:
      /* Assume the worst for front-end tree codes.  */
      if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
        break;
      return false;
    }

  return true;
}
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue_loc (location_t loc, tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;
  return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
}
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue_loc (location_t loc, tree x)
{
  if (pedantic_lvalues)
    return non_lvalue_loc (loc, x);

  return protected_set_expr_location_unshare (x, loc);
}
/* Given a tree comparison code, return the code that is the logical inverse.
   It is generally not safe to do this for floating-point comparisons, except
   for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
   ERROR_MARK in this case.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
      && code != ORDERED_EXPR && code != UNORDERED_EXPR)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
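/* Example (exposition only): with NaNs honored, the inverse of LT_EXPR
   is UNGE_EXPR rather than GE_EXPR, because !(x < y) must remain true
   when either operand is a NaN; with honor_nans false the plain
   GE_EXPR is returned instead.  */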
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case LTGT_EXPR:
    case UNEQ_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    case UNGT_EXPR:
      return UNLT_EXPR;
    case UNGE_EXPR:
      return UNLE_EXPR;
    case UNLT_EXPR:
      return UNGT_EXPR;
    case UNLE_EXPR:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
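/* The bit encoding that makes these two conversions cheap (exposition
   only): COMPCODE_LT, COMPCODE_EQ and COMPCODE_GT are the bits 1, 2
   and 4, so for instance COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ)
   == 3, and COMPCODE_UNORD contributes the "unordered" bit 8.  */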

/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

tree
combine_comparisons (location_t loc,
		     enum tree_code code, enum tree_code lcode,
		     enum tree_code rcode, tree truth_type,
		     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  int compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return 0;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
	 which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
	compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
	compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
	 under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
		   && (lcompcode != COMPCODE_EQ)
		   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
		   && (rcompcode != COMPCODE_EQ)
		   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
		  && (compcode != COMPCODE_EQ)
		  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
	 such that the RHS, if evaluated, will never trap.  For
	 example, in ORD (x, y) && (x < y), we evaluate the RHS only
	 if neither x nor y is NaN.  (This is a mixed blessing: for
	 example, the expression above will never trap, hence
	 optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
	  || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
	rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
	 trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
	  && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
	return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
	return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    {
      enum tree_code tcode;

      tcode = compcode_to_comparison ((enum comparison_code) compcode);
      return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
    }
}
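
/* Illustrative sketch (added): for identical operands a and b,

     combine_comparisons (loc, TRUTH_OR_EXPR, LT_EXPR, EQ_EXPR,
			  boolean_type_node, a, b)

   combines COMPCODE_LT | COMPCODE_EQ into COMPCODE_LE and builds
   a <= b, while ANDing contradictory tests such as LT and GT yields
   COMPCODE_FALSE and folds to a constant false node.  */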

/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */

int
operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
      || TREE_TYPE (arg0) == error_mark_node
      || TREE_TYPE (arg1) == error_mark_node)
    return 0;

  /* Similar, if either does not have a type (like a released SSA name),
     they aren't equal.  */
  if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
    return 0;

  /* Check equality of integer constants before bailing out due to
     precision differences.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    return tree_int_cst_equal (arg0, arg1);

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  As pointers
     strictly don't have a signedness, require either two pointers or
     two non-pointers as well.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
      || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* We cannot consider pointers to different address space equal.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
      && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
	  != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
    return 0;

  /* If both types don't have the same precision, then it is not safe
     to strip NOPs.  */
  if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* In case both args are comparisons but with different comparison
     code, try to swap the comparison operands of one arg to produce
     a match and compare that variant.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      && COMPARISON_CLASS_P (arg0)
      && COMPARISON_CLASS_P (arg1))
    {
      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));

      if (TREE_CODE (arg0) == swap_code)
	return operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 1), flags)
	       && operand_equal_p (TREE_OPERAND (arg0, 1),
				   TREE_OPERAND (arg1, 0), flags);
    }

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
	 Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.
     If we are taking an invariant address of two identical objects
     they are necessarily equal as well.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
	  || (flags & OEP_CONSTANT_ADDRESS_OF)
	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
	return tree_int_cst_equal (arg0, arg1);

      case FIXED_CST:
	return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
				       TREE_FIXED_CST (arg1));

      case REAL_CST:
	if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
				   TREE_REAL_CST (arg1)))
	  return 1;

	if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
	  {
	    /* If we do not distinguish between signed and unsigned zero,
	       consider them equal.  */
	    if (real_zerop (arg0) && real_zerop (arg1))
	      return 1;
	  }
	return 0;

      case VECTOR_CST:
	{
	  unsigned i;

	  if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
	    return 0;

	  for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
	    {
	      if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
				    VECTOR_CST_ELT (arg1, i), flags))
		return 0;
	    }
	  return 1;
	}

      case COMPLEX_CST:
	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
				 flags)
		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
				    flags));

      case STRING_CST:
	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
		&& ! memcmp (TREE_STRING_POINTER (arg0),
			     TREE_STRING_POINTER (arg1),
			     TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
				TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
				? OEP_CONSTANT_ADDRESS_OF : 0);
      default:
	break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;

/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, then both
   must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),	\
				    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N)				\
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
	{
	CASE_CONVERT:
	case FIX_TRUNC_EXPR:
	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
	    return 0;
	  break;
	default:
	  break;
	}

      return OP_SAME (0);

    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
	return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 1), flags)
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
	 dereferencing contain a side effect, these cannot be equal,
	 but their addresses can be.  */
      if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
	  && (TREE_SIDE_EFFECTS (arg0)
	      || TREE_SIDE_EFFECTS (arg1)))
	return 0;

      switch (TREE_CODE (arg0))
	{
	case INDIRECT_REF:
	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
	  return OP_SAME (0);

	case REALPART_EXPR:
	case IMAGPART_EXPR:
	  return OP_SAME (0);

	case TARGET_MEM_REF:
	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
	  /* Require equal extra operands and then fall through to MEM_REF
	     handling of the two common operands.  */
	  if (!OP_SAME_WITH_NULL (2)
	      || !OP_SAME_WITH_NULL (3)
	      || !OP_SAME_WITH_NULL (4))
	    return 0;
	  /* Fallthru.  */
	case MEM_REF:
	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
	  /* Require equal access sizes, and similar pointer types.
	     We can have incomplete types for array references of
	     variable-sized arrays from the Fortran frontend
	     though.  Also verify the types are compatible.  */
	  return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
		   || (TYPE_SIZE (TREE_TYPE (arg0))
		       && TYPE_SIZE (TREE_TYPE (arg1))
		       && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
					   TYPE_SIZE (TREE_TYPE (arg1)),
					   flags)))
		  && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
		  && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
		      == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
		  && OP_SAME (0) && OP_SAME (1));

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  /* Operands 2 and 3 may be null.
	     Compare the array index by value if it is constant first as we
	     may have different types but same value here.  */
	  if (!OP_SAME (0))
	    return 0;
	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
	  return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
				       TREE_OPERAND (arg1, 1))
		   || OP_SAME (1))
		  && OP_SAME_WITH_NULL (2)
		  && OP_SAME_WITH_NULL (3));

	case COMPONENT_REF:
	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
	     may be NULL when we're called to compare MEM_EXPRs.  */
	  if (!OP_SAME_WITH_NULL (0))
	    return 0;
	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
	  return OP_SAME (1) && OP_SAME_WITH_NULL (2);

	case BIT_FIELD_REF:
	  if (!OP_SAME (0))
	    return 0;
	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
	  return OP_SAME (1) && OP_SAME (2);

	default:
	  return 0;
	}

    case tcc_expression:
      switch (TREE_CODE (arg0))
	{
	case ADDR_EXPR:
	case TRUTH_NOT_EXPR:
	  return OP_SAME (0);

	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	  return OP_SAME (0) && OP_SAME (1);

	case FMA_EXPR:
	case WIDEN_MULT_PLUS_EXPR:
	case WIDEN_MULT_MINUS_EXPR:
	  if (!OP_SAME (2))
	    return 0;
	  /* The multiplication operands are commutative.  */
	  /* FALLTHRU */

	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  if (OP_SAME (0) && OP_SAME (1))
	    return 1;

	  /* Otherwise take into account this is a commutative operation.  */
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 1), flags)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 0), flags));

	case COND_EXPR:
	case VEC_COND_EXPR:
	case DOT_PROD_EXPR:
	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

	default:
	  return 0;
	}

    case tcc_vl_exp:
      switch (TREE_CODE (arg0))
	{
	case CALL_EXPR:
	  /* If the CALL_EXPRs call different functions, then they
	     clearly can not be equal.  */
	  if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
				 flags))
	    return 0;

	  {
	    unsigned int cef = call_expr_flags (arg0);
	    if (flags & OEP_PURE_SAME)
	      cef &= ECF_CONST | ECF_PURE;
	    else
	      cef &= ECF_CONST;
	    if (!cef)
	      return 0;
	  }

	  /* Now see if all the arguments are the same.  */
	  {
	    const_call_expr_arg_iterator iter0, iter1;
	    const_tree a0, a1;
	    for (a0 = first_const_call_expr_arg (arg0, &iter0),
		   a1 = first_const_call_expr_arg (arg1, &iter1);
		 a0 && a1;
		 a0 = next_const_call_expr_arg (&iter0),
		   a1 = next_const_call_expr_arg (&iter1))
	      if (! operand_equal_p (a0, a1, flags))
		return 0;

	    /* If we get here and both argument lists are exhausted
	       then the CALL_EXPRs are equal.  */
	    return ! (a0 || a1);
	  }
	default:
	  return 0;
	}

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
	      && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
	      && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}
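
/* Usage note (added): the IEEE caveats in the comment above matter in
   practice.  For REAL_CSTs, operand_equal_p on -0.0 and 0.0 returns zero
   when the mode honors signed zeros, even though -0.0 == 0.0 evaluates
   to true; conversely, two identical NaN constants compare equal here
   although NaN != NaN at run time.  */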

/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
	 to match the longer operand.  */
      primarg1 = fold_convert (signed_or_unsigned_type_for
			       (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
	return 1;
    }

  return 0;
}

/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
	       || code == COMPOUND_EXPR))
    tclass = tcc_binary;

  else if (tclass == tcc_expression && code == SAVE_EXPR
	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
	 too complex to handle.  */
      if (*cval1 || *cval2)
	return 0;

      tclass = tcc_unary;
      *save_p = 1;
    }

  switch (tclass)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
				      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
				     cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
					cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
					cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
	 the second operand, we know *CVAL1 can't be zero.  It must be that
	 one side of the comparison is each of the values; test for the
	 case where this isn't true by failing if the two operands
	 are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
			   TREE_OPERAND (arg, 1), 0))
	return 0;

      if (*cval1 == 0)
	*cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
	;
      else
	return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
	;
      else
	return 0;

      return 1;

    default:
      return 0;
    }
}

/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (location_t loc, tree arg, tree old0, tree new0,
	    tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    tclass = tcc_binary;

  switch (tclass)
    {
    case tcc_unary:
      return fold_build1_loc (loc, code, type,
			      eval_subst (loc, TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2_loc (loc, code, type,
			      eval_subst (loc, TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1),
			      eval_subst (loc, TREE_OPERAND (arg, 1),
					  old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
	{
	case SAVE_EXPR:
	  return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
			     old1, new1);

	case COMPOUND_EXPR:
	  return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
			     old1, new1);

	case COND_EXPR:
	  return fold_build3_loc (loc, code, type,
				  eval_subst (loc, TREE_OPERAND (arg, 0),
					      old0, new0, old1, new1),
				  eval_subst (loc, TREE_OPERAND (arg, 1),
					      old0, new0, old1, new1),
				  eval_subst (loc, TREE_OPERAND (arg, 2),
					      old0, new0, old1, new1));
	default:
	  break;
	}
      /* Fall through - ???  */

    case tcc_comparison:
      {
	tree arg0 = TREE_OPERAND (arg, 0);
	tree arg1 = TREE_OPERAND (arg, 1);

	/* We need to check both for exact equality and tree equality.  The
	   former will be true if the operand has a side-effect.  In that
	   case, we know the operand occurred exactly once.  */

	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
	  arg0 = new0;
	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
	  arg0 = new1;

	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
	  arg1 = new0;
	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
	  arg1 = new1;

	return fold_build2_loc (loc, code, type, arg0, arg1);
      }

    default:
      return arg;
    }
}

/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement cast to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1_loc (loc, NOP_EXPR, void_type_node,
		       fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2_loc (loc, COMPOUND_EXPR, type,
		       fold_ignored_result (omitted), t);

  return non_lvalue_loc (loc, t);
}

/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
			       tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement cast to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1_loc (loc, NOP_EXPR, void_type_node,
		       fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2_loc (loc, COMPOUND_EXPR, type,
		       fold_ignored_result (omitted), t);

  return pedantic_non_lvalue_loc (loc, t);
}

/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands_loc (location_t loc, tree type, tree result,
		       tree omitted1, tree omitted2)
{
  tree t = fold_convert_loc (loc, type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
  if (TREE_SIDE_EFFECTS (omitted1))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
}
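
/* Illustrative sketch (added; "call_to_f" is a hypothetical operand):
   when folding f () * 0, omit_one_operand_loc (loc, type,
   integer_zero_node, call_to_f) yields the equivalent of (f (), 0),
   keeping the side effects of the omitted operand while the value
   reduces to the constant; without side effects the result is just the
   converted constant wrapped as a non-lvalue.  */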

/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

static tree
fold_truth_not_expr (location_t loc, tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  location_t loc1, loc2;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
	  && flag_trapping_math
	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
	  && code != NE_EXPR && code != EQ_EXPR)
	return NULL_TREE;

      code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
      if (code == ERROR_MARK)
	return NULL_TREE;

      return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
			 TREE_OPERAND (arg, 1));
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_OR_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_AND_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
	 unless the second operand is a TRUTH_NOT_EXPR in which case our
	 result is the XOR of the first operand with the inside of the
	 negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
	return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
			   TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
	return build2_loc (loc, TRUTH_XOR_EXPR, type,
			   invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
			   TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ORIF_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
	tree arg1 = TREE_OPERAND (arg, 1);
	tree arg2 = TREE_OPERAND (arg, 2);

	loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
	loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);

	/* A COND_EXPR may have a throw as one operand, which
	   then has void type.  Just leave void operands
	   as they are.  */
	return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
			   VOID_TYPE_P (TREE_TYPE (arg1))
			   ? arg1 : invert_truthvalue_loc (loc1, arg1),
			   VOID_TYPE_P (TREE_TYPE (arg2))
			   ? arg2 : invert_truthvalue_loc (loc2, arg2));
      }

    case COMPOUND_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, COMPOUND_EXPR, type,
			 TREE_OPERAND (arg, 0),
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));

    CASE_CONVERT:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
	return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

      /* ... fall through ...  */

    case FLOAT_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, TREE_CODE (arg), type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
	return NULL_TREE;
      return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));

    case SAVE_EXPR:
      return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, CLEANUP_POINT_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    default:
      return NULL_TREE;
    }
}
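
/* Illustrative note (added): the TRUTH_AND_EXPR and TRUTH_OR_EXPR cases
   implement De Morgan's laws, e.g. !(a && b) becomes !a || !b, and the
   short-circuit variants invert TRUTH_ANDIF_EXPR into TRUTH_ORIF_EXPR so
   that the original evaluation order of the operands is preserved.  */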

/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

tree
invert_truthvalue_loc (location_t loc, tree arg)
{
  tree tem;

  if (TREE_CODE (arg) == ERROR_MARK)
    return arg;

  tem = fold_truth_not_expr (loc, arg);
  if (!tem)
    tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);

  return tem;
}

/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
	(A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (location_t loc, enum tree_code code, tree type,
		     tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
	  && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  common = fold_convert_loc (loc, type, common);
  left = fold_convert_loc (loc, type, left);
  right = fold_convert_loc (loc, type, right);
  return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
			  fold_build2_loc (loc, code, type, left, right));
}
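
/* Worked example (added): for (X | 0xF0) & (X | 0x0F) the common operand
   is X, so the result is X | (0xF0 & 0x0F); constant folding then
   reduces the inner BIT_AND_EXPR to 0 and the whole expression to X.  */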

/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
   with code CODE.  This optimization is unsafe.  */

static tree
distribute_real_division (location_t loc, enum tree_code code, tree type,
			  tree arg0, tree arg1)
{
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
			  TREE_OPERAND (arg1, 1), 0))
    return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
			    fold_build2_loc (loc, code, type,
					     TREE_OPERAND (arg0, 0),
					     TREE_OPERAND (arg1, 0)),
			    TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
		       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      if (!mul0)
	real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
	real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2_loc (loc, MULT_EXPR, type,
			      TREE_OPERAND (arg0, 0),
			      build_real (type, r0));
    }

  return NULL_TREE;
}
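
/* Illustrative note (added): this is the transform its own comment calls
   unsafe; it rewrites A / C + B / C as (A + B) / C and A / C1 + A / C2
   as A * (1/C1 + 1/C2).  The reciprocals are computed once, at compile
   time, via real_arithmetic, which is why both divisors must be
   REAL_CSTs in the second form.  */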

/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

static tree
make_bit_field_ref (location_t loc, tree inner, tree type,
		    HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
{
  tree result, bftype;

  if (bitpos == 0)
    {
      tree size = TYPE_SIZE (TREE_TYPE (inner));
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
	   || POINTER_TYPE_P (TREE_TYPE (inner)))
	  && host_integerp (size, 0)
	  && tree_low_cst (size, 0) == bitsize)
	return fold_convert_loc (loc, type, inner);
    }

  bftype = type;
  if (TYPE_PRECISION (bftype) != bitsize
      || TYPE_UNSIGNED (bftype) == !unsignedp)
    bftype = build_nonstandard_integer_type (bitsize, 0);

  result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
		       size_int (bitsize), bitsize_int (bitpos));

  if (bftype != type)
    result = fold_convert_loc (loc, type, result);

  return result;
}

/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (location_t loc, enum tree_code code,
			    tree compare_type, tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* In the strict volatile bitfields case, doing code changes here may prevent
     other optimizations, in particular in a SLOW_BYTE_ACCESS setting.  */
  if (flag_strict_volatile_bitfields > 0)
    return 0;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
				&lunsignedp, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
	 sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
				    &runsignedp, &rvolatilep, false);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
	  || lunsignedp != runsignedp || offset != 0
	  || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
	return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  if (lvolatilep
      && GET_MODE_BITSIZE (lmode) > 0
      && flag_strict_volatile_bitfields > 0)
    nmode = lmode;
  else
    nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
			   const_p ? TYPE_ALIGN (TREE_TYPE (linner))
			   : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
				  TYPE_ALIGN (TREE_TYPE (rinner))),
			   word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_cst_type (unsigned_type, -1);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
  mask = const_binop (RSHIFT_EXPR, mask,
		      size_int (nbitsize - lbitsize - lbitpos));

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return fold_build2_loc (loc, code, compare_type,
			    fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
					     make_bit_field_ref (loc, linner,
								 unsigned_type,
								 nbitsize,
								 nbitpos, 1),
					     mask),
			    fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
					     make_bit_field_ref (loc, rinner,
								 unsigned_type,
								 nbitsize,
								 nbitpos, 1),
					     mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
					fold_convert_loc (loc,
							  unsigned_type, rhs),
					size_int (lbitsize))))
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR,
			      fold_convert_loc (loc, signed_type, rhs),
			      size_int (lbitsize - 1));
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = build_int_cst (type, 0);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = const_binop (BIT_AND_EXPR,
		     const_binop (LSHIFT_EXPR,
				  fold_convert_loc (loc, unsigned_type, rhs),
				  size_int (lbitpos)),
		     mask);

  lhs = build2_loc (loc, code, compare_type,
		    build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
  return lhs;
}
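
/* Illustrative sketch (added; the field layout is hypothetical): for
   struct s { unsigned a : 3; } x; the test x.a == 5 becomes, in effect,

     (word containing x.a & mask) == ((5 << bitpos) & mask)

   i.e. one load, one AND and a compare, avoiding the shift a plain
   bit-field extraction would need; a constant such as 9 would not fit
   in 3 bits and folds to false with the width warning above.  */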

/* Subroutine for fold_truth_andor_1: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
			HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
			int *punsignedp, int *pvolatilep,
			tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (CONVERT_EXPR_P (exp)
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
	return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
			       punsignedp, pvolatilep, false);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst_type (unsigned_type, -1);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
			    fold_convert_loc (loc, unsigned_type, and_mask),
			    mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}

/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bits of its type.  */

static int
all_ones_mask_p (const_tree mask, int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);
  tree tmask;

  tmask = build_int_cst_type (signed_type_for (type), -1);

  return
    tree_int_cst_equal (mask,
			const_binop (RSHIFT_EXPR,
				     const_binop (LSHIFT_EXPR, tmask,
						  size_int (precision - size)),
				     size_int (precision - size)));
}

/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, const_tree val)
{
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = ((unsigned HOST_WIDE_INT) -1
		 >> (HOST_BITS_PER_DOUBLE_INT - width));
      mask_lo = -1;
    }
  else
    {
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = ((unsigned HOST_WIDE_INT) -1
		 >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
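
/* Worked example (added): for a 32-bit int, sign_bit_p returns EXP when
   VAL is 0x80000000, the single bit LO computed from the precision; if
   EXP is a NOP_EXPR extending an 8-bit operand, the recursion also
   recognizes 0x80, the sign bit of the unextended type.  */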

/* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (const_tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  STRIP_NOPS (exp);

  return (CONSTANT_CLASS_P (exp)
	  || TREE_CODE (exp) == SSA_NAME
	  || (DECL_P (exp)
	      && ! TREE_ADDRESSABLE (exp)
	      && ! TREE_THIS_VOLATILE (exp)
	      && ! DECL_NONLOCAL (exp)
	      /* Don't regard global variables as simple.  They may be
		 allocated in ways unknown to the compiler (shared memory,
		 #pragma weak, etc).  */
	      && ! TREE_PUBLIC (exp)
	      && ! DECL_EXTERNAL (exp)
	      /* Loading a static variable is unduly expensive, but global
		 registers aren't expensive.  */
	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}

/* Subroutine for fold_truth_andor: determine if an operand is simple enough
   to be evaluated unconditionally.
   In addition to simple_operand_p, we assume that comparisons, conversions,
   and logic-not operations are simple, if their operands are simple, too.  */

static bool
simple_operand_p_2 (tree exp)
{
  enum tree_code code;

  if (TREE_SIDE_EFFECTS (exp)
      || tree_could_trap_p (exp))
    return false;

  while (CONVERT_EXPR_P (exp))
    exp = TREE_OPERAND (exp, 0);

  code = TREE_CODE (exp);

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return (simple_operand_p (TREE_OPERAND (exp, 0))
	    && simple_operand_p (TREE_OPERAND (exp, 1)));

  if (code == TRUTH_NOT_EXPR)
    return simple_operand_p_2 (TREE_OPERAND (exp, 0));

  return simple_operand_p (exp);
}

/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
	X == 2 || X == 3 || X == 4 || X == 5
   and
	X >= 2 && X <= 5
   are converted to
	(unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
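
/* Worked example (added): for X >= 2 && X <= 5 the range is + [2, 5];
   subtracting the low bound gives + [0, 3] on X - 2, and since an
   unsigned value is never below zero the pair of comparisons collapses
   into the single test (unsigned) (X - 2) <= 3.  */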

/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
	     tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
			 arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);

  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}

/* Helper routine for make_range.  Perform one step for it, return
   new expression if the loop should continue or NULL_TREE if it should
   stop.  */

tree
make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
		 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
		 bool *strict_overflow_p)
{
  tree arg0_type = TREE_TYPE (arg0);
  tree n_low, n_high, low = *p_low, high = *p_high;
  int in_p = *p_in_p, n_in_p;

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      /* We can only do something if the range is testing for zero.  */
      if (low == NULL_TREE || high == NULL_TREE
	  || ! integer_zerop (low) || ! integer_zerop (high))
	return NULL_TREE;
      *p_in_p = ! in_p;
      return arg0;

    case EQ_EXPR: case NE_EXPR:
    case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
      /* We can only do something if the range is testing for zero
	 and if the second operand is an integer constant.  Note that
	 saying something is "in" the range we make is done by
	 complementing IN_P since it will set in the initial case of
	 being not equal to zero; "out" is leaving it alone.  */
      if (low == NULL_TREE || high == NULL_TREE
	  || ! integer_zerop (low) || ! integer_zerop (high)
	  || TREE_CODE (arg1) != INTEGER_CST)
	return NULL_TREE;

      switch (code)
	{
	case NE_EXPR:  /* - [c, c]  */
	  low = high = arg1;
	  break;
	case EQ_EXPR:  /* + [c, c]  */
	  in_p = ! in_p, low = high = arg1;
	  break;
	case GT_EXPR:  /* - [-, c] */
	  low = 0, high = arg1;
	  break;
	case GE_EXPR:  /* + [c, -] */
	  in_p = ! in_p, low = arg1, high = 0;
	  break;
	case LT_EXPR:  /* - [c, -] */
	  low = arg1, high = 0;
	  break;
	case LE_EXPR:  /* + [-, c] */
	  in_p = ! in_p, low = 0, high = arg1;
	  break;
	default:
	  gcc_unreachable ();
	}

      /* If this is an unsigned comparison, we also know that EXP is
	 greater than or equal to zero.  We base the range tests we make
	 on that fact, so we record it here so we can parse existing
	 range tests.  We test arg0_type since often the return type
	 of, e.g. EQ_EXPR, is boolean.  */
      if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
	{
	  if (! merge_ranges (&n_in_p, &n_low, &n_high,
			      in_p, low, high, 1,
			      build_int_cst (arg0_type, 0),
			      NULL_TREE))
	    return NULL_TREE;

	  in_p = n_in_p, low = n_low, high = n_high;

	  /* If the high bound is missing, but we have a nonzero low
	     bound, reverse the range so it goes from zero to the low bound
	     minus 1.  */
	  if (high == 0 && low && ! integer_zerop (low))
	    {
	      in_p = ! in_p;
	      high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
				  integer_one_node, 0);
	      low = build_int_cst (arg0_type, 0);
	    }
	}

      *p_low = low;
      *p_high = high;
      *p_in_p = in_p;
      return arg0;

    case NEGATE_EXPR:
      /* If flag_wrapv and ARG0_TYPE is signed, make sure
	 low and high are non-NULL, then normalize will DTRT.  */
      if (!TYPE_UNSIGNED (arg0_type)
	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
	{
	  if (low == NULL_TREE)
	    low = TYPE_MIN_VALUE (arg0_type);
	  if (high == NULL_TREE)
	    high = TYPE_MAX_VALUE (arg0_type);
	}

      /* (-x) IN [a,b] -> x in [-b, -a]  */
      n_low = range_binop (MINUS_EXPR, exp_type,
			   build_int_cst (exp_type, 0),
			   0, high, 1);
      n_high = range_binop (MINUS_EXPR, exp_type,
			    build_int_cst (exp_type, 0),
			    0, low, 0);
      if (n_high != 0 && TREE_OVERFLOW (n_high))
	return NULL_TREE;
      goto normalize;

    case BIT_NOT_EXPR:
      /* ~ X -> -X - 1  */
      return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
			 build_int_cst (exp_type, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
      if (TREE_CODE (arg1) != INTEGER_CST)
	return NULL_TREE;

      /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
	 move a constant to the other side.  */
      if (!TYPE_UNSIGNED (arg0_type)
	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
	return NULL_TREE;

      /* If EXP is signed, any overflow in the computation is undefined,
	 so we don't worry about it so long as our computations on
	 the bounds don't overflow.  For unsigned, overflow is defined
	 and this is exactly the right thing.  */
      n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			   arg0_type, low, 0, arg1, 0);
      n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			    arg0_type, high, 1, arg1, 0);
      if ((n_low != 0 && TREE_OVERFLOW (n_low))
	  || (n_high != 0 && TREE_OVERFLOW (n_high)))
	return NULL_TREE;

      if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
	*strict_overflow_p = true;

      normalize:
      /* Check for an unsigned range which has wrapped around the maximum
	 value thus making n_high < n_low, and normalize it.  */
      if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
	{
	  low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
			     integer_one_node, 0);
	  high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
			      integer_one_node, 0);

	  /* If the range is of the form +/- [ x+1, x ], we won't
	     be able to normalize it.  But then, it represents the
	     whole range or the empty set, so make it
	     +/- [ -, - ].  */
	  if (tree_int_cst_equal (n_low, low)
	      && tree_int_cst_equal (n_high, high))
	    low = high = 0;
	  else
	    in_p = ! in_p;
	}
      else
	low = n_low, high = n_high;

      *p_low = low;
      *p_high = high;
      *p_in_p = in_p;
      return arg0;

    CASE_CONVERT:
    case NON_LVALUE_EXPR:
      if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
	return NULL_TREE;

      if (! INTEGRAL_TYPE_P (arg0_type)
	  || (low != 0 && ! int_fits_type_p (low, arg0_type))
	  || (high != 0 && ! int_fits_type_p (high, arg0_type)))
	return NULL_TREE;

      n_low = low, n_high = high;

      if (n_low != 0)
	n_low = fold_convert_loc (loc, arg0_type, n_low);

      if (n_high != 0)
	n_high = fold_convert_loc (loc, arg0_type, n_high);

      /* If we're converting arg0 from an unsigned type, to exp,
	 a signed type, we will be doing the comparison as unsigned.
	 The tests above have already verified that LOW and HIGH
	 are both positive.

	 So we have to ensure that we will handle large unsigned
	 values the same way that the current signed bounds treat
	 negative values.  */

      if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
	{
	  tree high_positive;
	  tree equiv_type;
	  /* For fixed-point modes, we need to pass the saturating flag
	     as the 2nd parameter.  */
	  if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
	    equiv_type
	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
						TYPE_SATURATING (arg0_type));
	  else
	    equiv_type
	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);

	  /* A range without an upper bound is, naturally, unbounded.
	     Since convert would have cropped a very large value, use
	     the max value for the destination type.  */
	  high_positive
	    = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
	      : TYPE_MAX_VALUE (arg0_type);

	  if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
	    high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
					     fold_convert_loc (loc, arg0_type,
							       high_positive),
					     build_int_cst (arg0_type, 1));

	  /* If the low bound is specified, "and" the range with the
	     range for which the original unsigned value will be
	     positive.  */
	  if (low != 0)
	    {
	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
				  1, fold_convert_loc (loc, arg0_type,
						       integer_zero_node),
				  high_positive))
		return NULL_TREE;

	      in_p = (n_in_p == in_p);
	    }
	  else
	    {
	      /* Otherwise, "or" the range with the range of the input
		 that will be interpreted as negative.  */
	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
				  1, fold_convert_loc (loc, arg0_type,
						       integer_zero_node),
				  high_positive))
		return NULL_TREE;

	      in_p = (in_p != n_in_p);
	    }
	}

      *p_low = n_low;
      *p_high = n_high;
      *p_in_p = in_p;
      return arg0;

    default:
      return NULL_TREE;
    }
}
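
/* Illustrative step (added): starting from "X + 2 != 0" on a signed X,
   i.e. the range - [0, 0] tested on X + 2, the PLUS_EXPR case above
   moves the constant to the bounds with range_binop, producing the
   equivalent range - [-2, -2] on X itself, so the whole test folds to
   X != -2.  */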

/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same
   type as the returned expression.  If EXP is not a comparison, we
   will most likely not be returning a useful value and range.  Set
   *STRICT_OVERFLOW_P to true if the return value is only valid
   because signed overflow is undefined; otherwise, do not change
   *STRICT_OVERFLOW_P.  */

tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
	    bool *strict_overflow_p)
{
  enum tree_code code;
  tree arg0, arg1 = NULL_TREE;
  tree exp_type, nexp;
  int in_p;
  tree low, high;
  location_t loc = EXPR_LOCATION (exp);

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = build_int_cst (TREE_TYPE (exp), 0);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);
      arg0 = NULL_TREE;

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  if (TREE_OPERAND_LENGTH (exp) > 0)
	    arg0 = TREE_OPERAND (exp, 0);
	  if (TREE_CODE_CLASS (code) == tcc_binary
	      || TREE_CODE_CLASS (code) == tcc_comparison
	      || (TREE_CODE_CLASS (code) == tcc_expression
		  && TREE_OPERAND_LENGTH (exp) > 1))
	    arg1 = TREE_OPERAND (exp, 1);
	}
      if (arg0 == NULL_TREE)
	break;

      nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
			      &high, &in_p, strict_overflow_p);
      if (nexp == NULL_TREE)
	break;
      exp = nexp;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
						 exp, 0, low, 0))
		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
						    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.  */

tree
build_range_check (location_t loc, tree type, tree exp, int in_p,
		   tree low, tree high)
{
  tree etype = TREE_TYPE (exp), value;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (etype) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (! in_p)
    {
      value = build_range_check (loc, type, exp, 1, low, high);
      if (value != 0)
	return invert_truthvalue_loc (loc, value);

      return 0;
    }

  if (low == 0 && high == 0)
    return build_int_cst (type, 1);

  if (low == 0)
    return fold_build2_loc (loc, LE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, high));

  if (high == 0)
    return fold_build2_loc (loc, GE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  if (operand_equal_p (low, high, 0))
    return fold_build2_loc (loc, EQ_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
	{
	  etype = unsigned_type_for (etype);
	  high = fold_convert_loc (loc, etype, high);
	  exp = fold_convert_loc (loc, etype, exp);
	}
      return build_range_check (loc, type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
	{
	  hi = 0;
	  lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
	}
      else
	{
	  hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
	  lo = (unsigned HOST_WIDE_INT) -1;
	}

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
	{
	  if (TYPE_UNSIGNED (etype))
	    {
	      tree signed_etype = signed_type_for (etype);
	      if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
		etype
		  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
	      else
		etype = signed_etype;
	      exp = fold_convert_loc (loc, etype, exp);
	    }
	  return fold_build2_loc (loc, GT_EXPR, type, exp,
				  build_int_cst (etype, 0));
	}
    }

  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetic for the type of the expression.
     First make sure that arithmetic in this type is valid, then make sure
     that it wraps around.  */
  if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
    etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
					    TYPE_UNSIGNED (etype));

  if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
	 for the type in question, as we rely on this here.  */
      utype = unsigned_type_for (etype);
      maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
			  integer_one_node, 1);
      minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
				      minv, 1, maxv, 1)))
	etype = utype;
      else
	return 0;
    }

  high = fold_convert_loc (loc, etype, high);
  low = fold_convert_loc (loc, etype, low);
  exp = fold_convert_loc (loc, etype, exp);

  value = const_binop (MINUS_EXPR, high, low);

  if (POINTER_TYPE_P (etype))
    {
      if (value != 0 && !TREE_OVERFLOW (value))
	{
	  low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
	  return build_range_check (loc, type,
				    fold_build_pointer_plus_loc (loc, exp, low),
				    1, build_int_cst (etype, 0), value);
	}
      return 0;
    }

  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (loc, type,
			      fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
			      1, build_int_cst (etype, 0), value);

  return 0;
}
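
/* Illustration (hypothetical; not part of the GCC sources and not
   compiled): the wrap-around trick used above, written out in plain C.  */
#if 0
int in_range_slow (unsigned char c)
{
  return c >= 'a' && c <= 'z';	/* two comparisons */
}
int in_range_fast (unsigned char c)
{
  /* Same result with one comparison: values below 'a' wrap around to
     large numbers, values above 'z' stay above 'z' - 'a'.  */
  return (unsigned char) (c - 'a') <= (unsigned char) ('z' - 'a');
}
#endif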
/* Return the predecessor of VAL in its type, handling the infinite case.  */

static tree
range_predecessor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
    return 0;
  else
    return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}

/* Return the successor of VAL in its type, handling the infinite case.  */

static tree
range_successor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
    return 0;
  else
    return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

bool
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
	      tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (lowequal
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the second range to the end of the first.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)
	{
	  low = range_successor (high1);
	  high = high0;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* We are in the weird situation where high0 > high1 but
		 high1 has no successor.  Punt.  */
	      return 0;
	    }
	}
      else if (! subset || highequal)
	{
	  low = low0;
	  high = range_predecessor (low1);
	  in_p = 1;
	  if (high == 0)
	    {
	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
	      return 0;
	    }
	}
      else
	return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
	in_p = 0, low = high = 0;
      else
	{
	  low = range_successor (high0);
	  high = high1;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* high1 > high0 but high0 has no successor.  Punt.  */
	      return 0;
	    }
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of the
	 second.  */
      if (no_overlap)
	{
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_successor (high0),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    {
	      /* Canonicalize - [min, x] into - [-, x].  */
	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (low0)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (low0))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (low0,
					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
		      low0 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
			&& integer_zerop (low0))
		      low0 = 0;
		    break;
		  default:
		    break;
		  }

	      /* Canonicalize - [x, max] into - [x, -].  */
	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (high1)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (high1))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (high1,
					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
		      high1 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
						       high1, 1,
						       integer_one_node, 1)))
		      high1 = 0;
		    break;
		  default:
		    break;
		  }

	      /* The ranges might be also adjacent between the maximum and
		 minimum values of the given type.  For
		 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
		 return + [x + 1, y - 1].  */
	      if (low0 == 0 && high1 == 0)
		{
		  low = range_successor (high0);
		  high = range_predecessor (low1);
		  if (low == 0 || high == 0)
		    return 0;

		  in_p = 1;
		}
	      else
		return 0;
	    }
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
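
/* Illustration (hypothetical; not part of the GCC sources and not
   compiled): one of the merges performed by the case analysis above.  */
#if 0
/* Merging in0_p=1 [2,10] with in1_p=1 [5,20] (the "and" of two inclusive
   ranges) yields the single inclusive range [5,10].  */
int and_of_ranges (int x)
{
  return (x >= 2 && x <= 10) && (x >= 5 && x <= 20);	/* x in [5,10] */
}
#endif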
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is being used also to optimize
   A op B ? C : A, by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (location_t loc, tree type,
				tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && (FLOAT_TYPE_P (TREE_TYPE (arg01))
	  ? real_zerop (arg01)
	  : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
	     /* In the case that A is of the form X-Y, '-A' (arg2) may
		have already been folded to Y-X, check for that.  */
	  || (TREE_CODE (arg1) == MINUS_EXPR
	      && TREE_CODE (arg2) == MINUS_EXPR
	      && operand_equal_p (TREE_OPERAND (arg1, 0),
				  TREE_OPERAND (arg2, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg1, 1),
				  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	tem = fold_convert_loc (loc, arg1_type, arg1);
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
							  negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert_loc (loc, signed_type_for
				   (TREE_TYPE (arg1)), arg1);
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case LE_EXPR:
      case LT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert_loc (loc, signed_type_for
				   (TREE_TYPE (arg1)), arg1);
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return negate_expr (fold_convert_loc (loc, type, tem));
      default:
	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      else if (comp_code == EQ_EXPR)
	return build_zero_cst (type);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
	 as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
	  || VECTOR_TYPE_P (type)
	  || (strcmp (lang_hooks.name, "GNU C++") != 0
	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
	  || ! maybe_lvalue_p (arg1)
	  || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
	{
	  comp_type = type;
	  comp_op0 = arg1;
	  comp_op1 = arg2;
	}

      switch (comp_code)
	{
	case EQ_EXPR:
	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
	case NE_EXPR:
	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
	case LE_EXPR:
	case LT_EXPR:
	case UNLE_EXPR:
	case UNLT_EXPR:
	  /* In C++ a ?: expression can be an lvalue, so put the
	     operand which will be used if they are equal first
	     so that we can convert this back to the
	     corresponding COND_EXPR.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
		    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2_loc (loc, MIN_EXPR, comp_type,
				       comp_op1, comp_op0);
	      return pedantic_non_lvalue_loc (loc,
					      fold_convert_loc (loc, type, tem));
	    }
	  break;
	case GE_EXPR:
	case GT_EXPR:
	case UNGE_EXPR:
	case UNGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
		    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2_loc (loc, MAX_EXPR, comp_type,
				       comp_op1, comp_op0);
	      return pedantic_non_lvalue_loc (loc,
					      fold_convert_loc (loc, type, tem));
	    }
	  break;
	case UNEQ_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, arg2));
	  break;
	case LTGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, arg1));
	  break;
	default:
	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	  break;
	}
    }

  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
	if (TREE_CODE (arg1) == INTEGER_CST)
	  break;
	/* We can replace A with C1 in this case.  */
	arg1 = fold_convert_loc (loc, type, arg01);
	return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);

      case LT_EXPR:
	/* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
	   MIN_EXPR, to preserve the signedness of the comparison.  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, tem));
	  }
	break;

      case LE_EXPR:
	/* If C1 is C2 - 1, this is min(A, C2), with the same care
	   as above.  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, tem));
	  }
	break;

      case GT_EXPR:
	/* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
	   MAX_EXPR, to preserve the signedness of the comparison.  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	  }
	break;

      case GE_EXPR:
	/* If C1 is C2 + 1, this is max(A, C2), with the same care as above.  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	  }
	break;

      case NE_EXPR:
	break;

      default:
	gcc_unreachable ();
      }

  return NULL_TREE;
}
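
/* Illustration (hypothetical; not part of the GCC sources and not
   compiled): the A op 0 ? A : -A and A op B ? A : B families handled
   above, in source form.  */
#if 0
int abs_like (int a)
{
  return a >= 0 ? a : -a;	/* folded to ABS_EXPR <a> */
}
int min_like (int a, int b)
{
  return a <= b ? a : b;	/* folded to MIN_EXPR <a, b> */
}
/* Neither fold is applied to floating-point operands when signed zeros
   or NaNs must be honored, for the reasons given in the comments above.  */
#endif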
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
		false) >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
	       || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  bool strict_overflow_p = false;
  tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
  tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
  tree tem;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when simplifying range test");

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (loc, type,
					 lhs != 0 ? lhs
					 : rhs != 0 ? rhs : integer_zero_node,
					 in_p, low, high))))
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
      return or_op ? invert_truthvalue_loc (loc, tem) : tem;
    }

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
	   && lhs != 0 && rhs != 0
	   && (code == TRUTH_ANDIF_EXPR
	       || code == TRUTH_ORIF_EXPR)
	   && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build2_loc (loc, code == TRUTH_ANDIF_EXPR
			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			   type, op0, op1);

      else if (!lang_hooks.decls.global_bindings_p ()
	       && !CONTAINS_PLACEHOLDER_P (lhs))
	{
	  tree common = save_expr (lhs);

	  if (0 != (lhs = build_range_check (loc, type, common,
					     or_op ? ! in0_p : in0_p,
					     low0, high0))
	      && (0 != (rhs = build_range_check (loc, type, common,
						 or_op ? ! in1_p : in1_p,
						 low1, high1))))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (warnmsg,
				       WARN_STRICT_OVERFLOW_COMPARISON);
	      return build2_loc (loc, code == TRUTH_ANDIF_EXPR
				 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
				 type, lhs, rhs);
	    }
	}
    }

  return 0;
}
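
/* Illustration (hypothetical; not part of the GCC sources and not
   compiled): the classic range-test merge performed above.  */
#if 0
int is_digit (int ch)
{
  return ch >= '0' && ch <= '9';
  /* becomes, after make_range + merge_ranges + build_range_check:
     (unsigned) (ch - '0') <= 9u, i.e. one branch instead of two.  */
}
#endif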
/* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is signed-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1));

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (signed_type_for (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
			fold_convert (TREE_TYPE (c), mask));
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
}
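
/* Illustration (hypothetical; not part of the GCC sources and not
   compiled): a simplified sketch of what unextend computes, shown on an
   8-bit field value sign-extended into a 32-bit mode.  */
#if 0
unsigned int unextend_example (void)
{
  int p = 8;
  unsigned int c = 0xffffff80u;	  /* sign-extended 8-bit field value */
  unsigned int sign = (c >> (p - 1)) & 1;
  /* XORing away the replicated sign bits above bit P-1 recovers the
     zero-extended value 0x80, matching the shift-based code above.  */
  return c ^ (sign ? 0xffffff00u : 0u);
}
#endif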
/* For an expression that has the form
     (A && B) || ~B
   or
     (A || B) && ~B,
   we can drop one of the inner expressions and simplify to
     A || ~B
   or
     A && ~B
   LOC is the location of the resulting expression.  OP is the inner
   logical operation; the left-hand side in the examples above, while CMPOP
   is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
   removing a condition that guards another, as in
     (A != NULL && A->...) || A == NULL
   which we must not transform.  If RHS_ONLY is true, only eliminate the
   right-most operand of the inner logical operation.  */

static tree
merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
				 bool rhs_only)
{
  tree type = TREE_TYPE (cmpop);
  enum tree_code code = TREE_CODE (cmpop);
  enum tree_code truthop_code = TREE_CODE (op);
  tree lhs = TREE_OPERAND (op, 0);
  tree rhs = TREE_OPERAND (op, 1);
  tree orig_lhs = lhs, orig_rhs = rhs;
  enum tree_code rhs_code = TREE_CODE (rhs);
  enum tree_code lhs_code = TREE_CODE (lhs);
  enum tree_code inv_code;

  if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
    return NULL_TREE;

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return NULL_TREE;

  if (rhs_code == truthop_code)
    {
      tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
      if (newrhs != NULL_TREE)
	{
	  rhs = newrhs;
	  rhs_code = TREE_CODE (rhs);
	}
    }
  if (lhs_code == truthop_code && !rhs_only)
    {
      tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
      if (newlhs != NULL_TREE)
	{
	  lhs = newlhs;
	  lhs_code = TREE_CODE (lhs);
	}
    }

  inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
  if (inv_code == rhs_code
      && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return lhs;
  if (!rhs_only && inv_code == lhs_code
      && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return rhs;
  if (rhs != orig_rhs || lhs != orig_lhs)
    return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
			    lhs, rhs);
  return NULL_TREE;
}
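
/* Illustration (hypothetical; not part of the GCC sources and not
   compiled): the simplification performed above, and the guard case that
   RHS_ONLY exists to protect.  */
#if 0
struct s { int x; };
int f (struct s *a, int b)
{
  /* (a == 0 || b > 2) && a != 0  simplifies to  b > 2 && a != 0:
     the a == 0 arm is the inverse of the cmpop a != 0 and is dropped.  */
  return (a == 0 || b > 2) && a != 0;
}
int g (struct s *a)
{
  /* With RHS_ONLY set, (a != 0 && a->x) || a == 0 keeps the a != 0
     guard: dropping it would move a->x before the null test.  */
  return (a != 0 && a->x) || a == 0;
}
#endif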
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with the a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operation and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
		    tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  HOST_WIDE_INT first_bit, end_bit;
  int volatilep;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
		    build_int_cst (TREE_TYPE (lhs), 0));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
		    build_int_cst (TREE_TYPE (rhs), 0));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      if (operand_equal_p (ll_arg, rl_arg, 0)
	  && operand_equal_p (lr_arg, rr_arg, 0))
	{
	  result = combine_comparisons (loc, code, lcode, rcode,
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
      else if (operand_equal_p (ll_arg, rr_arg, 0)
	       && operand_equal_p (lr_arg, rl_arg, 0))
	{
	  result = combine_comparisons (loc, code, lcode,
					swap_tree_comparison (rcode),
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  */

  if (BRANCH_COST (optimize_function_for_speed_p (cfun),
		   false) >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
	  && lcode == NE_EXPR && integer_zerop (lr_arg)
	  && rcode == NE_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2_loc (loc, NE_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2_loc (loc, EQ_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (loc, ll_arg,
				     &ll_bitsize, &ll_bitpos, &ll_mode,
				     &ll_unsignedp, &volatilep, &ll_mask,
				     &ll_and_mask);
  lr_inner = decode_field_reference (loc, lr_arg,
				     &lr_bitsize, &lr_bitpos, &lr_mode,
				     &lr_unsignedp, &volatilep, &lr_mask,
				     &lr_and_mask);
  rl_inner = decode_field_reference (loc, rl_arg,
				     &rl_bitsize, &rl_bitpos, &rl_mode,
				     &rl_unsignedp, &volatilep, &rl_mask,
				     &rl_and_mask);
  rr_inner = decode_field_reference (loc, rr_arg,
				     &rr_bitsize, &rr_bitpos, &rr_mode,
				     &rr_unsignedp, &volatilep, &rr_mask,
				     &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
	   || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
	{
	  /* Make the left operand unsigned, since we are only interested
	     in the value of one bit.  Otherwise we are doing the wrong
	     thing below.  */
	  ll_unsignedp = 1;
	  l_const = ll_mask;
	}
      else
	return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
	{
	  rl_unsignedp = 1;
	  r_const = rl_mask;
	}
      else
	return 0;
    }

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
			  volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
			 size_int (xll_bitpos));
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
			 size_int (xrl_bitpos));

  if (l_const)
    {
      l_const = fold_convert_loc (loc, lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,
							 lntype, ll_mask))))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }
  if (r_const)
    {
      r_const = fold_convert_loc (loc, lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,
							 lntype, rl_mask))))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
	  /* Make sure the two fields on the right
	     correspond to the left without being swapped.  */
	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
	return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
			      volatilep);
      if (rnmode == VOIDmode)
	return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
	{
	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
	}

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
							    rntype, lr_mask),
			     size_int (xlr_bitpos));
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
							    rntype, rr_mask),
			     size_int (xrr_bitpos));

      /* Make a mask that corresponds to both fields being compared.
	 Do this for both items being compared.  If the operands are the
	 same size and the bits being compared are in the same position
	 then we can do this by masking both and comparing the masked
	 results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
	{
	  lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
				    ll_unsignedp || rl_unsignedp);
	  if (! all_ones_mask_p (ll_mask, lnbitsize))
	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

	  rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
				    lr_unsignedp || rr_unsignedp);
	  if (! all_ones_mask_p (lr_mask, rnbitsize))
	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
	}

      /* There is still another way we can do something:  If both pairs of
	 fields being compared are adjacent, we may be able to make a wider
	 field containing them both.

	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
	 the mask must be shifted to account for the shift done by
	 make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
	   && lr_bitsize + lr_bitpos == rr_bitpos)
	  || (ll_bitpos == rl_bitpos + rl_bitsize
	      && lr_bitpos == rr_bitpos + rr_bitsize))
	{
	  tree type;

	  lhs = make_bit_field_ref (loc, ll_inner, lntype,
				    ll_bitsize + rl_bitsize,
				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
	  rhs = make_bit_field_ref (loc, lr_inner, rntype,
				    lr_bitsize + rr_bitsize,
				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
				 size_int (MIN (xll_bitpos, xrl_bitpos)));
	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
				 size_int (MIN (xlr_bitpos, xrr_bitpos)));

	  /* Convert to the smaller type before masking out unwanted bits.  */
	  type = lntype;
	  if (lntype != rntype)
	    {
	      if (lnbitsize > rnbitsize)
		{
		  lhs = fold_convert_loc (loc, rntype, lhs);
		  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
		  type = rntype;
		}
	      else if (lnbitsize < rnbitsize)
		{
		  rhs = fold_convert_loc (loc, lntype, rhs);
		  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
		  type = lntype;
		}
	    }

	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
	}

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
			   const_binop (BIT_AND_EXPR, result, r_const)) != 1)
    {
      if (wanted_code == NE_EXPR)
	{
	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
	  return constant_boolean_node (true, truth_type);
	}
      else
	{
	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
	  return constant_boolean_node (false, truth_type);
	}
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
			       ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);

  return build2_loc (loc, wanted_code, truth_type, result,
		     const_binop (BIT_IOR_EXPR, l_const, r_const));
}
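
/* Illustration (hypothetical; not part of the GCC sources and not
   compiled): the bit-field merge performed above, on a made-up layout.  */
#if 0
struct pair { unsigned char a; unsigned char b; };
int both (struct pair *p)
{
  return p->a == 2 && p->b == 4;
  /* If a and b fit in one 16-bit mode, this becomes a single load
     compared against the combined constant, e.g. 0x0402 on a
     little-endian target (the bit positions mirror on big-endian).  */
}
#endif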
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
			    tree op0, tree op1)
{
  tree arg0 = op0;
  enum tree_code op_code;
  tree comp_const;
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_OVERFLOW (minmax_const))
    return NULL_TREE;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (code)
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      {
	tree tem
	  = optimize_minmax_comparison (loc,
					invert_tree_comparison (code, false),
					type, op0, op1);
	if (tem)
	  return invert_truthvalue_loc (loc, tem);
	return NULL_TREE;
      }

    case GE_EXPR:
      return
	fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
			 optimize_minmax_comparison
			 (loc, EQ_EXPR, type, arg0, comp_const),
			 optimize_minmax_comparison
			 (loc, GT_EXPR, type, arg0, comp_const));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
	/* MAX (X, 0) == 0  ->  X <= 0  */
	return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR && consts_lt)
	/* MAX (X, 0) == 5  ->  X == 5   */
	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) == -1  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else if (consts_equal)
	/* MIN (X, 0) == 0  ->  X >= 0  */
	return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);

      else if (consts_lt)
	/* MIN (X, 0) == 5  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
	/* MIN (X, 0) == -1  ->  X == -1  */
	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
	/* MAX (X, 0) > 0  ->  X > 0
	   MAX (X, 0) > 5  ->  X > 5  */
	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) > -1  ->  true  */
	return omit_one_operand_loc (loc, type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
	/* MIN (X, 0) > 0  ->  false
	   MIN (X, 0) > 5  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
	/* MIN (X, 0) > -1  ->  X > -1  */
	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

    default:
      return NULL_TREE;
    }
}
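
/* Illustration (hypothetical; not part of the GCC sources and not
   compiled): two of the MIN/MAX comparison folds enumerated above.  */
#if 0
#define MAX(x, y) ((x) > (y) ? (x) : (y))
int max_eq (int x)
{
  return MAX (x, 0) == 0;	/* folds to x <= 0 */
}
int max_gt (int x)
{
  return MAX (x, 0) > 5;	/* folds to x > 5  */
}
#endif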
/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.

   We set *STRICT_OVERFLOW_P to true if the return value depends on
   signed overflow being undefined.  Otherwise we do not change
   *STRICT_OVERFLOW_P.  */

static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
		bool *strict_overflow_p)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  static int depth;
  tree ret;

  if (depth > 3)
    return NULL;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
  depth--;

  return ret;
}

static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
		  bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
				   > GET_MODE_SIZE (TYPE_MODE (type)))
		? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;
  bool sub_strict_overflow_p;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
	 or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
	return const_binop (code, fold_convert (ctype, t),
			    fold_convert (ctype, c));
      break;

    CASE_CONVERT: case NON_LVALUE_EXPR:
      /* If op0 is an expression ... */
      if ((COMPARISON_CLASS_P (op0)
	   || UNARY_CLASS_P (op0)
	   || BINARY_CLASS_P (op0)
	   || VL_EXP_CLASS_P (op0)
	   || EXPRESSION_CLASS_P (op0))
	  /* ... and has wrapping overflow, and its type is smaller
	     than ctype, then we cannot pass through as widening.  */
	  && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
	       && (TYPE_PRECISION (ctype)
		   > TYPE_PRECISION (TREE_TYPE (op0))))
	      /* ... or this is a truncation (t is narrower than op0),
		 then we cannot pass through this narrowing.  */
	      || (TYPE_PRECISION (type)
		  < TYPE_PRECISION (TREE_TYPE (op0)))
	      /* ... or signedness changes for division or modulus,
		 then we cannot pass through this conversion.  */
	      || (code != MULT_EXPR
		  && (TYPE_UNSIGNED (ctype)
		      != TYPE_UNSIGNED (TREE_TYPE (op0))))
	      /* ... or has undefined overflow while the converted to
		 type has not, we cannot do the operation in the inner type
		 as that would introduce undefined overflow.  */
	      || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
		  && !TYPE_OVERFLOW_UNDEFINED (type))))
	break;

      /* Pass the constant down and see if we can make a simplification.  If
	 we can, replace this expression with the inner simplification for
	 possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
	  && TREE_CODE (t2) == INTEGER_CST
	  && !TREE_OVERFLOW (t2)
	  && (0 != (t1 = extract_muldiv (op0, t2, code,
					 code == MULT_EXPR
					 ? ctype : NULL_TREE,
					 strict_overflow_p))))
	return t1;
      break;

    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
	 must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
	{
	  tree cstype = (*signed_type_for) (ctype);
	  if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
	      != 0)
	    {
	      t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
	      return fold_convert (ctype, t1);
	    }
	  break;
	}
      /* If the constant is negative, we cannot simplify this.  */
      if (tree_int_cst_sgn (c) == -1)
	break;
      /* FALLTHROUGH */
    case NEGATE_EXPR:
      /* For division and modulus, type can't be unsigned, as e.g.
	 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
	 For signed types, even with wrapping overflow, this is fine.  */
      if (code != MULT_EXPR && TYPE_UNSIGNED (type))
	break;
      if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
	  != 0)
	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
	 this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
	break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      sub_strict_overflow_p = false;
      if ((t1 = extract_muldiv (op0, c, code, wide_type,
				&sub_strict_overflow_p)) != 0
	  && (t2 = extract_muldiv (op1, c, code, wide_type,
				   &sub_strict_overflow_p)) != 0)
	{
	  if (tree_int_cst_sgn (c) < 0)
	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
	 or floor division, by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  Signed
	 left-shift overflow is implementation-defined rather than
	 undefined in C90, so do not convert signed left shift into
	 multiplication.  */
      if (TREE_CODE (op1) == INTEGER_CST
	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
	  && TREE_INT_CST_HIGH (op1) == 0
	  && 0 != (t1 = fold_convert (ctype,
				      const_binop (LSHIFT_EXPR,
						   size_one_node, op1)))
	  && !TREE_OVERFLOW (t1))
	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
				       ? MULT_EXPR : FLOOR_DIV_EXPR,
				       ctype,
				       fold_convert (ctype, op0),
				       t1),
			       c, code, wide_type, strict_overflow_p);
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
      sub_strict_overflow_p = false;
      t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
      t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
      if (t1 != 0 && t2 != 0
	  && (code == MULT_EXPR
	      /* If not multiplication, we can only do this if both operands
		 are divisible by c.  */
	      || (multiple_of_p (ctype, op0, c)
		  && multiple_of_p (ctype, op1, c))))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}

      /* If this was a subtraction, negate OP1 and set it to be an addition.
	 This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
	{
	  tcode = PLUS_EXPR, op1 = negate_expr (op1);
	  /* If OP1 was not easily negatable, the constant may be OP0.  */
	  if (TREE_CODE (op0) == INTEGER_CST)
	    {
	      tree tem = op0;
	      op0 = op1;
	      op1 = tem;
	      tem = t1;
	      t1 = t2;
	      t2 = tem;
	    }
	}

      if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If either OP1 or C are negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
	    break;
	}

      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
	{
	  op1 = const_binop (code, fold_convert (ctype, op1),
			     fold_convert (ctype, c));
	  /* We allow the constant to overflow with wrapping semantics.  */
	  if (op1 == 0
	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
	    break;
	}
      else
	break;

      /* If we have an unsigned type, we cannot widen the operation since it
	 will change the result if the original computation overflowed.  */
      if (TYPE_UNSIGNED (ctype) && ctype != type)
	break;

      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow
	 and overflow is defined.  With undefined overflow
	 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.  */
      if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
	return fold_build2 (tcode, ctype,
			    fold_build2 (code, ctype,
					 fold_convert (ctype, op0),
					 fold_convert (ctype, c)),
			    op1);

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  /* If the multiplication can overflow we cannot optimize this.  */
	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
	{
	  *strict_overflow_p = true;
	  return omit_one_operand (type, integer_zero_node, op0);
	}

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && (t1 = extract_muldiv (op0, c, code, wide_type,
				   strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && (t1 = extract_muldiv (op1, c, code, wide_type,
					strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
			    fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
	return 0;

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code)
	{
	  double_int mul;
	  bool overflow_p;
	  unsigned prec = TYPE_PRECISION (ctype);
	  bool uns = TYPE_UNSIGNED (ctype);
	  double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
	  double_int dic = tree_to_double_int (c).ext (prec, uns);
	  mul = diop1.mul_with_sign (dic, false, &overflow_p);
	  overflow_p = ((!uns && overflow_p)
			| TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
	  if (!double_int_fits_to_tree_p (ctype, mul)
	      && ((uns && tcode != MULT_EXPR) || !uns))
	    overflow_p = true;
	  if (!overflow_p)
	    return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				double_int_to_tree (ctype, mul));
	}

      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with an
	 operation of either CODE or TCODE.

	 If we have an unsigned type, we cannot do this since it will change
	 the result if the original computation overflowed.  */
      if (TYPE_OVERFLOW_UNDEFINED (ctype)
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
		  && code != MULT_EXPR)))
	{
	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     op1, c)));
	    }
	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (code, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     c, op1)));
	    }
	}
      break;

    default:
      break;
    }

  return 0;
}
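
/* Illustration (hypothetical; not part of the GCC sources and not
   compiled): the motivating example from the comment before
   extract_muldiv.  */
#if 0
int scaled_sum (int x, int y)
{
  return (x * 8 + y * 16) / 4;
  /* With signed overflow undefined, extract_muldiv rewrites this as
     x * 2 + y * 4, eliminating the division entirely.  */
}
#endif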
/* Return a node which has the indicated constant VALUE (either 0 or
   1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
   and is of the indicated TYPE.  */

tree
constant_boolean_node (bool value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else if (TREE_CODE (type) == VECTOR_TYPE)
    return build_vector_from_val (type,
				  build_int_cst (TREE_TYPE (type),
						 value ? -1 : 0));
  else
    return fold_convert (type, value ? integer_one_node : integer_zero_node);
}
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (location_t loc,
				     enum tree_code code,
				     tree type, tree op0, tree op1,
				     tree cond, tree arg, int cond_first_p)
{
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;
  enum tree_code cond_code = COND_EXPR;

  if (TREE_CODE (cond) == COND_EXPR
      || TREE_CODE (cond) == VEC_COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an exception, then it does not make
	 sense to try to perform a logical or arithmetic operation
	 involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	rhs = false_value;
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
    cond_code = VEC_COND_EXPR;

  /* This transformation is only worthwhile if we don't have to wrap ARG
     in a SAVE_EXPR and the operation can be simplified without recursing
     on at least one of the branches once it's pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg)
      && (TREE_SIDE_EFFECTS (arg)
	  || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
	  || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
    return NULL_TREE;

  arg = fold_convert_loc (loc, arg_type, arg);
  if (lhs == 0)
    {
      true_value = fold_convert_loc (loc, cond_type, true_value);
      if (cond_first_p)
	lhs = fold_build2_loc (loc, code, type, true_value, arg);
      else
	lhs = fold_build2_loc (loc, code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert_loc (loc, cond_type, false_value);
      if (cond_first_p)
	rhs = fold_build2_loc (loc, code, type, false_value, arg);
      else
	rhs = fold_build2_loc (loc, code, type, arg, false_value);
    }

  /* Check that we have simplified at least one of the branches.  */
  if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
    return NULL_TREE;

  return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
}
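/* Illustrative sketch, not part of GCC: what the transformation above
   does at the source level.  cond_arg_demo is a hypothetical name;
   fenced with #if 0 so it can be compiled as a standalone program.  */
#if 0
#include <assert.h>

static int
cond_arg_demo (int a, int b, int x, int y)
{
  int before = a + (b ? x : y);
  int after = b ? (a + x) : (a + y);   /* the folded form */
  assert (before == after);
  return after;
}

int
main (void)
{
  assert (cond_arg_demo (5, 1, 10, 20) == 15);
  assert (cond_arg_demo (5, 0, 10, 20) == 25);
  return 0;
}
#endif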
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

bool
fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (TYPE_MODE (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (TREE_CODE (addend) == REAL_CST
      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}
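/* Illustrative sketch, not part of GCC: why the sign of zero blocks
   the X + 0.0 fold but not X - 0.0 in round-to-nearest.  Assumes
   IEEE-754 doubles; fenced with #if 0, compile standalone.  */
#if 0
#include <assert.h>
#include <math.h>

int
main (void)
{
  double neg_zero = -0.0;
  /* -0.0 + 0.0 is +0.0, so X + 0.0 is not X for X == -0.0 ...  */
  assert (!signbit (neg_zero + 0.0));
  /* ... but -0.0 - 0.0 is -0.0, so X - 0.0 preserves X (absent
     sign-dependent rounding).  */
  assert (signbit (neg_zero - 0.0));
  return 0;
}
#endif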
/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_mathfn_compare (location_t loc,
		     enum built_in_function fcode, enum tree_code code,
		     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      tree arg = CALL_EXPR_ARG (arg0, 0);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
	{
	  /* sqrt(x) < y is always false, if y is negative.  */
	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
	    return omit_one_operand_loc (loc, type, integer_zero_node, arg);

	  /* sqrt(x) > y is always true, if y is negative and we
	     don't care about NaNs, i.e. negative values of x.  */
	  if (code == NE_EXPR || !HONOR_NANS (mode))
	    return omit_one_operand_loc (loc, type, integer_one_node, arg);

	  /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
	  return fold_build2_loc (loc, GE_EXPR, type, arg,
				  build_real (TREE_TYPE (arg), dconst0));
	}
      else if (code == GT_EXPR || code == GE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) > y is x == +Inf, when y is very large.  */
	      if (HONOR_INFINITIES (mode))
		return fold_build2_loc (loc, EQ_EXPR, type, arg,
					build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) > y is always false, when y is very large
		 and we don't care about infinities.  */
	      return omit_one_operand_loc (loc, type, integer_zero_node, arg);
	    }

	  /* sqrt(x) > c is the same as x > c*c.  */
	  return fold_build2_loc (loc, code, type, arg,
				  build_real (TREE_TYPE (arg), c2));
	}
      else if (code == LT_EXPR || code == LE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) < y is always true, when y is a very large
		 value and we don't care about NaNs or Infinities.  */
	      if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
		return omit_one_operand_loc (loc, type, integer_one_node, arg);

	      /* sqrt(x) < y is x != +Inf when y is very large and we
		 don't care about NaNs.  */
	      if (! HONOR_NANS (mode))
		return fold_build2_loc (loc, NE_EXPR, type, arg,
					build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) < y is x >= 0 when y is very large and we
		 don't care about Infinities.  */
	      if (! HONOR_INFINITIES (mode))
		return fold_build2_loc (loc, GE_EXPR, type, arg,
					build_real (TREE_TYPE (arg), dconst0));

	      /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
	      arg = save_expr (arg);
	      return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				      fold_build2_loc (loc, GE_EXPR, type, arg,
						       build_real (TREE_TYPE (arg),
								   dconst0)),
				      fold_build2_loc (loc, NE_EXPR, type, arg,
						       build_real (TREE_TYPE (arg),
								   c2)));
	    }

	  /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
	  if (! HONOR_NANS (mode))
	    return fold_build2_loc (loc, code, type, arg,
				    build_real (TREE_TYPE (arg), c2));

	  /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
	  arg = save_expr (arg);
	  return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				  fold_build2_loc (loc, GE_EXPR, type, arg,
						   build_real (TREE_TYPE (arg),
							       dconst0)),
				  fold_build2_loc (loc, code, type, arg,
						   build_real (TREE_TYPE (arg),
							       c2)));
	}
    }

  return NULL_TREE;
}
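/* Illustrative sketch, not part of GCC: the sqrt(x) CMP c <-> x CMP c*c
   rewrite checked on exactly representable values.  The callers only
   enable this under unsafe-math optimizations, since rounding of sqrt
   can flip the result right at the boundary.  Hypothetical demo,
   fenced with #if 0; compile standalone and link with -lm.  */
#if 0
#include <assert.h>
#include <math.h>

int
main (void)
{
  const double c = 3.0;
  for (double x = 0.0; x <= 20.0; x += 0.25)
    {
      assert ((sqrt (x) > c) == (x > c * c));
      assert ((sqrt (x) < c) == (x >= 0.0 && x < c * c));
    }
  return 0;
}
#endif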
/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_inf_compare (location_t loc, enum tree_code code, tree type,
		  tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
	return NULL_TREE;
      return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
      arg0 = save_expr (arg0);
      return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
	return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
				arg0, build_real (TREE_TYPE (arg0), max));

      temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));
      return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);

    default:
      break;
    }

  return NULL_TREE;
}
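/* Illustrative sketch, not part of GCC: comparisons against +Inf
   rewritten as comparisons against DBL_MAX, as done above.  Assumes
   IEEE-754 doubles; fenced with #if 0, compile standalone.  */
#if 0
#include <assert.h>
#include <float.h>
#include <math.h>

int
main (void)
{
  const double xs[] = { -INFINITY, -1.0, 0.0, 1.0, DBL_MAX, INFINITY, NAN };
  for (unsigned i = 0; i < sizeof xs / sizeof xs[0]; i++)
    {
      double x = xs[i];
      assert ((x < INFINITY) == (x <= DBL_MAX));   /* LT_EXPR case */
      assert ((x >= INFINITY) == (x > DBL_MAX));   /* GE_EXPR case */
    }
  return 0;
}
#endif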
/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_div_compare (location_t loc,
		  enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  double_int val;
  bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
  bool neg_overflow;
  bool overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1);  */
  val = TREE_INT_CST (arg01)
	.mul_with_sign (TREE_INT_CST (arg1), unsigned_p, &overflow);
  prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
  neg_overflow = false;

  if (unsigned_p)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp).  */
      val = TREE_INT_CST (prod)
	    .add_with_sign (TREE_INT_CST (tmp), unsigned_p, &overflow);
      hi = force_fit_type_double (TREE_TYPE (arg00), val,
				  -1, overflow | TREE_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  neg_overflow = true;
	  lo = int_const_binop (MINUS_EXPR, prod, tmp);
	  hi = prod;
	  break;

	case 0:
	  lo = fold_negate_const (tmp, TREE_TYPE (arg0));
	  hi = tmp;
	  break;

	case 1:
	  hi = int_const_binop (PLUS_EXPR, prod, tmp);
	  lo = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  hi = int_const_binop (MINUS_EXPR, prod, tmp);
	  lo = prod;
	  break;

	case 0:
	  hi = fold_negate_const (tmp, TREE_TYPE (arg0));
	  lo = tmp;
	  break;

	case 1:
	  neg_overflow = true;
	  lo = int_const_binop (PLUS_EXPR, prod, tmp);
	  hi = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand_loc (loc, type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}
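/* Illustrative sketch, not part of GCC: the division-vs-constant
   comparison becomes a range check, e.g. (x / 4) == 2 is exactly
   8 <= x && x <= 11 with truncating division.  Fenced with #if 0;
   compile standalone.  */
#if 0
#include <assert.h>

int
main (void)
{
  for (int x = -100; x <= 100; x++)
    {
      assert (((x / 4) == 2) == (x >= 8 && x <= 11));
      assert (((x / 4) < 2) == (x < 8));   /* LT_EXPR case */
    }
  return 0;
}
#endif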
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of the test
   using a sign test.  Otherwise return NULL.  TYPE is the desired
   result type.  */

static tree
fold_single_bit_test_into_sign_test (location_t loc,
				     enum tree_code code, tree arg0, tree arg1,
				     tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      /* If we have (A & C) != 0 where C is the sign bit of A, convert
	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      if (arg00 != NULL_TREE
	  /* This is only a win if casting to a signed type is cheap,
	     i.e. when arg00's type is not a partial mode.  */
	  && TYPE_PRECISION (TREE_TYPE (arg00))
	     == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
	{
	  tree stype = signed_type_for (TREE_TYPE (arg00));
	  return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
				  result_type,
				  fold_convert_loc (loc, stype, arg00),
				  build_int_cst (stype, 0));
	}
    }

  return NULL_TREE;
}
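/* Illustrative sketch, not part of GCC: testing the sign bit with a
   bitwise AND is the same as a signed comparison with zero.  Assumes
   two's-complement 32-bit integers; fenced with #if 0, compile
   standalone.  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  const int32_t xs[] = { INT32_MIN, -2, -1, 0, 1, INT32_MAX };
  for (unsigned i = 0; i < sizeof xs / sizeof xs[0]; i++)
    {
      int32_t x = xs[i];
      assert (((x & INT32_MIN) != 0) == (x < 0));    /* NE_EXPR -> LT_EXPR */
      assert (((x & INT32_MIN) == 0) == (x >= 0));   /* EQ_EXPR -> GE_EXPR */
    }
  return 0;
}
#endif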
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */

tree
fold_single_bit_test (location_t loc, enum tree_code code,
		      tree arg0, tree arg1, tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem, one;

      /* First, see if we can fold the single bit test into a sign-bit
	 test.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
						 result_type);
      if (tem)
	return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
	 convert that into ((A >> C2) & 1).  Where C2 = log2(C).
	 Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
		      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert_loc (loc, intermediate_type, inner);

      if (bitnum != 0)
	inner = build2 (RSHIFT_EXPR, intermediate_type,
			inner, size_int (bitnum));

      one = build_int_cst (intermediate_type, 1);

      if (code == EQ_EXPR)
	inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type,
				 inner, one);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);

      /* Make sure to return the proper type.  */
      inner = fold_convert_loc (loc, result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
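/* Illustrative sketch, not part of GCC: the shift-and-mask form of a
   single-bit test produced above, for C = 8 and C2 = log2(C) = 3.
   Fenced with #if 0; compile standalone.  */
#if 0
#include <assert.h>

int
main (void)
{
  for (unsigned x = 0; x < 64; x++)
    {
      assert (((x & 8) != 0) == ((x >> 3) & 1));
      /* The EQ_EXPR variant XORs in 1 before the final AND.  */
      assert (((x & 8) == 0) == (((x >> 3) ^ 1) & 1));
    }
  return 0;
}
#endif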
/* Check whether we are allowed to reorder operands arg0 and arg1,
   such that the evaluation of arg1 occurs before arg0.  */

static bool
reorder_operands_p (const_tree arg0, const_tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
	 && ! TREE_SIDE_EFFECTS (arg1);
}
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */

bool
tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
{
  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return 0;
  if (TREE_CODE (arg0) == INTEGER_CST)
    return 1;

  if (TREE_CODE (arg1) == REAL_CST)
    return 0;
  if (TREE_CODE (arg0) == REAL_CST)
    return 1;

  if (TREE_CODE (arg1) == FIXED_CST)
    return 0;
  if (TREE_CODE (arg0) == FIXED_CST)
    return 1;

  if (TREE_CODE (arg1) == COMPLEX_CST)
    return 0;
  if (TREE_CODE (arg0) == COMPLEX_CST)
    return 1;

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (optimize_function_for_size_p (cfun))
    return 0;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return 1;

  /* Put SSA_NAMEs last.  */
  if (TREE_CODE (arg1) == SSA_NAME)
    return 0;
  if (TREE_CODE (arg0) == SSA_NAME)
    return 1;

  /* Put variables last.  */
  if (DECL_P (arg1))
    return 0;
  if (DECL_P (arg0))
    return 1;

  return 0;
}
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
   ARG0 is extended to a wider type.  */

static tree
fold_widened_comparison (location_t loc, enum tree_code code,
			 tree type, tree arg0, tree arg1)
{
  tree arg0_unw = get_unwidened (arg0, NULL_TREE);
  tree arg1_unw;
  tree shorter_type, outer_type;
  tree min, max;
  bool above, below;

  if (arg0_unw == arg0)
    return NULL_TREE;
  shorter_type = TREE_TYPE (arg0_unw);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (shorter_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
    return NULL_TREE;

  arg1_unw = get_unwidened (arg1, NULL_TREE);

  /* If possible, express the comparison in the shorter mode.  */
  if ((code == EQ_EXPR || code == NE_EXPR
       || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
      && (TREE_TYPE (arg1_unw) == shorter_type
	  || ((TYPE_PRECISION (shorter_type)
	       >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
	      && (TYPE_UNSIGNED (shorter_type)
		  == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
	  || (TREE_CODE (arg1_unw) == INTEGER_CST
	      && (TREE_CODE (shorter_type) == INTEGER_TYPE
		  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
	      && int_fits_type_p (arg1_unw, shorter_type))))
    return fold_build2_loc (loc, code, type, arg0_unw,
			    fold_convert_loc (loc, shorter_type, arg1_unw));

  if (TREE_CODE (arg1_unw) != INTEGER_CST
      || TREE_CODE (shorter_type) != INTEGER_TYPE
      || !int_fits_type_p (arg1_unw, shorter_type))
    return NULL_TREE;

  /* If we are comparing with the integer that does not fit into the range
     of the shorter type, the result is known.  */
  outer_type = TREE_TYPE (arg1_unw);
  min = lower_bound_in_type (outer_type, shorter_type);
  max = upper_bound_in_type (outer_type, shorter_type);

  above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   max, arg1_unw));
  below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   arg1_unw, min));

  switch (code)
    {
    case EQ_EXPR:
      if (above || below)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;

    case NE_EXPR:
      if (above || below)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;

    case LT_EXPR:
    case LE_EXPR:
      if (above)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      else if (below)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;

    case GT_EXPR:
    case GE_EXPR:
      if (above)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      else if (below)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
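/* Illustrative sketch, not part of GCC: comparisons of a widened value
   can be done in the shorter type, and comparing against a constant
   outside the shorter type's range has a known result.  Assumes a
   16-bit unsigned short; fenced with #if 0, compile standalone.  */
#if 0
#include <assert.h>

int
main (void)
{
  for (int i = 0; i <= 0xFFFF; i++)
    {
      unsigned short us = (unsigned short) i;
      assert ((((int) us) == 7) == (us == 7));  /* shorter-mode compare */
      assert (((int) us) < 70000);              /* known result: true */
    }
  return 0;
}
#endif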
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
   ARG0 just the signedness is changed.  */

static tree
fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
			      tree arg0, tree arg1)
{
  tree arg0_inner;
  tree inner_type, outer_type;

  if (!CONVERT_EXPR_P (arg0))
    return NULL_TREE;

  outer_type = TREE_TYPE (arg0);
  arg0_inner = TREE_OPERAND (arg0, 0);
  inner_type = TREE_TYPE (arg0_inner);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (inner_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) != INTEGER_CST
      && !(CONVERT_EXPR_P (arg1)
	   && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
    return NULL_TREE;

  if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
      && code != NE_EXPR
      && code != EQ_EXPR)
    return NULL_TREE;

  if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) == INTEGER_CST)
    arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
				  0, TREE_OVERFLOW (arg1));
  else
    arg1 = fold_convert_loc (loc, inner_type, arg1);

  return fold_build2_loc (loc, code, type, arg0_inner, arg1);
}
/* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
   step of the array.  Reconstructs s and delta in the case of s *
   delta being an integer constant (and thus already folded).  ADDR is
   the address.  OP1 is the multiplicative expression.  If the
   function succeeds, the new address expression is returned.
   Otherwise NULL_TREE is returned.  LOC is the location of the
   resulting expression.  */

static tree
try_move_mult_to_index (location_t loc, tree addr, tree op1)
{
  tree s, delta, step;
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;
  bool mdim = false;

  /* Strip the nops that might be added when converting op1 to sizetype.  */
  STRIP_NOPS (op1);

  /* Canonicalize op1 into a possibly non-constant delta
     and an INTEGER_CST s.  */
  if (TREE_CODE (op1) == MULT_EXPR)
    {
      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  s = arg0;
	  delta = arg1;
	}
      else if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  s = arg1;
	  delta = arg0;
	}
      else
	return NULL_TREE;
    }
  else if (TREE_CODE (op1) == INTEGER_CST)
    {
      delta = op1;
      s = NULL_TREE;
    }
  else
    {
      /* Simulate we are delta * 1.  */
      delta = op1;
      s = integer_one_node;
    }

  /* Handle &x.array the same as we would handle &x.array[0].  */
  if (TREE_CODE (ref) == COMPONENT_REF
      && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
    {
      tree domain;

      /* Remember if this was a multi-dimensional array.  */
      if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
	mdim = true;

      domain = TYPE_DOMAIN (TREE_TYPE (ref));
      if (! domain)
	goto cont;
      itype = TREE_TYPE (domain);

      step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
      if (TREE_CODE (step) != INTEGER_CST)
	goto cont;

      if (s)
	{
	  if (! tree_int_cst_equal (step, s))
	    goto cont;
	}
      else
	{
	  /* Try if delta is a multiple of step.  */
	  tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
	  if (! tmp)
	    goto cont;
	  delta = tmp;
	}

      /* Only fold here if we can verify we do not overflow one
	 dimension of a multi-dimensional array.  */
      if (mdim)
	{
	  tree tmp;

	  if (!TYPE_MIN_VALUE (domain)
	      || !TYPE_MAX_VALUE (domain)
	      || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
	    goto cont;

	  tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
				 fold_convert_loc (loc, itype,
						   TYPE_MIN_VALUE (domain)),
				 fold_convert_loc (loc, itype, delta));
	  if (TREE_CODE (tmp) != INTEGER_CST
	      || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
	    goto cont;
	}

      /* We found a suitable component reference.  */

      pref = TREE_OPERAND (addr, 0);
      ret = copy_node (pref);
      SET_EXPR_LOCATION (ret, loc);

      ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
			fold_build2_loc
			  (loc, PLUS_EXPR, itype,
			   fold_convert_loc (loc, itype,
					     TYPE_MIN_VALUE
					       (TYPE_DOMAIN (TREE_TYPE (ref)))),
			   fold_convert_loc (loc, itype, delta)),
			NULL_TREE, NULL_TREE);
      return build_fold_addr_expr_loc (loc, ret);
    }

cont:

  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
	{
	  tree domain;

	  /* Remember if this was a multi-dimensional array.  */
	  if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
	    mdim = true;

	  domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
	  if (! domain)
	    continue;
	  itype = TREE_TYPE (domain);

	  step = array_ref_element_size (ref);
	  if (TREE_CODE (step) != INTEGER_CST)
	    continue;

	  if (s)
	    {
	      if (! tree_int_cst_equal (step, s))
		continue;
	    }
	  else
	    {
	      /* Try if delta is a multiple of step.  */
	      tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
	      if (! tmp)
		continue;
	      delta = tmp;
	    }

	  /* Only fold here if we can verify we do not overflow one
	     dimension of a multi-dimensional array.  */
	  if (mdim)
	    {
	      tree tmp;

	      if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
		  || !TYPE_MAX_VALUE (domain)
		  || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
		continue;

	      tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
				     fold_convert_loc (loc, itype,
						       TREE_OPERAND (ref, 1)),
				     fold_convert_loc (loc, itype, delta));
	      if (!tmp
		  || TREE_CODE (tmp) != INTEGER_CST
		  || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
		continue;
	    }

	  break;
	}
      else
	mdim = false;

      if (!handled_component_p (ref))
	return NULL_TREE;
    }

  /* We found the suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  SET_EXPR_LOCATION (ret, loc);
  pos = ret;

  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  TREE_OPERAND (pos, 1)
    = fold_build2_loc (loc, PLUS_EXPR, itype,
		       fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
		       fold_convert_loc (loc, itype, delta));
  return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
}
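/* Illustrative sketch, not part of GCC: the pointer identity this
   transformation relies on -- a byte offset of delta * step from
   &a[idx] is &a[idx + delta].  Fenced with #if 0; compile standalone.  */
#if 0
#include <assert.h>
#include <stddef.h>

int
main (void)
{
  int a[10];
  size_t idx = 2, delta = 3;
  char *p = (char *) &a[idx] + delta * sizeof (int);
  assert ((int *) p == &a[idx + delta]);
  return 0;
}
#endif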
/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */

static tree
fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
{
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;

  if (TREE_CODE (bound) == LT_EXPR)
    a = TREE_OPERAND (bound, 0);
  else if (TREE_CODE (bound) == GT_EXPR)
    a = TREE_OPERAND (bound, 1);
  else
    return NULL_TREE;

  typea = TREE_TYPE (a);
  if (!INTEGRAL_TYPE_P (typea)
      && !POINTER_TYPE_P (typea))
    return NULL_TREE;

  if (TREE_CODE (ineq) == LT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 1);
      y = TREE_OPERAND (ineq, 0);
    }
  else if (TREE_CODE (ineq) == GT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 0);
      y = TREE_OPERAND (ineq, 1);
    }
  else
    return NULL_TREE;

  if (TREE_TYPE (a1) != typea)
    return NULL_TREE;

  if (POINTER_TYPE_P (typea))
    {
      /* Convert the pointer types into integer before taking the difference.  */
      tree ta = fold_convert_loc (loc, ssizetype, a);
      tree ta1 = fold_convert_loc (loc, ssizetype, a1);
      diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
    }
  else
    diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);

  if (!diff || !integer_onep (diff))
    return NULL_TREE;

  return fold_build2_loc (loc, GE_EXPR, type, a, y);
}
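/* Illustrative sketch, not part of GCC: for integers, A + 1 > Y is
   A >= Y whenever A + 1 does not overflow, which the bound A < X
   guarantees.  Fenced with #if 0; compile standalone.  */
#if 0
#include <assert.h>

int
main (void)
{
  for (int a = -50; a <= 50; a++)
    for (int x = -50; x <= 50; x++)
      for (int y = -50; y <= 50; y++)
	if (a < x)
	  assert ((a + 1 > y) == (a >= y));
  return 0;
}
#endif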
/* Fold a sum or difference of at least one multiplication.
   Returns the folded tree or NULL if no simplification could be made.  */

static tree
fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
			  tree arg0, tree arg1)
{
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else if (TREE_CODE (arg0) == INTEGER_CST)
    {
      arg00 = build_one_cst (type);
      arg01 = arg0;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      arg00 = arg0;
      arg01 = build_one_cst (type);
    }
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else if (TREE_CODE (arg1) == INTEGER_CST)
    {
      arg10 = build_one_cst (type);
      /* As we canonicalize A - 2 to A + -2 get rid of that sign for
	 the purpose of this canonicalization.  */
      if (TREE_INT_CST_HIGH (arg1) == -1
	  && negate_expr_p (arg1)
	  && code == PLUS_EXPR)
	{
	  arg11 = negate_expr (arg1);
	  code = MINUS_EXPR;
	}
      else
	arg11 = arg1;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      arg10 = arg1;
      arg11 = build_one_cst (type);
    }
  same = NULL_TREE;

  if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
  else if (host_integerp (arg01, 0)
	   && host_integerp (arg11, 0))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = TREE_INT_CST_LOW (arg01);
      int11 = TREE_INT_CST_LOW (arg11);

      /* Move min of absolute values to int11.  */
      if (absu_hwi (int01) < absu_hwi (int11))
	{
	  tmp = int01, int01 = int11, int11 = tmp;
	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
	  maybe_same = alt0;
	  swap = true;
	}
      else
	maybe_same = arg11;

      if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
	  /* The remainder should not be a constant, otherwise we
	     end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
	     increased the number of multiplications necessary.  */
	  && TREE_CODE (arg10) != INTEGER_CST)
	{
	  alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
				  build_int_cst (TREE_TYPE (arg00),
						 int01 / int11));
	  alt1 = arg10;
	  same = maybe_same;
	  if (swap)
	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
	}
    }

  if (same)
    return fold_build2_loc (loc, MULT_EXPR, type,
			    fold_build2_loc (loc, code, type,
					     fold_convert_loc (loc, type, alt0),
					     fold_convert_loc (loc, type, alt1)),
			    fold_convert_loc (loc, type, same));

  return NULL_TREE;
}
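/* Illustrative sketch, not part of GCC: the distributive rewrites
   performed above.  Fenced with #if 0; compile standalone.  */
#if 0
#include <assert.h>

int
main (void)
{
  for (int a = -10; a <= 10; a++)
    for (int b = -10; b <= 10; b++)
      for (int c = -10; c <= 10; c++)
	{
	  assert (a * c + b * c == (a + b) * c);   /* (A*C) + (B*C) */
	  assert (a * c - a == a * (c - 1));       /* (A*C) - A */
	}
  return 0;
}
#endif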
/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_int (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  if (total_bytes > len)
    return 0;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (bitpos < HOST_BITS_PER_WIDE_INT)
	value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
      else
	value = (unsigned char) (TREE_INT_CST_HIGH (expr)
				 >> (bitpos - HOST_BITS_PER_WIDE_INT));

      if (total_bytes > UNITS_PER_WORD)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      ptr[offset] = value;
    }
  return total_bytes;
}
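/* Illustrative sketch, not part of GCC: the per-byte extraction used
   above, shown for a little-endian layout; a big-endian target stores
   byte I at offset (total_bytes - 1) - I instead.  Fenced with #if 0;
   compile standalone.  */
#if 0
#include <assert.h>
#include <stdint.h>

static unsigned char
byte_at (uint32_t val, int i)
{
  return (unsigned char) (val >> (i * 8));
}

int
main (void)
{
  uint32_t v = 0x11223344;
  assert (byte_at (v, 0) == 0x44);
  assert (byte_at (v, 3) == 0x11);
  return 0;
}
#endif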
/* Subroutine of native_encode_expr.  Encode the FIXED_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_fixed (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  enum machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  FIXED_VALUE_TYPE value;
  tree i_value, i_type;

  if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return 0;

  i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);

  if (NULL_TREE == i_type
      || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
    return 0;

  value = TREE_FIXED_CST (expr);
  i_value = double_int_to_tree (i_type, value.data);

  return native_encode_int (i_value, ptr, len);
}
/* Subroutine of native_encode_expr.  Encode the REAL_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_real (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words, bitpos;
  unsigned char value;

  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  long tmp[6];

  if (total_bytes > len)
    return 0;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));

  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));

      if (UNITS_PER_WORD < 4)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
    }
  return total_bytes;
}
/* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_complex (const_tree expr, unsigned char *ptr, int len)
{
  int rsize, isize;
  tree part;

  part = TREE_REALPART (expr);
  rsize = native_encode_expr (part, ptr, len);
  if (rsize == 0)
    return 0;
  part = TREE_IMAGPART (expr);
  isize = native_encode_expr (part, ptr+rsize, len-rsize);
  if (rsize != isize)
    return 0;
  return rsize + isize;
}
/* Subroutine of native_encode_expr.  Encode the VECTOR_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_vector (const_tree expr, unsigned char *ptr, int len)
{
  unsigned i, count;
  int size, offset;
  tree itype, elem;

  offset = 0;
  count = VECTOR_CST_NELTS (expr);
  itype = TREE_TYPE (TREE_TYPE (expr));
  size = GET_MODE_SIZE (TYPE_MODE (itype));
  for (i = 0; i < count; i++)
    {
      elem = VECTOR_CST_ELT (expr, i);
      if (native_encode_expr (elem, ptr+offset, len-offset) != size)
	return 0;
      offset += size;
    }
  return offset;
}
/* Subroutine of native_encode_expr.  Encode the STRING_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_string (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  HOST_WIDE_INT total_bytes;

  if (TREE_CODE (type) != ARRAY_TYPE
      || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
      || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
      || !host_integerp (TYPE_SIZE_UNIT (type), 0))
    return 0;
  total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
  if (total_bytes > len)
    return 0;
  if (TREE_STRING_LENGTH (expr) < total_bytes)
    {
      memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
      memset (ptr + TREE_STRING_LENGTH (expr), 0,
	      total_bytes - TREE_STRING_LENGTH (expr));
    }
  else
    memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
  return total_bytes;
}
/* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
   REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
   buffer PTR of length LEN bytes.  Return the number of bytes
   placed in the buffer, or zero upon failure.  */

int
native_encode_expr (const_tree expr, unsigned char *ptr, int len)
{
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return native_encode_int (expr, ptr, len);

    case REAL_CST:
      return native_encode_real (expr, ptr, len);

    case FIXED_CST:
      return native_encode_fixed (expr, ptr, len);

    case COMPLEX_CST:
      return native_encode_complex (expr, ptr, len);

    case VECTOR_CST:
      return native_encode_vector (expr, ptr, len);

    case STRING_CST:
      return native_encode_string (expr, ptr, len);

    default:
      return 0;
    }
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_int (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  double_int result;

  if (total_bytes > len
      || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return NULL_TREE;

  result = double_int::from_buffer (ptr, total_bytes);

  return double_int_to_tree (type, result);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a FIXED_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_fixed (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  double_int result;
  FIXED_VALUE_TYPE fixed_value;

  if (total_bytes > len
      || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return NULL_TREE;

  result = double_int::from_buffer (ptr, total_bytes);
  fixed_value = fixed_from_double_int (result, TYPE_MODE (type));

  return build_fixed (type, fixed_value);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a REAL_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_real (tree type, const unsigned char *ptr, int len)
{
  enum machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  int byte, offset, word, words, bitpos;
  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  REAL_VALUE_TYPE r;
  long tmp[6];

  total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  if (total_bytes > len || total_bytes > 24)
    return NULL_TREE;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  memset (tmp, 0, sizeof (tmp));
  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      if (UNITS_PER_WORD < 4)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];

      tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
    }

  real_from_target (&r, tmp, mode);
  return build_real (type, r);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_complex (tree type, const unsigned char *ptr, int len)
{
  tree etype, rpart, ipart;
  int size;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  if (size * 2 > len)
    return NULL_TREE;
  rpart = native_interpret_expr (etype, ptr, size);
  if (!rpart)
    return NULL_TREE;
  ipart = native_interpret_expr (etype, ptr+size, size);
  if (!ipart)
    return NULL_TREE;
  return build_complex (type, rpart, ipart);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_vector (tree type, const unsigned char *ptr, int len)
{
  tree etype, elem;
  int i, size, count;
  tree *elements;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  count = TYPE_VECTOR_SUBPARTS (type);
  if (size * count > len)
    return NULL_TREE;

  elements = XALLOCAVEC (tree, count);
  for (i = count - 1; i >= 0; i--)
    {
      elem = native_interpret_expr (etype, ptr+(i*size), size);
      if (!elem)
	return NULL_TREE;
      elements[i] = elem;
    }
  return build_vector (type, elements);
}
/* Subroutine of fold_view_convert_expr.  Interpret the contents of
   the buffer PTR of length LEN as a constant of type TYPE.  For
   INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
   we return a REAL_CST, etc...  If the buffer cannot be interpreted,
   return NULL_TREE.  */

tree
native_interpret_expr (tree type, const unsigned char *ptr, int len)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      return native_interpret_int (type, ptr, len);

    case REAL_TYPE:
      return native_interpret_real (type, ptr, len);

    case FIXED_POINT_TYPE:
      return native_interpret_fixed (type, ptr, len);

    case COMPLEX_TYPE:
      return native_interpret_complex (type, ptr, len);

    case VECTOR_TYPE:
      return native_interpret_vector (type, ptr, len);

    default:
      return NULL_TREE;
    }
}

/* Returns true if we can interpret the contents of a native encoding
   as TYPE.  */

static bool
can_native_interpret_type_p (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case FIXED_POINT_TYPE:
    case REAL_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
      return true;
    default:
      return false;
    }
}
/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
   TYPE at compile-time.  If we're unable to perform the conversion
   return NULL_TREE.  */

static tree
fold_view_convert_expr (tree type, tree expr)
{
  /* We support up to 512-bit values (for V8DFmode).  */
  unsigned char buffer[64];
  int len;

  /* Check that the host and target are sane.  */
  if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    return NULL_TREE;

  len = native_encode_expr (expr, buffer, sizeof (buffer));
  if (len == 0)
    return NULL_TREE;

  return native_interpret_expr (type, buffer, len);
}
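/* Illustrative sketch, not part of GCC: VIEW_CONVERT_EXPR reinterprets
   a value's bytes in a new type, which is what memcpy-based type
   punning expresses in source code.  Assumes IEEE-754 single precision
   (1.0f is 0x3f800000).  Fenced with #if 0; compile standalone.  */
#if 0
#include <assert.h>
#include <stdint.h>
#include <string.h>

int
main (void)
{
  float f = 1.0f;
  uint32_t bits;
  memcpy (&bits, &f, sizeof bits);
  assert (bits == 0x3f800000u);
  return 0;
}
#endif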
/* Build an expression for the address of T.  Folds away INDIRECT_REF
   to avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  if (TREE_CODE (t) == INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);

      if (TREE_TYPE (t) != ptrtype)
	t = build1_loc (loc, NOP_EXPR, ptrtype, t);
    }
  else if (TREE_CODE (t) == MEM_REF
	   && integer_zerop (TREE_OPERAND (t, 1)))
    return TREE_OPERAND (t, 0);
  else if (TREE_CODE (t) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
    return fold_binary (POINTER_PLUS_EXPR, ptrtype,
			TREE_OPERAND (t, 0),
			convert_to_ptrofftype (TREE_OPERAND (t, 1)));
  else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
    {
      t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));

      if (TREE_TYPE (t) != ptrtype)
	t = fold_convert_loc (loc, ptrtype, t);
    }
  else
    t = build1_loc (loc, ADDR_EXPR, ptrtype, t);

  return t;
}

/* Build an expression for the address of T.  */

tree
build_fold_addr_expr_loc (location_t loc, tree t)
{
  tree ptrtype = build_pointer_type (TREE_TYPE (t));

  return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
}

static bool vec_cst_ctor_to_array (tree, tree *);
/* Fold a unary expression of code CODE and type TYPE with operand
   OP0.  Return the folded expression if folding is successful.
   Otherwise, return NULL_TREE.  */

tree
fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
{
  tree tem;
  tree arg0;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 1);

  arg0 = op0;
  if (arg0)
    {
      if (CONVERT_EXPR_CODE_P (code)
	  || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
	{
	  /* Don't use STRIP_NOPS, because signedness of argument type
	     matters.  */
	  STRIP_SIGN_NOPS (arg0);
	}
      else
	{
	  /* Strip any conversions that don't change the mode.  This
	     is safe for every expression, except for a comparison
	     expression because its signedness is derived from its
	     operands.

	     Note that this is done as an internal manipulation within
	     the constant folder, in order to find the simplest
	     representation of the arguments so that their form can be
	     studied.  In any case, the appropriate type conversions
	     should be put back in the tree that will get out of the
	     constant folder.  */
	  STRIP_NOPS (arg0);
	}
    }

  if (TREE_CODE_CLASS (code) == tcc_unary)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
		       fold_build1_loc (loc, code, type,
					fold_convert_loc (loc, TREE_TYPE (op0),
							  TREE_OPERAND (arg0, 1))));
      else if (TREE_CODE (arg0) == COND_EXPR)
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg02 = TREE_OPERAND (arg0, 2);
	  if (! VOID_TYPE_P (TREE_TYPE (arg01)))
	    arg01 = fold_build1_loc (loc, code, type,
				     fold_convert_loc (loc,
						       TREE_TYPE (op0), arg01));
	  if (! VOID_TYPE_P (TREE_TYPE (arg02)))
	    arg02 = fold_build1_loc (loc, code, type,
				     fold_convert_loc (loc,
						       TREE_TYPE (op0), arg02));
	  tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
				 arg01, arg02);

	  /* If this was a conversion, and all we did was to move into
	     inside the COND_EXPR, bring it back out.  But leave it if
	     it is a conversion from integer to integer and the
	     result precision is no wider than a word since such a
	     conversion is cheap and may be optimized away by combine,
	     while it couldn't if it were outside the COND_EXPR.  Then return
	     so we don't get into an infinite recursion loop taking the
	     conversion out and then back in.  */

	  if ((CONVERT_EXPR_CODE_P (code)
	       || code == NON_LVALUE_EXPR)
	      && TREE_CODE (tem) == COND_EXPR
	      && TREE_CODE (TREE_OPERAND (tem, 1)) == code
	      && TREE_CODE (TREE_OPERAND (tem, 2)) == code
	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
	      && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
	      && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
		     && (INTEGRAL_TYPE_P
			 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
		     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
		  || flag_syntax_only))
	    tem = build1_loc (loc, code, type,
			      build3 (COND_EXPR,
				      TREE_TYPE (TREE_OPERAND
						 (TREE_OPERAND (tem, 1), 0)),
				      TREE_OPERAND (tem, 0),
				      TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
				      TREE_OPERAND (TREE_OPERAND (tem, 2),
						    0)));
	  return tem;
	}
    }

  switch (code)
    {
    case PAREN_EXPR:
      /* Re-association barriers around constants and other re-association
	 barriers can be removed.  */
      if (CONSTANT_CLASS_P (op0)
	  || TREE_CODE (op0) == PAREN_EXPR)
	return fold_convert_loc (loc, type, op0);
      return NULL_TREE;

    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      if (TREE_TYPE (op0) == type)
	return op0;

      if (COMPARISON_CLASS_P (op0))
	{
	  /* If we have (type) (a CMP b) and type is an integral type, return
	     new expression involving the new type.  Canonicalize
	     (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
	     non-integral type.
	     Do not fold the result as that would not simplify further, also
	     folding again results in recursions.  */
	  if (TREE_CODE (type) == BOOLEAN_TYPE)
	    return build2_loc (loc, TREE_CODE (op0), type,
			       TREE_OPERAND (op0, 0),
			       TREE_OPERAND (op0, 1));
	  else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
		   && TREE_CODE (type) != VECTOR_TYPE)
	    return build3_loc (loc, COND_EXPR, type, op0,
			       constant_boolean_node (true, type),
			       constant_boolean_node (false, type));
	}
      /* Handle cases of two conversions in a row.  */
      if (CONVERT_EXPR_P (op0))
	{
	  tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
	  tree inter_type = TREE_TYPE (op0);
	  int inside_int = INTEGRAL_TYPE_P (inside_type);
	  int inside_ptr = POINTER_TYPE_P (inside_type);
	  int inside_float = FLOAT_TYPE_P (inside_type);
	  int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
	  unsigned int inside_prec = TYPE_PRECISION (inside_type);
	  int inside_unsignedp = TYPE_UNSIGNED (inside_type);
	  int inter_int = INTEGRAL_TYPE_P (inter_type);
	  int inter_ptr = POINTER_TYPE_P (inter_type);
	  int inter_float = FLOAT_TYPE_P (inter_type);
	  int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
	  unsigned int inter_prec = TYPE_PRECISION (inter_type);
	  int inter_unsignedp = TYPE_UNSIGNED (inter_type);
	  int final_int = INTEGRAL_TYPE_P (type);
	  int final_ptr = POINTER_TYPE_P (type);
	  int final_float = FLOAT_TYPE_P (type);
	  int final_vec = TREE_CODE (type) == VECTOR_TYPE;
	  unsigned int final_prec = TYPE_PRECISION (type);
	  int final_unsignedp = TYPE_UNSIGNED (type);

	  /* In addition to the cases of two conversions in a row
	     handled below, if we are converting something to its own
	     type via an object of identical or wider precision, neither
	     conversion is needed.  */
	  if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
	      && (((inter_int || inter_ptr) && final_int)
		  || (inter_float && final_float))
	      && inter_prec >= final_prec)
	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));

	  /* Likewise, if the intermediate and initial types are either both
	     float or both integer, we don't need the middle conversion if the
	     former is wider than the latter and doesn't change the signedness
	     (for integers).  Avoid this if the final type is a pointer since
	     then we sometimes need the middle conversion.  Likewise if the
	     final type has a precision not equal to the size of its mode.  */
	  if (((inter_int && inside_int)
	       || (inter_float && inside_float)
	       || (inter_vec && inside_vec))
	      && inter_prec >= inside_prec
	      && (inter_float || inter_vec
		  || inter_unsignedp == inside_unsignedp)
	      && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
		    && TYPE_MODE (type) == TYPE_MODE (inter_type))
	      && ! final_ptr
	      && (! final_vec || inter_prec == inside_prec))
	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));

	  /* If we have a sign-extension of a zero-extended value, we can
	     replace that by a single zero-extension.  Likewise if the
	     final conversion does not change precision we can drop the
	     intermediate conversion.  */
	  if (inside_int && inter_int && final_int
	      && ((inside_prec < inter_prec && inter_prec < final_prec
		   && inside_unsignedp && !inter_unsignedp)
		  || final_prec == inter_prec))
	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));

	  /* Two conversions in a row are not needed unless:
	     - some conversion is floating-point (overstrict for now), or
	     - some conversion is a vector (overstrict for now), or
	     - the intermediate type is narrower than both initial and
	       final, or
	     - the intermediate type and innermost type differ in signedness,
	       and the outermost type is wider than the intermediate, or
	     - the initial type is a pointer type and the precisions of the
	       intermediate and final types differ, or
	     - the final type is a pointer type and the precisions of the
	       initial and intermediate types differ.  */
	  if (! inside_float && ! inter_float && ! final_float
	      && ! inside_vec && ! inter_vec && ! final_vec
	      && (inter_prec >= inside_prec || inter_prec >= final_prec)
	      && ! (inside_int && inter_int
		    && inter_unsignedp != inside_unsignedp
		    && inter_prec < final_prec)
	      && ((inter_unsignedp && inter_prec > inside_prec)
		  == (final_unsignedp && final_prec > inter_prec))
	      && ! (inside_ptr && inter_prec != final_prec)
	      && ! (final_ptr && inside_prec != inter_prec)
	      && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
		    && TYPE_MODE (type) == TYPE_MODE (inter_type)))
	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
	}
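      /* Illustrative sketch, not part of GCC: when the middle
	 conversion is a widening one it can be dropped, but a
	 signedness-changing middle cast must stay.  Fenced with #if 0;
	 compile standalone.  */
#if 0
#include <assert.h>

int
main (void)
{
  for (int i = -128; i <= 127; i++)
    {
      signed char sc = (signed char) i;
      /* Widening middle cast is redundant.  */
      assert ((int) (long) sc == (int) sc);
      /* Signedness-changing middle cast alters negative inputs.  */
      assert ((int) (unsigned char) sc == (sc & 0xff));
    }
  return 0;
}
#endif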
      /* Handle (T *)&A.B.C for A being of type T and B and C
	 living at offset zero.  This occurs frequently in
	 C++ upcasting and then accessing the base.  */
      if (TREE_CODE (op0) == ADDR_EXPR
	  && POINTER_TYPE_P (type)
	  && handled_component_p (TREE_OPERAND (op0, 0)))
	{
	  HOST_WIDE_INT bitsize, bitpos;
	  tree offset;
	  enum machine_mode mode;
	  int unsignedp, volatilep;
	  tree base = TREE_OPERAND (op0, 0);
	  base = get_inner_reference (base, &bitsize, &bitpos, &offset,
				      &mode, &unsignedp, &volatilep, false);
	  /* If the reference was to a (constant) zero offset, we can use
	     the address of the base if it has the same base type
	     as the result type and the pointer type is unqualified.  */
	  if (! offset && bitpos == 0
	      && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
		  == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
	      && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
	    return fold_convert_loc (loc, type,
				     build_fold_addr_expr_loc (loc, base));
	}

      if (TREE_CODE (op0) == MODIFY_EXPR
	  && TREE_CONSTANT (TREE_OPERAND (op0, 1))
	  /* Detect assigning a bitfield.  */
	  && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
	       && DECL_BIT_FIELD
		  (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
	{
	  /* Don't leave an assignment inside a conversion
	     unless assigning a bitfield.  */
	  tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
	  /* First do the assignment, then return converted constant.  */
	  tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
	  TREE_NO_WARNING (tem) = 1;
	  TREE_USED (tem) = 1;
	  return tem;
	}
      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
	 constant (if x has signed type, the sign bit cannot be set
	 in c).  This folds extension into the BIT_AND_EXPR.
	 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
	 very likely don't have maximal range for their precision and this
	 transformation effectively doesn't preserve non-maximal ranges.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TREE_CODE (op0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
	{
	  tree and_expr = op0;
	  tree and0 = TREE_OPERAND (and_expr, 0);
	  tree and1 = TREE_OPERAND (and_expr, 1);
	  int change = 0;

	  if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
	      || (TYPE_PRECISION (type)
		  <= TYPE_PRECISION (TREE_TYPE (and_expr))))
	    change = 1;
	  else if (TYPE_PRECISION (TREE_TYPE (and1))
		   <= HOST_BITS_PER_WIDE_INT
		   && host_integerp (and1, 1))
	    {
	      unsigned HOST_WIDE_INT cst;

	      cst = tree_low_cst (and1, 1);
	      cst &= (HOST_WIDE_INT) -1
		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
	      change = (cst == 0);
#ifdef LOAD_EXTEND_OP
	      if (change
		  && !flag_syntax_only
		  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
		      == ZERO_EXTEND))
		{
		  tree uns = unsigned_type_for (TREE_TYPE (and0));
		  and0 = fold_convert_loc (loc, uns, and0);
		  and1 = fold_convert_loc (loc, uns, and1);
		}
#endif
	    }
	  if (change)
	    {
	      tem = force_fit_type_double (type, tree_to_double_int (and1),
					   0, TREE_OVERFLOW (and1));
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_convert_loc (loc, type, and0), tem);
	    }
	}
      /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
	 when one of the new casts will fold away.  Conservatively we assume
	 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST.  */
      if (POINTER_TYPE_P (type)
	  && TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
	  && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
	      || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);

	  return fold_build_pointer_plus_loc
		   (loc, fold_convert_loc (loc, type, arg00), arg01);
	}
8060 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8061 of the same precision, and X is an integer type not narrower than
8062 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8063 if (INTEGRAL_TYPE_P (type
)
8064 && TREE_CODE (op0
) == BIT_NOT_EXPR
8065 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
8066 && CONVERT_EXPR_P (TREE_OPERAND (op0
, 0))
8067 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (op0
)))
8069 tem
= TREE_OPERAND (TREE_OPERAND (op0
, 0), 0);
8070 if (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
8071 && TYPE_PRECISION (type
) <= TYPE_PRECISION (TREE_TYPE (tem
)))
8072 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
8073 fold_convert_loc (loc
, type
, tem
));
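
      /* For example (a sketch, not code from this file): with 32-bit
	 int and unsigned,

	   unsigned u;
	   unsigned v = (unsigned) ~ (int) u;

	 folds to "v = ~u": both casts preserve the precision, so the
	 complement can be computed directly in the outer type.  */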
      /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
	 type of X and Y (integer types only).  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (op0) == MULT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
	{
	  /* Be careful not to introduce new overflows.  */
	  tree mult_type;
	  if (TYPE_OVERFLOW_WRAPS (type))
	    mult_type = type;
	  else
	    mult_type = unsigned_type_for (type);

	  if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
	    {
	      tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
				     fold_convert_loc (loc, mult_type,
						       TREE_OPERAND (op0, 0)),
				     fold_convert_loc (loc, mult_type,
						       TREE_OPERAND (op0, 1)));
	      return fold_convert_loc (loc, type, tem);
	    }
	}

      tem = fold_convert_const (code, type, op0);
      return tem ? tem : NULL_TREE;

    case ADDR_SPACE_CONVERT_EXPR:
      if (integer_zerop (arg0))
	return fold_convert_const (code, type, arg0);
      return NULL_TREE;

    case FIXED_CONVERT_EXPR:
      tem = fold_convert_const (code, type, arg0);
      return tem ? tem : NULL_TREE;

    case VIEW_CONVERT_EXPR:
      if (TREE_TYPE (op0) == type)
	return op0;
      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
	return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
				type, TREE_OPERAND (op0, 0));
      if (TREE_CODE (op0) == MEM_REF)
	return fold_build2_loc (loc, MEM_REF, type,
				TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));

      /* For integral conversions with the same precision or pointer
	 conversions use a NOP_EXPR instead.  */
      if ((INTEGRAL_TYPE_P (type)
	   || POINTER_TYPE_P (type))
	  && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
	      || POINTER_TYPE_P (TREE_TYPE (op0)))
	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
	return fold_convert_loc (loc, type, op0);

      /* Strip inner integral conversions that do not change the precision.  */
      if (CONVERT_EXPR_P (op0)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
	      || POINTER_TYPE_P (TREE_TYPE (op0)))
	  && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
	      || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
	  && (TYPE_PRECISION (TREE_TYPE (op0))
	      == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
	return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
				type, TREE_OPERAND (op0, 0));

      return fold_view_convert_expr (type, op0);

    case NEGATE_EXPR:
      tem = fold_negate_expr (loc, arg0);
      if (tem)
	return fold_convert_loc (loc, type, tem);
      return NULL_TREE;
    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
	       && TREE_CODE (type) == REAL_TYPE)
	{
	  tree targ0 = strip_float_extensions (arg0);
	  if (targ0 != arg0)
	    return fold_convert_loc (loc, type,
				     fold_build1_loc (loc, ABS_EXPR,
						      TREE_TYPE (targ0),
						      targ0));
	}
      /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
      else if (TREE_CODE (arg0) == ABS_EXPR)
	return arg0;
      else if (tree_expr_nonnegative_p (arg0))
	return arg0;

      /* Strip sign ops from argument.  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = fold_strip_sign_ops (arg0);
	  if (tem)
	    return fold_build1_loc (loc, ABS_EXPR, type,
				    fold_convert_loc (loc, type, tem));
	}
      return NULL_TREE;

    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	{
	  tree itype = TREE_TYPE (type);
	  tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
	  tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
				  negate_expr (ipart));
	}
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree itype = TREE_TYPE (type);
	  tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
	  tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
	  return build_complex (type, rpart, negate_expr (ipart));
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      return NULL_TREE;
    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      /* Convert ~ (-A) to A - 1.  */
      else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build2_loc (loc, MINUS_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)),
				build_int_cst (type, 1));
      /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
      else if (INTEGRAL_TYPE_P (type)
	       && ((TREE_CODE (arg0) == MINUS_EXPR
		    && integer_onep (TREE_OPERAND (arg0, 1)))
		   || (TREE_CODE (arg0) == PLUS_EXPR
		       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
	return fold_build1_loc (loc, NEGATE_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));
      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
					 fold_convert_loc (loc, type,
							   TREE_OPERAND (arg0, 0)))))
	return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
					 fold_convert_loc (loc, type,
							   TREE_OPERAND (arg0, 1)))))
	return fold_build2_loc (loc, BIT_XOR_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)), tem);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree *elements;
	  tree elem;
	  unsigned count = VECTOR_CST_NELTS (arg0), i;

	  elements = XALLOCAVEC (tree, count);
	  for (i = 0; i < count; i++)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
		break;
	      elements[i] = elem;
	    }
	  if (i == count)
	    return build_vector (type, elements);
	}
      break;
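
      /* The arithmetic behind the first two rewrites above (a worked
	 sketch, not code from this file): in two's complement,
	 ~x == -x - 1, so

	   ~(-a)    ==  a - 1	    e.g. a = 7:  ~(-7) == 6
	   ~(a - 1) ==  -a	    e.g. a = 7:  ~6 == -7

	 which is why BIT_NOT_EXPR of NEGATE_EXPR becomes MINUS_EXPR
	 and BIT_NOT_EXPR of A - 1 becomes NEGATE_EXPR.  */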
    case TRUTH_NOT_EXPR:
      /* The argument to invert_truthvalue must have Boolean type.  */
      if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
	arg0 = fold_convert_loc (loc, boolean_type_node, arg0);

      /* Note that the operand of this must be an int
	 and its values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language,
	 but we don't handle values other than 1 correctly yet.)  */
      tem = fold_truth_not_expr (loc, arg0);
      if (!tem)
	return NULL_TREE;
      return fold_convert_loc (loc, type, tem);

    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
				     TREE_OPERAND (arg0, 1));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert_loc (loc, type, TREE_REALPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
				 fold_build1_loc (loc, REALPART_EXPR, itype,
						  TREE_OPERAND (arg0, 0)),
				 fold_build1_loc (loc, REALPART_EXPR, itype,
						  TREE_OPERAND (arg0, 1)));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1_loc (loc, REALPART_EXPR, itype,
				 TREE_OPERAND (arg0, 0));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
		fn = mathfn_built_in (type, BUILT_IN_COS);
		if (fn)
		  return build_call_expr_loc (loc, fn, 1,
					      CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      return NULL_TREE;

    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return build_zero_cst (type);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
				     TREE_OPERAND (arg0, 0));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
				 fold_build1_loc (loc, IMAGPART_EXPR, itype,
						  TREE_OPERAND (arg0, 0)),
				 fold_build1_loc (loc, IMAGPART_EXPR, itype,
						  TREE_OPERAND (arg0, 1)));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1_loc (loc, IMAGPART_EXPR, itype,
				 TREE_OPERAND (arg0, 0));
	  return fold_convert_loc (loc, type, negate_expr (tem));
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
		fn = mathfn_built_in (type, BUILT_IN_SIN);
		if (fn)
		  return build_call_expr_loc (loc, fn, 1,
					      CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      return NULL_TREE;

    case INDIRECT_REF:
      /* Fold *&X to X if X is an lvalue.  */
      if (TREE_CODE (op0) == ADDR_EXPR)
	{
	  tree op00 = TREE_OPERAND (op0, 0);
	  if ((TREE_CODE (op00) == VAR_DECL
	       || TREE_CODE (op00) == PARM_DECL
	       || TREE_CODE (op00) == RESULT_DECL)
	      && !TREE_READONLY (op00))
	    return op00;
	}
      return NULL_TREE;
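
      /* A concrete instance of the CEXPI rewrites above (a sketch,
	 not code from this file, using GNU __real__/__imag__ syntax
	 for illustration): since cexpi (x) computes
	 cos (x) + i*sin (x),

	   __real__ __builtin_cexpi (x)    // folds to cos (x)
	   __imag__ __builtin_cexpi (x)    // folds to sin (x)

	 and, per the INDIRECT_REF case, *&v for an ordinary variable
	 v folds back to v itself.  */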
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;
	enum tree_code subcode;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 2);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR))
	  elts += nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else
	  subcode = FLOAT_EXPR;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;
	enum tree_code subcode;

	if (TREE_CODE (op0) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (op0, elts))
	  return NULL_TREE;

	switch (code)
	  {
	  case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
	  case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
	  case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
	  default: gcc_unreachable ();
	  }

	for (i = 1; i < nelts; i++)
	  {
	    elts[0] = const_binop (subcode, elts[0], elts[i]);
	    if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
	      return NULL_TREE;
	    elts[i] = build_zero_cst (TREE_TYPE (type));
	  }

	return build_vector (type, elts);
      }

    default:
      return NULL_TREE;
    } /* switch (code) */
}

/* If the operation was a conversion do _not_ mark a resulting constant
   with TREE_OVERFLOW if the original constant was not.  These conversions
   have implementation defined behavior and retaining the TREE_OVERFLOW
   flag here would confuse later passes such as VRP.  */

tree
fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
				tree type, tree op0)
{
  tree res = fold_unary_loc (loc, code, type, op0);
  if (res
      && TREE_CODE (res) == INTEGER_CST
      && TREE_CODE (op0) == INTEGER_CST
      && CONVERT_EXPR_CODE_P (code))
    TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);

  return res;
}
/* Fold a binary bitwise/truth expression of code CODE and type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting expression.
   ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_truth_andor (location_t loc, enum tree_code code, tree type,
		  tree arg0, tree arg1, tree op0, tree op1)
{
  tree tem;

  /* We only do these simplifications if we are optimizing.  */
  if (!optimize)
    return NULL_TREE;

  /* Check for things like (A || B) && (A || C).  We can convert this
     to A || (B && C).  Note that either operator can be any of the four
     truth and/or operations and the transformation will still be
     valid.   Also note that we only care about order for the
     ANDIF and ORIF operators.  If B contains side effects, this
     might change the truth-value of A.  */
  if (TREE_CODE (arg0) == TREE_CODE (arg1)
      && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
	  || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
	  || TREE_CODE (arg0) == TRUTH_AND_EXPR
	  || TREE_CODE (arg0) == TRUTH_OR_EXPR)
      && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
    {
      tree a00 = TREE_OPERAND (arg0, 0);
      tree a01 = TREE_OPERAND (arg0, 1);
      tree a10 = TREE_OPERAND (arg1, 0);
      tree a11 = TREE_OPERAND (arg1, 1);
      int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
			  || TREE_CODE (arg0) == TRUTH_AND_EXPR)
			 && (code == TRUTH_AND_EXPR
			     || code == TRUTH_OR_EXPR));

      if (operand_equal_p (a00, a10, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
				fold_build2_loc (loc, code, type, a01, a11));
      else if (commutative && operand_equal_p (a00, a11, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
				fold_build2_loc (loc, code, type, a01, a10));
      else if (commutative && operand_equal_p (a01, a10, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
				fold_build2_loc (loc, code, type, a00, a11));

      /* This case is tricky because we must either have commutative
	 operators or else A10 must not have side-effects.  */

      else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
	       && operand_equal_p (a01, a11, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type,
				fold_build2_loc (loc, code, type, a00, a10),
				a01);
    }

  /* See if we can build a range comparison.  */
  if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
    return tem;

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
      if (tem)
	return fold_build2_loc (loc, code, type, tem, arg1);
    }

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
      if (tem)
	return fold_build2_loc (loc, code, type, arg0, tem);
    }

  /* Check for the possibility of merging component references.  If our
     lhs is another similar operation, try to merge its rhs with our
     rhs.  Then try to merge our lhs and rhs.  */
  if (TREE_CODE (arg0) == code
      && 0 != (tem = fold_truth_andor_1 (loc, code, type,
					 TREE_OPERAND (arg0, 1), arg1)))
    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

  if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
    return tem;

  if (LOGICAL_OP_NON_SHORT_CIRCUIT
      && (code == TRUTH_AND_EXPR
	  || code == TRUTH_ANDIF_EXPR
	  || code == TRUTH_OR_EXPR
	  || code == TRUTH_ORIF_EXPR))
    {
      enum tree_code ncode, icode;

      ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
	      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
      icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;

      /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
	 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
	 We don't want to pack more than two leafs to a non-IF AND/OR
	 expression.
	 If tree-code of left-hand operand isn't an AND/OR-IF code and not
	 equal to IF-CODE, then we don't want to add right-hand operand.
	 If the inner right-hand side of left-hand operand has
	 side-effects, or isn't simple, then we can't add to it,
	 as otherwise we might destroy if-sequence.  */
      if (TREE_CODE (arg0) == icode
	  && simple_operand_p_2 (arg1)
	  /* Needed for sequence points to handle trappings, and
	     side-effects.  */
	  && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
	{
	  tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
				 arg1);
	  return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
				  tem);
	}
	/* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
	   or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C).  */
      else if (TREE_CODE (arg1) == icode
	  && simple_operand_p_2 (arg0)
	  /* Needed for sequence points to handle trappings, and
	     side-effects.  */
	  && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_build2_loc (loc, ncode, type,
				 arg0, TREE_OPERAND (arg1, 0));
	  return fold_build2_loc (loc, icode, type, tem,
				  TREE_OPERAND (arg1, 1));
	}
      /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
	 into (A OR B).
	 For sequence point consistency, we need to check for trapping,
	 and side-effects.  */
      else if (code == icode && simple_operand_p_2 (arg0)
	       && simple_operand_p_2 (arg1))
	return fold_build2_loc (loc, ncode, type, arg0, arg1);
    }

  return NULL_TREE;
}
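
/* A small demonstration of the distribution rule handled above (an
   illustrative sketch, not code from this file):

     if ((a || b) && (a || c))  ...	// folds to: if (a || (b && c))

   The rewrite is valid for any mix of the four truth operators, but
   it is only attempted when B has no side effects, since evaluating
   B before the second A could otherwise change what A evaluates
   to.  */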
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code compl_code;

  if (code == MIN_EXPR)
    compl_code = MAX_EXPR;
  else if (code == MAX_EXPR)
    compl_code = MIN_EXPR;
  else
    gcc_unreachable ();

  /* MIN (MAX (a, b), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));

  /* MIN (MAX (b, a), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
      && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));

  /* MIN (a, MAX (a, b)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));

  /* MIN (a, MAX (b, a)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));

  return NULL_TREE;
}
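
/* For instance (a sketch, not code from this file): MIN (MAX (a, b), b)
   folds to b, because whenever MAX (a, b) is at least b, the outer MIN
   with b must produce exactly b; the remaining three patterns handled
   above are the same identity with the operands commuted.  */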
/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
   valid if signed overflow is undefined.  */

static tree
maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
				 tree arg0, tree arg1,
				 bool *strict_overflow_p)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
  int sgn0;
  bool swap = false;

  /* Match A +- CST code arg1 and CST code arg1.  We can change the
     first form only if overflow is undefined.  */
  if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
	 /* In principle pointers also have undefined overflow behavior,
	    but that causes problems elsewhere.  */
	 && !POINTER_TYPE_P (TREE_TYPE (arg0))
	 && (code0 == MINUS_EXPR
	     || code0 == PLUS_EXPR)
	 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	|| code0 == INTEGER_CST))
    return NULL_TREE;

  /* Identify the constant in arg0 and its sign.  */
  if (code0 == INTEGER_CST)
    cst0 = arg0;
  else
    cst0 = TREE_OPERAND (arg0, 1);
  sgn0 = tree_int_cst_sgn (cst0);

  /* Overflowed constants and zero will cause problems.  */
  if (integer_zerop (cst0)
      || TREE_OVERFLOW (cst0))
    return NULL_TREE;

  /* See if we can reduce the magnitude of the constant in
     arg0 by changing the comparison code.  */
  if (code0 == INTEGER_CST)
    {
      /* CST <= arg1  ->  CST-1 < arg1.  */
      if (code == LE_EXPR && sgn0 == 1)
	code = LT_EXPR;
      /* -CST < arg1  ->  -CST-1 <= arg1.  */
      else if (code == LT_EXPR && sgn0 == -1)
	code = LE_EXPR;
      /* CST > arg1  ->  CST-1 >= arg1.  */
      else if (code == GT_EXPR && sgn0 == 1)
	code = GE_EXPR;
      /* -CST >= arg1  ->  -CST-1 > arg1.  */
      else if (code == GE_EXPR && sgn0 == -1)
	code = GT_EXPR;
      else
	return NULL_TREE;
      /* arg1 code' CST' might be more canonical.  */
      swap = true;
    }
  else
    {
      /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
      if (code == LT_EXPR
	  && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = LE_EXPR;
      /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
      else if (code == GT_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = GE_EXPR;
      /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
      else if (code == LE_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = LT_EXPR;
      /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
      else if (code == GE_EXPR
	       && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = GT_EXPR;
      else
	return NULL_TREE;
      *strict_overflow_p = true;
    }

  /* Now build the constant reduced in magnitude.  But not if that
     would produce one outside of its types range.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
      && ((sgn0 == 1
	   && TYPE_MIN_VALUE (TREE_TYPE (cst0))
	   && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
	  || (sgn0 == -1
	      && TYPE_MAX_VALUE (TREE_TYPE (cst0))
	      && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
    /* We cannot swap the comparison here as that would cause us to
       endlessly recurse.  */
    return NULL_TREE;

  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
		       cst0, build_int_cst (TREE_TYPE (cst0), 1));
  if (code0 != INTEGER_CST)
    t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
  t = fold_convert (TREE_TYPE (arg1), t);

  /* If swapping might yield a more canonical form, do so.  */
  if (swap)
    return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);

  return fold_build2_loc (loc, code, type, t, arg1);
}
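
/* Two examples of the canonicalization above (illustrative sketches,
   not code from this file):

     5 <= y	becomes    y > 4	(CST <= arg1 -> CST-1 < arg1,
					 then swapped)
     x - 3 < y	becomes    x - 2 <= y	(A - CST < arg1
					 -> A - CST-1 <= arg1)

   The second form relies on signed overflow being undefined, which is
   why it sets *STRICT_OVERFLOW_P.  */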
/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
   overflow further.  Try to decrease the magnitude of constants involved
   by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
   and put sole constants at the second argument position.
   Returns the canonicalized tree if changed, otherwise NULL_TREE.  */

static tree
maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
			       tree arg0, tree arg1)
{
  tree t;
  bool strict_overflow_p;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when reducing constant in comparison");

  /* Try canonicalization by simplifying arg0.  */
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
				       &strict_overflow_p);
  if (t)
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
      return t;
    }

  /* Try canonicalization by simplifying arg1 using the swapped
     comparison.  */
  code = swap_tree_comparison (code);
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
				       &strict_overflow_p);
  if (t && strict_overflow_p)
    fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
  return t;
}

/* Return whether BASE + OFFSET + BITPOS may wrap around the address
   space.  This is used to avoid issuing overflow warnings for
   expressions like &p->x which can not wrap.  */

static bool
pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
{
  double_int di_offset, total;

  if (!POINTER_TYPE_P (TREE_TYPE (base)))
    return true;

  if (bitpos < 0)
    return true;

  if (offset == NULL_TREE)
    di_offset = double_int_zero;
  else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
    return true;
  else
    di_offset = TREE_INT_CST (offset);

  bool overflow;
  double_int units = double_int::from_uhwi (bitpos / BITS_PER_UNIT);
  total = di_offset.add_with_sign (units, true, &overflow);
  if (overflow)
    return true;

  if (total.high != 0)
    return true;

  HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
  if (size <= 0)
    return true;

  /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
     array.  */
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      HOST_WIDE_INT base_size;

      base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
      if (base_size > 0 && size < base_size)
	size = base_size;
    }

  return total.low > (unsigned HOST_WIDE_INT) size;
}
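
/* As an illustration (a sketch, not code from this file): for

     struct S { int a; int b; } *p;
     ... &p->b ...

   the offset of b is a small constant and the size of *p bounds the
   total, so pointer_may_wrap_p returns false and no spurious overflow
   warning is issued for comparisons involving that address.  */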
/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary should call fold_binary.  Fold a comparison with
   tree code CODE and type TYPE with operands OP0 and OP1.  Return
   the folded comparison or NULL_TREE.  */

static tree
fold_comparison (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  tree arg0, arg1, tem;

  arg0 = op0;
  arg1 = op1;

  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  tem = fold_relational_const (code, type, arg0, arg1);
  if (tem != NULL_TREE)
    return tem;

  /* If one arg is a real or integer constant, put it last.  */
  if (tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);

  /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1.  */
  if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
      && (TREE_CODE (arg1) == INTEGER_CST
	  && !TREE_OVERFLOW (arg1)))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;
      tree variable = TREE_OPERAND (arg0, 0);
      tree lhs;
      int lhs_add;
      lhs_add = TREE_CODE (arg0) != PLUS_EXPR;

      lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
			     TREE_TYPE (arg1), const2, const1);

      /* If the constant operation overflowed this can be
	 simplified as a comparison against INT_MAX/INT_MIN.  */
      if (TREE_CODE (lhs) == INTEGER_CST
	  && TREE_OVERFLOW (lhs))
	{
	  int const1_sgn = tree_int_cst_sgn (const1);
	  enum tree_code code2 = code;

	  /* Get the sign of the constant on the lhs if the
	     operation were VARIABLE + CONST1.  */
	  if (TREE_CODE (arg0) == MINUS_EXPR)
	    const1_sgn = -const1_sgn;

	  /* The sign of the constant determines if we overflowed
	     INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
	     Canonicalize to the INT_MIN overflow by swapping the comparison
	     if necessary.  */
	  if (const1_sgn == -1)
	    code2 = swap_tree_comparison (code);

	  /* We now can look at the canonicalized case
	       VARIABLE + 1  CODE2  INT_MIN
	     and decide on the result.  */
	  if (code2 == LT_EXPR
	      || code2 == LE_EXPR
	      || code2 == EQ_EXPR)
	    return omit_one_operand_loc (loc, type, boolean_false_node, variable);
	  else if (code2 == NE_EXPR
		   || code2 == GE_EXPR
		   || code2 == GT_EXPR)
	    return omit_one_operand_loc (loc, type, boolean_true_node, variable);
	}

      if (TREE_CODE (lhs) == TREE_CODE (arg1)
	  && (TREE_CODE (lhs) != INTEGER_CST
	      || !TREE_OVERFLOW (lhs)))
	{
	  if (code != EQ_EXPR && code != NE_EXPR)
	    fold_overflow_warning ("assuming signed overflow does not occur "
				   "when changing X +- C1 cmp C2 to "
				   "X cmp C1 +- C2",
				   WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type, variable, lhs);
	}
    }
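
  /* A worked instance of the transform above (an illustrative sketch,
     not code from this file): for signed int x,

       if (x + 10 < 20)  ...	// folds to: if (x < 10)

     which is valid because signed overflow is undefined; and when the
     adjusted constant itself overflows, the comparison degenerates to
     a constant result, which the INT_MAX/INT_MIN analysis above
     detects.  */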
  /* For comparisons of pointers we can decompose it to a compile time
     comparison of the base objects and the offsets into the object.
     This requires at least one operand being an ADDR_EXPR or a
     POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == ADDR_EXPR
	  || TREE_CODE (arg1) == ADDR_EXPR
	  || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
    {
      tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
      HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
      enum machine_mode mode;
      int volatilep, unsignedp;
      bool indirect_base0 = false, indirect_base1 = false;

      /* Get base and offset for the access.  Strip ADDR_EXPR for
	 get_inner_reference, but put it back by stripping INDIRECT_REF
	 off the base object if possible.  indirect_baseN will be true
	 if baseN is not an address but refers to the object itself.  */
      base0 = arg0;
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
				       &bitsize, &bitpos0, &offset0, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base0) == INDIRECT_REF)
	    base0 = TREE_OPERAND (base0, 0);
	  else
	    indirect_base0 = true;
	}
      else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	{
	  base0 = TREE_OPERAND (arg0, 0);
	  STRIP_SIGN_NOPS (base0);
	  if (TREE_CODE (base0) == ADDR_EXPR)
	    {
	      base0 = TREE_OPERAND (base0, 0);
	      indirect_base0 = true;
	    }
	  offset0 = TREE_OPERAND (arg0, 1);
	  if (host_integerp (offset0, 0))
	    {
	      HOST_WIDE_INT off = size_low_cst (offset0);
	      if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
				   * BITS_PER_UNIT)
		  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
		{
		  bitpos0 = off * BITS_PER_UNIT;
		  offset0 = NULL_TREE;
		}
	    }
	}

      base1 = arg1;
      if (TREE_CODE (arg1) == ADDR_EXPR)
	{
	  base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
				       &bitsize, &bitpos1, &offset1, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base1) == INDIRECT_REF)
	    base1 = TREE_OPERAND (base1, 0);
	  else
	    indirect_base1 = true;
	}
      else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	{
	  base1 = TREE_OPERAND (arg1, 0);
	  STRIP_SIGN_NOPS (base1);
	  if (TREE_CODE (base1) == ADDR_EXPR)
	    {
	      base1 = TREE_OPERAND (base1, 0);
	      indirect_base1 = true;
	    }
	  offset1 = TREE_OPERAND (arg1, 1);
	  if (host_integerp (offset1, 0))
	    {
	      HOST_WIDE_INT off = size_low_cst (offset1);
	      if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
				   * BITS_PER_UNIT)
		  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
		{
		  bitpos1 = off * BITS_PER_UNIT;
		  offset1 = NULL_TREE;
		}
	    }
	}

      /* A local variable can never be pointed to by
	 the default SSA name of an incoming parameter.  */
      if ((TREE_CODE (arg0) == ADDR_EXPR
	   && indirect_base0
	   && TREE_CODE (base0) == VAR_DECL
	   && auto_var_in_fn_p (base0, current_function_decl)
	   && !indirect_base1
	   && TREE_CODE (base1) == SSA_NAME
	   && SSA_NAME_IS_DEFAULT_DEF (base1)
	   && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
	  || (TREE_CODE (arg1) == ADDR_EXPR
	      && indirect_base1
	      && TREE_CODE (base1) == VAR_DECL
	      && auto_var_in_fn_p (base1, current_function_decl)
	      && !indirect_base0
	      && TREE_CODE (base0) == SSA_NAME
	      && SSA_NAME_IS_DEFAULT_DEF (base0)
	      && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
	{
	  if (code == NE_EXPR)
	    return constant_boolean_node (1, type);
	  else if (code == EQ_EXPR)
	    return constant_boolean_node (0, type);
	}
      /* If we have equivalent bases we might be able to simplify.  */
      else if (indirect_base0 == indirect_base1
	       && operand_equal_p (base0, base1, 0))
	{
	  /* We can fold this expression to a constant if the non-constant
	     offset parts are equal.  */
	  if ((offset0 == offset1
	       || (offset0 && offset1
		   && operand_equal_p (offset0, offset1, 0)))
	      && (code == EQ_EXPR
		  || code == NE_EXPR
		  || (indirect_base0 && DECL_P (base0))
		  || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      if (code != EQ_EXPR
		  && code != NE_EXPR
		  && bitpos0 != bitpos1
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_CONDITIONAL);

	      switch (code)
		{
		case EQ_EXPR:
		  return constant_boolean_node (bitpos0 == bitpos1, type);
		case NE_EXPR:
		  return constant_boolean_node (bitpos0 != bitpos1, type);
		case LT_EXPR:
		  return constant_boolean_node (bitpos0 < bitpos1, type);
		case LE_EXPR:
		  return constant_boolean_node (bitpos0 <= bitpos1, type);
		case GE_EXPR:
		  return constant_boolean_node (bitpos0 >= bitpos1, type);
		case GT_EXPR:
		  return constant_boolean_node (bitpos0 > bitpos1, type);
		default:;
		}
	    }
	  /* We can simplify the comparison to a comparison of the variable
	     offset parts if the constant offset parts are equal.
	     Be careful to use signed sizetype here because otherwise we
	     mess with array offsets in the wrong way.  This is possible
	     because pointer arithmetic is restricted to remain within an
	     object and overflow on pointer differences is undefined as of
	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
	  else if (bitpos0 == bitpos1
		   && ((code == EQ_EXPR || code == NE_EXPR)
		       || (indirect_base0 && DECL_P (base0))
		       || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      /* By converting to signed sizetype we cover middle-end pointer
		 arithmetic which operates on unsigned pointer types of size
		 type size and ARRAY_REF offsets which are properly sign or
		 zero extended from their type in case it is narrower than
		 sizetype.  */
	      if (offset0 == NULL_TREE)
		offset0 = build_int_cst (ssizetype, 0);
	      else
		offset0 = fold_convert_loc (loc, ssizetype, offset0);
	      if (offset1 == NULL_TREE)
		offset1 = build_int_cst (ssizetype, 0);
	      else
		offset1 = fold_convert_loc (loc, ssizetype, offset1);

	      if (code != EQ_EXPR
		  && code != NE_EXPR
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_COMPARISON);

	      return fold_build2_loc (loc, code, type, offset0, offset1);
	    }
	}
      /* For non-equal bases we can simplify if they are addresses
	 of local binding decls or constants.  */
      else if (indirect_base0 && indirect_base1
	       /* We know that !operand_equal_p (base0, base1, 0)
		  because the if condition was false.  But make
		  sure two decls are not the same.  */
	       && base0 != base1
	       && TREE_CODE (arg0) == ADDR_EXPR
	       && TREE_CODE (arg1) == ADDR_EXPR
	       && (((TREE_CODE (base0) == VAR_DECL
		     || TREE_CODE (base0) == PARM_DECL)
		    && (targetm.binds_local_p (base0)
			|| CONSTANT_CLASS_P (base1)))
		   || CONSTANT_CLASS_P (base0))
	       && (((TREE_CODE (base1) == VAR_DECL
		     || TREE_CODE (base1) == PARM_DECL)
		    && (targetm.binds_local_p (base1)
			|| CONSTANT_CLASS_P (base0)))
		   || CONSTANT_CLASS_P (base1)))
	{
	  if (code == EQ_EXPR)
	    return omit_two_operands_loc (loc, type, boolean_false_node,
					  arg0, arg1);
	  else if (code == NE_EXPR)
	    return omit_two_operands_loc (loc, type, boolean_true_node,
					  arg0, arg1);
	}
      /* For equal offsets we can simplify to a comparison of the
	 base addresses.  */
      else if (bitpos0 == bitpos1
	       && (indirect_base0
		   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
	       && (indirect_base1
		   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
	       && ((offset0 == offset1)
		   || (offset0 && offset1
		       && operand_equal_p (offset0, offset1, 0))))
	{
	  if (indirect_base0)
	    base0 = build_fold_addr_expr_loc (loc, base0);
	  if (indirect_base1)
	    base1 = build_fold_addr_expr_loc (loc, base1);
	  return fold_build2_loc (loc, code, type, base0, base1);
	}
    }
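
  /* A concrete case of the decomposition above (an illustrative
     sketch, not code from this file): for

       int a[8];
       ... &a[1] < &a[2] ...

     both operands share the base object a, the offsets are the
     constant byte positions of the two elements, so the comparison
     folds to the compile-time constant 1.  */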
  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = TREE_OPERAND (arg1, 1);
      tree variable1 = TREE_OPERAND (arg0, 0);
      tree variable2 = TREE_OPERAND (arg1, 0);
      tree cst;
      const char * const warnmsg = G_("assuming signed overflow does not "
				      "occur when combining constants around "
				      "a comparison");

      /* Put the constant on the side where it doesn't overflow and is
	 of lower absolute value than before.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const2, const1);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  variable1,
				  fold_build2_loc (loc,
						   TREE_CODE (arg1),
						   TREE_TYPE (arg1),
						   variable2, cst));
	}

      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const1, const2);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  fold_build2_loc (loc, TREE_CODE (arg0),
						   TREE_TYPE (arg0),
						   variable1, cst),
				  variable2);
	}
    }

  /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
     signed arithmetic case.  That form is created by the compiler
     often enough for folding it to be of value.  One example is in
     computing loop trip counts after Operator Strength Reduction.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && TREE_CODE (arg0) == MULT_EXPR
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && integer_zerop (arg1))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;			/* zero */
      tree variable1 = TREE_OPERAND (arg0, 0);
      enum tree_code cmp_code = code;

      /* Handle unfolded multiplication by zero.  */
      if (integer_zerop (const1))
	return fold_build2_loc (loc, cmp_code, type, const1, const2);

      fold_overflow_warning (("assuming signed overflow does not occur when "
			      "eliminating multiplication in comparison "
			      "with zero"),
			     WARN_STRICT_OVERFLOW_COMPARISON);

      /* If const1 is negative we swap the sense of the comparison.  */
      if (tree_int_cst_sgn (const1) < 0)
	cmp_code = swap_tree_comparison (cmp_code);

      return fold_build2_loc (loc, cmp_code, type, variable1, const2);
    }
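
  /* For example (a sketch, not code from this file): for signed x,

       x * 4 > 0	folds to    x > 0
       x * -4 > 0	folds to    x < 0	(sense swapped)

     which is only valid because signed multiplication overflow is
     undefined; the warning above records that assumption.  */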
  tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
  if (tem)
    return tem;

  if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
    {
      tree targ0 = strip_float_extensions (arg0);
      tree targ1 = strip_float_extensions (arg1);
      tree newtype = TREE_TYPE (targ0);

      if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	newtype = TREE_TYPE (targ1);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, newtype, targ0),
				fold_convert_loc (loc, newtype, targ1));

      /* (-a) CMP (-b) -> b CMP a  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
				TREE_OPERAND (arg0, 0));

      if (TREE_CODE (arg1) == REAL_CST)
	{
	  REAL_VALUE_TYPE cst;
	  cst = TREE_REAL_CST (arg1);

	  /* (-a) CMP CST -> a swap(CMP) (-CST)  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR)
	    return fold_build2_loc (loc, swap_tree_comparison (code), type,
				    TREE_OPERAND (arg0, 0),
				    build_real (TREE_TYPE (arg1),
						real_value_negate (&cst)));

	  /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
	  /* a CMP (-0) -> a CMP 0  */
	  if (REAL_VALUE_MINUS_ZERO (cst))
	    return fold_build2_loc (loc, code, type, arg0,
				    build_real (TREE_TYPE (arg1), dconst0));

	  /* x != NaN is always true, other ops are always false.  */
	  if (REAL_VALUE_ISNAN (cst)
	      && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
	      return omit_one_operand_loc (loc, type, tem, arg0);
	    }

	  /* Fold comparisons against infinity.  */
	  if (REAL_VALUE_ISINF (cst)
	      && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = fold_inf_compare (loc, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}

      /* If this is a comparison of a real constant with a PLUS_EXPR
	 or a MINUS_EXPR of a real constant, we can convert it into a
	 comparison with a revised real constant as long as no overflow
	 occurs when unsafe_math_optimizations are enabled.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == REAL_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      arg1, TREE_OPERAND (arg0, 1)))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Likewise, we can simplify a comparison of a real constant with
	 a MINUS_EXPR whose first operand is also a real constant, i.e.
	 (c1 - x) < c2 becomes x > c1-c2.  Reordering is allowed on
	 floating-point types only if -fassociative-math is set.  */
      if (flag_associative_math
	  && TREE_CODE (arg1) == REAL_CST
	  && TREE_CODE (arg0) == MINUS_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
	  && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
				      arg1))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, swap_tree_comparison (code), type,
				TREE_OPERAND (arg0, 1), tem);

      /* Fold comparisons against built-in math functions.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && flag_unsafe_math_optimizations
	  && ! flag_errno_math)
	{
	  enum built_in_function fcode = builtin_mathfn_code (arg0);

	  if (fcode != END_BUILTINS)
	    {
	      tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}
    }
  if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
      && CONVERT_EXPR_P (arg0))
    {
      /* If we are widening one operand of an integer comparison,
	 see if the other operand is similarly being widened.  Perhaps we
	 can do the comparison in the narrower type.  */
      tem = fold_widened_comparison (loc, code, type, arg0, arg1);
      if (tem)
	return tem;

      /* Or if we are changing signedness.  */
      tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
      if (tem)
	return tem;
    }

  /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
     constant, we can simplify it.  */
  if (TREE_CODE (arg1) == INTEGER_CST
      && (TREE_CODE (arg0) == MIN_EXPR
	  || TREE_CODE (arg0) == MAX_EXPR)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
    {
      tem = optimize_minmax_comparison (loc, code, type, op0, op1);
      if (tem)
	return tem;
    }

  /* Simplify comparison of something with itself.  (For IEEE
     floating-point, we can only do some of these simplifications.)  */
  if (operand_equal_p (arg0, arg1, 0))
    {
      switch (code)
	{
	case EQ_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  break;

	case GE_EXPR:
	case LE_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);

	case NE_EXPR:
	  /* For NE, we can only do this simplification if integer
	     or we don't honor IEEE floating point NaNs.  */
	  if (FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    break;
	  /* ... fall through ...  */
	case GT_EXPR:
	case LT_EXPR:
	  return constant_boolean_node (0, type);
	default:
	  gcc_unreachable ();
	}
    }

  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like  ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;
      int save_p = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
	  /* Don't handle degenerate cases here; they should already
	     have been handled anyway.  */
	  && cval1 != 0 && cval2 != 0
	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
	{
	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

	  /* We can't just pass T to eval_subst in case cval1 or cval2
	     was the same as ARG1.  */

	  tree high_result
		= fold_build2_loc (loc, code, type,
				   eval_subst (loc, arg0, cval1, maxval,
					       cval2, minval),
				   arg1);
	  tree equal_result
		= fold_build2_loc (loc, code, type,
				   eval_subst (loc, arg0, cval1, maxval,
					       cval2, maxval),
				   arg1);
	  tree low_result
		= fold_build2_loc (loc, code, type,
				   eval_subst (loc, arg0, cval1, minval,
					       cval2, maxval),
				   arg1);

	  /* All three of these results should be 0 or 1.  Confirm they are.
	     Then use those values to select the proper code to use.  */

	  if (TREE_CODE (high_result) == INTEGER_CST
	      && TREE_CODE (equal_result) == INTEGER_CST
	      && TREE_CODE (low_result) == INTEGER_CST)
	    {
	      /* Make a 3-bit mask with the high-order bit being the
		 value for `>', the next for '=', and the low for '<'.  */
	      switch ((integer_onep (high_result) * 4)
		      + (integer_onep (equal_result) * 2)
		      + integer_onep (low_result))
		{
		case 0:
		  /* Always false.  */
		  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
		case 1:
		  code = LT_EXPR;
		  break;
		case 2:
		  code = EQ_EXPR;
		  break;
		case 3:
		  code = LE_EXPR;
		  break;
		case 4:
		  code = GT_EXPR;
		  break;
		case 5:
		  code = NE_EXPR;
		  break;
		case 6:
		  code = GE_EXPR;
		  break;
		case 7:
		  /* Always true.  */
		  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
		}

	      if (save_p)
		{
		  tem = save_expr (build2 (code, type, cval1, cval2));
		  SET_EXPR_LOCATION (tem, loc);
		  return tem;
		}
	      return fold_build2_loc (loc, code, type, cval1, cval2);
	    }
	}
    }

  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
     into a single range test.  */
  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
      && TREE_CODE (arg1) == INTEGER_CST
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !integer_zerop (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (arg1))
    {
      tem = fold_div_compare (loc, code, type, arg0, arg1);
      if (tem != NULL_TREE)
	return tem;
    }

  /* Fold ~X op ~Y as Y op X.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == BIT_NOT_EXPR)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, code, type,
			      fold_convert_loc (loc, cmp_type,
						TREE_OPERAND (arg1, 0)),
			      TREE_OPERAND (arg0, 0));
    }

  /* Fold ~X op C as X op' ~C, where op' is the swapped comparison.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == INTEGER_CST)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, swap_tree_comparison (code), type,
			      TREE_OPERAND (arg0, 0),
			      fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
					       fold_convert_loc (loc, cmp_type,
								 arg1)));
    }

  return NULL_TREE;
}
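
/* Two quick instances of the BIT_NOT_EXPR comparison rules at the end
   of fold_comparison (illustrative sketches, not code from this file):

     ~x < ~y	folds to    y < x
     ~x == 5	folds to    x == ~5	(i.e. x == -6 in two's complement)

   both exploit that ~ is an order-reversing bijection on the
   underlying integer type.  */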
/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */

static tree
fold_mult_zconjz (location_t loc, tree type, tree expr)
{
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      expr = save_expr (expr);
      rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
      ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
    }

  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2_loc (loc, PLUS_EXPR, itype,
			 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
			 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
  return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
			  build_zero_cst (itype));
}
/* Subroutine of fold_binary.  If P is the value of EXPR, computes
   power-of-two M and (arbitrary) N such that M divides (P-N).  This condition
   guarantees that P and N have the same least significant log2(M) bits.
   N is not otherwise constrained.  In particular, N is not normalized to
   0 <= N < M as is common.  In general, the precise value of P is unknown.
   M is chosen as large as possible such that constant N can be determined.

   Returns M and sets *RESIDUE to N.

   If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
   account.  This is not always possible due to PR 35705.  */

static unsigned HOST_WIDE_INT
get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
				 bool allow_func_align)
{
  enum tree_code code;

  *residue = 0;

  code = TREE_CODE (expr);
  if (code == ADDR_EXPR)
    {
      unsigned int bitalign;
      get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
      *residue /= BITS_PER_UNIT;
      return bitalign / BITS_PER_UNIT;
    }
  else if (code == POINTER_PLUS_EXPR)
    {
      tree op0, op1;
      unsigned HOST_WIDE_INT modulus;
      enum tree_code inner_code;

      op0 = TREE_OPERAND (expr, 0);
      STRIP_NOPS (op0);
      modulus = get_pointer_modulus_and_residue (op0, residue,
						 allow_func_align);

      op1 = TREE_OPERAND (expr, 1);
      STRIP_NOPS (op1);
      inner_code = TREE_CODE (op1);
      if (inner_code == INTEGER_CST)
	{
	  *residue += TREE_INT_CST_LOW (op1);
	  return modulus;
	}
      else if (inner_code == MULT_EXPR)
	{
	  op1 = TREE_OPERAND (op1, 1);
	  if (TREE_CODE (op1) == INTEGER_CST)
	    {
	      unsigned HOST_WIDE_INT align;

	      /* Compute the greatest power-of-2 divisor of op1.  */
	      align = TREE_INT_CST_LOW (op1);
	      align &= -align;

	      /* If align is non-zero and less than *modulus, replace
		 *modulus with align.  If align is 0, then either op1 is 0
		 or the greatest power-of-2 divisor of op1 doesn't fit in an
		 unsigned HOST_WIDE_INT.  In either case, no additional
		 constraint is imposed.  */
	      if (align)
		modulus = MIN (modulus, align);

	      return modulus;
	    }
	}
    }

  /* If we get here, we were unable to determine anything useful about the
     expression.  */
  return 1;
}
/* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
   CONSTRUCTOR ARG into array ELTS and return true if successful.  */

static bool
vec_cst_ctor_to_array (tree arg, tree *elts)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;

  if (TREE_CODE (arg) == VECTOR_CST)
    {
      for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
	elts[i] = VECTOR_CST_ELT (arg, i);
    }
  else if (TREE_CODE (arg) == CONSTRUCTOR)
    {
      constructor_elt *elt;

      FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
	if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
	  return false;
	else
	  elts[i] = elt->value;
    }
  else
    return false;
  for (; i < nelts; i++)
    elts[i]
      = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
  return true;
}

/* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
   selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
   NULL_TREE otherwise.  */

static tree
fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
  tree *elts;
  bool need_ctor = false;

  gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
	      && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
  if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
      || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
    return NULL_TREE;

  elts = XALLOCAVEC (tree, nelts * 3);
  if (!vec_cst_ctor_to_array (arg0, elts)
      || !vec_cst_ctor_to_array (arg1, elts + nelts))
    return NULL_TREE;

  for (i = 0; i < nelts; i++)
    {
      if (!CONSTANT_CLASS_P (elts[sel[i]]))
	need_ctor = true;
      elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
    }

  if (need_ctor)
    {
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, nelts);
      for (i = 0; i < nelts; i++)
	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
      return build_constructor (type, v);
    }

  return build_vector (type, &elts[2 * nelts]);
}
/* Try to fold a pointer difference of type TYPE of two address
   expressions of array references AREF0 and AREF1 using location LOC.
   Return a simplified expression for the difference or NULL_TREE.  */

static tree
fold_addr_of_array_ref_difference (location_t loc, tree type,
				   tree aref0, tree aref1)
{
  tree base0 = TREE_OPERAND (aref0, 0);
  tree base1 = TREE_OPERAND (aref1, 0);
  tree base_offset = build_int_cst (type, 0);

  /* If the bases are array references as well, recurse.  If the bases
     are pointer indirections compute the difference of the pointers.
     If the bases are equal, we are set.  */
  if ((TREE_CODE (base0) == ARRAY_REF
       && TREE_CODE (base1) == ARRAY_REF
       && (base_offset
	   = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
      || (INDIRECT_REF_P (base0)
	  && INDIRECT_REF_P (base1)
	  && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
					     TREE_OPERAND (base0, 0),
					     TREE_OPERAND (base1, 0))))
      || operand_equal_p (base0, base1, 0))
    {
      tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
      tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
      tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
      tree diff = build2 (MINUS_EXPR, type, op0, op1);
      return fold_build2_loc (loc, PLUS_EXPR, type,
			      base_offset,
			      fold_build2_loc (loc, MULT_EXPR, type,
					       diff, esz));
    }
  return NULL_TREE;
}
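
/* For example (an illustrative sketch, not code from this file): the
   address difference underlying

     &a[i] - &a[j]

   is folded by the routine above to the byte difference
   (i - j) * sizeof (*a), recursing through matching outer ARRAY_REFs
   when the bases are themselves array references.  */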
/* If the real or vector real constant CST of type TYPE has an exact
   inverse, return it, else return NULL.  */

static tree
exact_inverse (tree type, tree cst)
{
  REAL_VALUE_TYPE r;
  tree unit_type, *elts;
  enum machine_mode mode;
  unsigned vec_nelts, i;

  switch (TREE_CODE (cst))
    {
    case REAL_CST:
      r = TREE_REAL_CST (cst);

      if (exact_real_inverse (TYPE_MODE (type), &r))
	return build_real (type, r);

      return NULL_TREE;

    case VECTOR_CST:
      {
	vec_nelts = VECTOR_CST_NELTS (cst);
	elts = XALLOCAVEC (tree, vec_nelts);
	unit_type = TREE_TYPE (type);
	mode = TYPE_MODE (unit_type);

	for (i = 0; i < vec_nelts; i++)
	  {
	    r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
	    if (!exact_real_inverse (mode, &r))
	      return NULL_TREE;
	    elts[i] = build_real (unit_type, r);
	  }

	return build_vector (type, elts);
      }

    default:
      return NULL_TREE;
    }
}
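
/* For instance (a sketch, not code from this file): 4.0 has the exact
   inverse 0.25, so a division such as x / 4.0 can later be rewritten
   as x * 0.25 by the division folding that calls this helper, while
   x / 3.0 is left alone because 1/3 is not exactly representable in
   binary floating point.  */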
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  LOC is the location of the resulting expression.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

tree
fold_binary_loc (location_t loc,
		 enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree arg0, arg1, tem;
  tree t1 = NULL_TREE;
  bool strict_overflow_p;

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 2
	      && op0 != NULL_TREE
	      && op1 != NULL_TREE);

  arg0 = op0;
  arg1 = op1;

  /* Strip any conversions that don't change the mode.  This is
     safe for every expression, except for a comparison expression
     because its signedness is derived from its operands.  So, in
     the latter case, only strip conversions that don't change the
     signedness.  MIN_EXPR/MAX_EXPR also need signedness of arguments
     preserved.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation
     of the arguments so that their form can be studied.  In any
     case, the appropriate type conversions should be put back in
     the tree that will get out of the constant folder.  */

  if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
    {
      STRIP_SIGN_NOPS (arg0);
      STRIP_SIGN_NOPS (arg1);
    }
  else
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
    }

  /* Note that TREE_CONSTANT isn't enough: static var addresses are
     constant but we can't do arithmetic on them.  */
  if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
    {
      if (kind == tcc_binary)
	{
	  /* Make sure type and arg0 have the same saturating flag.  */
	  gcc_assert (TYPE_SATURATING (type)
		      == TYPE_SATURATING (TREE_TYPE (arg0)));
	  tem = const_binop (code, arg0, arg1);
	}
      else if (kind == tcc_comparison)
	tem = fold_relational_const (code, type, arg0, arg1);
      else
	tem = NULL_TREE;

      if (tem != NULL_TREE)
	{
	  if (TREE_TYPE (tem) != type)
	    tem = fold_convert_loc (loc, type, tem);
	  return tem;
	}
    }

  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, code, type, op1, op0);
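
  /* For example, the canonicalization above rewrites "5 + x" as
     "x + 5", so the transformations below only need to check for a
     constant in ARG1.  */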
  /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     one of the operands is a comparison and the other is a comparison, a
     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
     code below would make the expression more complex.  Change it to a
     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && TREE_CODE (type) != VECTOR_TYPE
      && ((truth_value_p (TREE_CODE (arg0))
	   && (truth_value_p (TREE_CODE (arg1))
	       || (TREE_CODE (arg1) == BIT_AND_EXPR
		   && integer_onep (TREE_OPERAND (arg1, 1)))))
	  || (truth_value_p (TREE_CODE (arg1))
	      && (truth_value_p (TREE_CODE (arg0))
		  || (TREE_CODE (arg0) == BIT_AND_EXPR
		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
			     : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
			     : TRUTH_XOR_EXPR,
			     boolean_type_node,
			     fold_convert_loc (loc, boolean_type_node, arg0),
			     fold_convert_loc (loc, boolean_type_node, arg1));

      if (code == EQ_EXPR)
	tem = invert_truthvalue_loc (loc, tem);

      return fold_convert_loc (loc, type, tem);
    }

  if (TREE_CODE_CLASS (code) == tcc_binary
      || TREE_CODE_CLASS (code) == tcc_comparison)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
	{
	  tem = fold_build2_loc (loc, code, type,
				 fold_convert_loc (loc, TREE_TYPE (op0),
						   TREE_OPERAND (arg0, 1)), op1);
	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
			     tem);
	}
      if (TREE_CODE (arg1) == COMPOUND_EXPR
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_build2_loc (loc, code, type, op0,
				 fold_convert_loc (loc, TREE_TYPE (op1),
						   TREE_OPERAND (arg1, 1)));
	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
			     tem);
	}

      if (TREE_CODE (arg0) == COND_EXPR
	  || TREE_CODE (arg0) == VEC_COND_EXPR
	  || COMPARISON_CLASS_P (arg0))
	{
	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
						     arg0, arg1,
						     /*cond_first_p=*/1);
	  if (tem != NULL_TREE)
	    return tem;
	}

      if (TREE_CODE (arg1) == COND_EXPR
	  || TREE_CODE (arg1) == VEC_COND_EXPR
	  || COMPARISON_CLASS_P (arg1))
	{
	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
						     arg1, arg0,
						     /*cond_first_p=*/0);
	  if (tem != NULL_TREE)
	    return tem;
	}
    }

  switch (code)
    {
    case MEM_REF:
      /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2].  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
	{
	  tree iref = TREE_OPERAND (arg0, 0);
	  return fold_build2 (MEM_REF, type,
			      TREE_OPERAND (iref, 0),
			      int_const_binop (PLUS_EXPR, arg1,
					       TREE_OPERAND (iref, 1)));
	}

      /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2].  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && handled_component_p (TREE_OPERAND (arg0, 0)))
	{
	  tree base;
	  HOST_WIDE_INT coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
						&coffset);
	  if (!base)
	    return NULL_TREE;
	  return fold_build2 (MEM_REF, type,
			      build_fold_addr_expr (base),
			      int_const_binop (PLUS_EXPR, arg1,
					       size_int (coffset)));
	}

      return NULL_TREE;
    case POINTER_PLUS_EXPR:
      /* 0 +p index -> (type)index */
      if (integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

      /* PTR +p 0 -> PTR */
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
	  && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
	return fold_convert_loc (loc, type,
				 fold_build2_loc (loc, PLUS_EXPR, sizetype,
						  fold_convert_loc (loc, sizetype,
								    arg1),
						  fold_convert_loc (loc, sizetype,
								    arg0)));

      /* (PTR +p B) +p A -> PTR +p (B + A) */
      if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	{
	  tree inner;
	  tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
				   arg01, fold_convert_loc (loc, sizetype, arg1));
	  return fold_convert_loc (loc, type,
				   fold_build_pointer_plus_loc (loc,
								arg00, inner));
	}

      /* PTR_CST +p CST -> CST1 */
      if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
				fold_convert_loc (loc, type, arg1));
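
      /* For example, "(p +p 4) +p 8" re-associates to "p +p 12" by the
	 transformation above, and a zero offset on either side simply
	 drops away.  */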
      /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
	 of the array.  The loop optimizer sometimes produces this kind
	 of expression.  */
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  tem = try_move_mult_to_index (loc, arg0,
					fold_convert_loc (loc,
							  ssizetype, arg1));
	  if (tem)
	    return fold_convert_loc (loc, type, tem);
	}

      return NULL_TREE;

    case PLUS_EXPR:
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, MINUS_EXPR, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				fold_convert_loc (loc, type, arg1),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));

      if (INTEGRAL_TYPE_P (type))
	{
	  /* Convert ~A + 1 to -A.  */
	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
	      && integer_onep (arg1))
	    return fold_build1_loc (loc, NEGATE_EXPR, type,
				    fold_convert_loc (loc, type,
						      TREE_OPERAND (arg0, 0)));

	  /* ~X + X is -1.  */
	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
	      && !TYPE_OVERFLOW_TRAPS (type))
	    {
	      tree tem = TREE_OPERAND (arg0, 0);

	      STRIP_NOPS (tem);
	      if (operand_equal_p (tem, arg1, 0))
		{
		  t1 = build_int_cst_type (type, -1);
		  return omit_one_operand_loc (loc, type, t1, arg1);
		}
	    }

	  /* X + ~X is -1.  */
	  if (TREE_CODE (arg1) == BIT_NOT_EXPR
	      && !TYPE_OVERFLOW_TRAPS (type))
	    {
	      tree tem = TREE_OPERAND (arg1, 0);

	      STRIP_NOPS (tem);
	      if (operand_equal_p (arg0, tem, 0))
		{
		  t1 = build_int_cst_type (type, -1);
		  return omit_one_operand_loc (loc, type, t1, arg0);
		}
	    }

	  /* X + (X / CST) * -CST is X % CST.  */
	  if (TREE_CODE (arg1) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
	      && operand_equal_p (arg0,
				  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
	    {
	      tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
	      tree cst1 = TREE_OPERAND (arg1, 1);
	      tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
					  cst1, cst0);
	      if (sum && integer_zerop (sum))
		return fold_convert_loc (loc, type,
					 fold_build2_loc (loc, TRUNC_MOD_EXPR,
							  TREE_TYPE (arg0), arg0,
							  cst0));
	    }
	}

      /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
	 one.  Make sure the type is not saturating and has the signedness of
	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
	   || TREE_CODE (arg1) == MULT_EXPR)
	  && !TYPE_SATURATING (type)
	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
	{
	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
	  if (tem)
	    return tem;
	}
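
      /* For example, "x*3 + x*5" combines to "x*8" and "x*8 + y*8"
	 factors to "(x + y)*8"; for floating-point types this
	 re-association is only done under -fassociative-math.  */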
      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
	     with a constant, and the two constants have no bits in common,
	     we should treat this as a BIT_IOR_EXPR since this may produce more
	     simplifications.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	      && integer_zerop (const_binop (BIT_AND_EXPR,
					     TREE_OPERAND (arg0, 1),
					     TREE_OPERAND (arg1, 1))))
	    {
	      code = BIT_IOR_EXPR;
	      goto bit_ior;
	    }

	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
	     (plus (plus (mult) (mult)) (foo)) so that we can
	     take advantage of the factoring cases below.  */
	  if (TYPE_OVERFLOW_WRAPS (type)
	      && (((TREE_CODE (arg0) == PLUS_EXPR
		    || TREE_CODE (arg0) == MINUS_EXPR)
		   && TREE_CODE (arg1) == MULT_EXPR)
		  || ((TREE_CODE (arg1) == PLUS_EXPR
		       || TREE_CODE (arg1) == MINUS_EXPR)
		      && TREE_CODE (arg0) == MULT_EXPR)))
	    {
	      tree parg0, parg1, parg, marg;
	      enum tree_code pcode;

	      if (TREE_CODE (arg1) == MULT_EXPR)
		parg = arg0, marg = arg1;
	      else
		parg = arg1, marg = arg0;
	      pcode = TREE_CODE (parg);
	      parg0 = TREE_OPERAND (parg, 0);
	      parg1 = TREE_OPERAND (parg, 1);
	      STRIP_NOPS (parg0);
	      STRIP_NOPS (parg1);

	      if (TREE_CODE (parg0) == MULT_EXPR
		  && TREE_CODE (parg1) != MULT_EXPR)
		return fold_build2_loc (loc, pcode, type,
					fold_build2_loc (loc, PLUS_EXPR, type,
							 fold_convert_loc (loc, type,
									   parg0),
							 fold_convert_loc (loc, type,
									   marg)),
					fold_convert_loc (loc, type, parg1));
	      if (TREE_CODE (parg0) != MULT_EXPR
		  && TREE_CODE (parg1) == MULT_EXPR)
		return
		  fold_build2_loc (loc, PLUS_EXPR, type,
				   fold_convert_loc (loc, type, parg0),
				   fold_build2_loc (loc, pcode, type,
						    fold_convert_loc (loc, type, marg),
						    fold_convert_loc (loc, type,
								      parg1)));
	    }
	}
      else
	{
	  /* See if ARG1 is zero and X + ARG1 reduces to X.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* Likewise if the operands are reversed.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

	  /* Convert X + -C into X - C.  */
	  if (TREE_CODE (arg1) == REAL_CST
	      && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
	    {
	      tem = fold_negate_const (arg1, type);
	      if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
		return fold_build2_loc (loc, MINUS_EXPR, type,
					fold_convert_loc (loc, type, arg0),
					fold_convert_loc (loc, type, tem));
	    }

	  /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
	     to __complex__ ( x, y ).  This is not the same for SNaNs or
	     if signed zeros are involved.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	    {
	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	      tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
	      tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
	      bool arg0rz = false, arg0iz = false;
	      if ((arg0r && (arg0rz = real_zerop (arg0r)))
		  || (arg0i && (arg0iz = real_zerop (arg0i))))
		{
		  tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
		  tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
		  if (arg0rz && arg1i && real_zerop (arg1i))
		    {
		      tree rp = arg1r ? arg1r
			: build1 (REALPART_EXPR, rtype, arg1);
		      tree ip = arg0i ? arg0i
			: build1 (IMAGPART_EXPR, rtype, arg0);
		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		    }
		  else if (arg0iz && arg1r && real_zerop (arg1r))
		    {
		      tree rp = arg0r ? arg0r
			: build1 (REALPART_EXPR, rtype, arg0);
		      tree ip = arg1i ? arg1i
			: build1 (IMAGPART_EXPR, rtype, arg1);
		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		    }
		}
	    }
	  if (flag_unsafe_math_optimizations
	      && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
	      && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
	      && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
	    return tem;

	  /* Convert x+x into x*2.0.  */
	  if (operand_equal_p (arg0, arg1, 0)
	      && SCALAR_FLOAT_TYPE_P (type))
	    return fold_build2_loc (loc, MULT_EXPR, type, arg0,
				    build_real (type, dconst2));

	  /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
	     We associate floats only if the user has specified
	     -fassociative-math.  */
	  if (flag_associative_math
	      && TREE_CODE (arg1) == PLUS_EXPR
	      && TREE_CODE (arg0) != MULT_EXPR)
	    {
	      tree tree10 = TREE_OPERAND (arg1, 0);
	      tree tree11 = TREE_OPERAND (arg1, 1);
	      if (TREE_CODE (tree11) == MULT_EXPR
		  && TREE_CODE (tree10) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
		  return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
		}
	    }
	  /* Convert (b*c + d*e) + a into b*c + (d*e + a).
	     We associate floats only if the user has specified
	     -fassociative-math.  */
	  if (flag_associative_math
	      && TREE_CODE (arg0) == PLUS_EXPR
	      && TREE_CODE (arg1) != MULT_EXPR)
	    {
	      tree tree00 = TREE_OPERAND (arg0, 0);
	      tree tree01 = TREE_OPERAND (arg0, 1);
	      if (TREE_CODE (tree01) == MULT_EXPR
		  && TREE_CODE (tree00) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
		  return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
		}
	    }
	}

     bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
	 is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
	 is a rotate of A by B bits.  */
      {
	enum tree_code code0, code1;
	tree rtype;
	code0 = TREE_CODE (arg0);
	code1 = TREE_CODE (arg1);
	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
	    && operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 0), 0)
	    && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
		TYPE_UNSIGNED (rtype))
	    /* Only create rotates in complete modes.  Other cases are not
	       expanded properly.  */
	    && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
	  {
	    tree tree01, tree11;
	    enum tree_code code01, code11;

	    tree01 = TREE_OPERAND (arg0, 1);
	    tree11 = TREE_OPERAND (arg1, 1);
	    STRIP_NOPS (tree01);
	    STRIP_NOPS (tree11);
	    code01 = TREE_CODE (tree01);
	    code11 = TREE_CODE (tree11);
	    if (code01 == INTEGER_CST
		&& code11 == INTEGER_CST
		&& TREE_INT_CST_HIGH (tree01) == 0
		&& TREE_INT_CST_HIGH (tree11) == 0
		&& ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
		    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
	      {
		tem = build2_loc (loc, LROTATE_EXPR,
				  TREE_TYPE (TREE_OPERAND (arg0, 0)),
				  TREE_OPERAND (arg0, 0),
				  code0 == LSHIFT_EXPR ? tree01 : tree11);
		return fold_convert_loc (loc, type, tem);
	      }
	    else if (code11 == MINUS_EXPR)
	      {
		tree tree110, tree111;
		tree110 = TREE_OPERAND (tree11, 0);
		tree111 = TREE_OPERAND (tree11, 1);
		STRIP_NOPS (tree110);
		STRIP_NOPS (tree111);
		if (TREE_CODE (tree110) == INTEGER_CST
		    && 0 == compare_tree_int (tree110,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree01, tree111, 0))
		  return
		    fold_convert_loc (loc, type,
				      build2 ((code0 == LSHIFT_EXPR
					       ? LROTATE_EXPR
					       : RROTATE_EXPR),
					      TREE_TYPE (TREE_OPERAND (arg0, 0)),
					      TREE_OPERAND (arg0, 0), tree01));
	      }
	    else if (code01 == MINUS_EXPR)
	      {
		tree tree010, tree011;
		tree010 = TREE_OPERAND (tree01, 0);
		tree011 = TREE_OPERAND (tree01, 1);
		STRIP_NOPS (tree010);
		STRIP_NOPS (tree011);
		if (TREE_CODE (tree010) == INTEGER_CST
		    && 0 == compare_tree_int (tree010,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree11, tree011, 0))
		  return fold_convert_loc
		    (loc, type,
		     build2 ((code0 != LSHIFT_EXPR
			      ? LROTATE_EXPR
			      : RROTATE_EXPR),
			     TREE_TYPE (TREE_OPERAND (arg0, 0)),
			     TREE_OPERAND (arg0, 0), tree11));
	      }
	  }
      }
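
      /* For example, with a 32-bit unsigned X, "(X << 3) + (X >> 29)"
	 becomes X rotated left by 3, and "(X << B) + (X >> (32 - B))"
	 becomes X rotated left by B.  */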

    associate:
      /* In most languages, can't associate operations on floats through
	 parentheses.  Rather than remember where the parentheses were, we
	 don't associate floats at all, unless the user has specified
	 -fassociative-math.
	 And, we need to make sure type is not saturating.  */

      if ((! FLOAT_TYPE_P (type) || flag_associative_math)
	  && !TYPE_SATURATING (type))
	{
	  tree var0, con0, lit0, minus_lit0;
	  tree var1, con1, lit1, minus_lit1;
	  tree atype = type;
	  bool ok = true;

	  /* Split both trees into variables, constants, and literals.  Then
	     associate each group together, the constants with literals,
	     then the result with variables.  This increases the chances of
	     literals being recombined later and of generating relocatable
	     expressions for the sum of a constant and literal.  */
	  var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
	  var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
			     code == MINUS_EXPR);
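
	  /* For example, folding "(x + 4) - (y + 6)" splits the operands
	     into variable parts x and y and literal parts 4 and 6; once
	     the groups are recombined below, the result is "(x - y) - 2".  */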
	  /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
	  if (code == MINUS_EXPR)
	    code = PLUS_EXPR;

	  /* With undefined overflow prefer doing association in a type
	     which wraps on overflow, if that is one of the operand types.  */
	  if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
	      || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
	    {
	      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
		  && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
		atype = TREE_TYPE (arg0);
	      else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
		       && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
		atype = TREE_TYPE (arg1);
	      gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
	    }

	  /* With undefined overflow we can only associate constants with one
	     variable, and constants whose association doesn't overflow.  */
	  if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
	      || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
	    {
	      if (var0 && var1)
		{
		  tree tmp0 = var0;
		  tree tmp1 = var1;

		  if (TREE_CODE (tmp0) == NEGATE_EXPR)
		    tmp0 = TREE_OPERAND (tmp0, 0);
		  if (CONVERT_EXPR_P (tmp0)
		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
			  <= TYPE_PRECISION (atype)))
		    tmp0 = TREE_OPERAND (tmp0, 0);
		  if (TREE_CODE (tmp1) == NEGATE_EXPR)
		    tmp1 = TREE_OPERAND (tmp1, 0);
		  if (CONVERT_EXPR_P (tmp1)
		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
			  <= TYPE_PRECISION (atype)))
		    tmp1 = TREE_OPERAND (tmp1, 0);
		  /* The only case we can still associate with two variables
		     is if they are the same, modulo negation and bit-pattern
		     preserving conversions.  */
		  if (!operand_equal_p (tmp0, tmp1, 0))
		    ok = false;
		}
	    }

	  /* Only do something if we found more than two objects.  Otherwise,
	     nothing has changed and we risk infinite recursion.  */
	  if (ok
	      && (2 < ((var0 != 0) + (var1 != 0)
		       + (con0 != 0) + (con1 != 0)
		       + (lit0 != 0) + (lit1 != 0)
		       + (minus_lit0 != 0) + (minus_lit1 != 0))))
	    {
	      bool any_overflows = false;
	      if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
	      if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
	      if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
	      if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
	      var0 = associate_trees (loc, var0, var1, code, atype);
	      con0 = associate_trees (loc, con0, con1, code, atype);
	      lit0 = associate_trees (loc, lit0, lit1, code, atype);
	      minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
					    code, atype);

	      /* Preserve the MINUS_EXPR if the negative part of the literal is
		 greater than the positive part.  Otherwise, the multiplicative
		 folding code (i.e. extract_muldiv) may be fooled in case
		 unsigned constants are subtracted, like in the following
		 example: ((X*2 + 4) - 8U)/2.  */
	      if (minus_lit0 && lit0)
		{
		  if (TREE_CODE (lit0) == INTEGER_CST
		      && TREE_CODE (minus_lit0) == INTEGER_CST
		      && tree_int_cst_lt (lit0, minus_lit0))
		    {
		      minus_lit0 = associate_trees (loc, minus_lit0, lit0,
						    MINUS_EXPR, atype);
		      lit0 = 0;
		    }
		  else
		    {
		      lit0 = associate_trees (loc, lit0, minus_lit0,
					      MINUS_EXPR, atype);
		      minus_lit0 = 0;
		    }
		}

	      /* Don't introduce overflows through reassociation.  */
	      if (!any_overflows
		  && ((lit0 && TREE_OVERFLOW (lit0))
		      || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
		return NULL_TREE;

	      if (minus_lit0)
		{
		  if (con0 == 0)
		    return
		      fold_convert_loc (loc, type,
					associate_trees (loc, var0, minus_lit0,
							 MINUS_EXPR, atype));
		  else
		    {
		      con0 = associate_trees (loc, con0, minus_lit0,
					      MINUS_EXPR, atype);
		      return
			fold_convert_loc (loc, type,
					  associate_trees (loc, var0, con0,
							   PLUS_EXPR, atype));
		    }
		}

	      con0 = associate_trees (loc, con0, lit0, code, atype);
	      return
		fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
							      code, atype));
	    }
	}

      return NULL_TREE;

    case MINUS_EXPR:
      /* Pointer simplifications for subtraction, simple reassociations.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
	{
	  /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
	  if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
	      && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	    {
	      tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	      tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	      tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	      tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
	      return fold_build2_loc (loc, PLUS_EXPR, type,
				      fold_build2_loc (loc, MINUS_EXPR, type,
						       arg00, arg10),
				      fold_build2_loc (loc, MINUS_EXPR, type,
						       arg01, arg11));
	    }
	  /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies.  */
	  else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	    {
	      tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	      tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	      tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
					  fold_convert_loc (loc, type, arg1));
	      if (tmp)
		return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
	    }
	}
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, PLUS_EXPR, type, op0,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && (FLOAT_TYPE_P (type)
	      || INTEGRAL_TYPE_P (type))
	  && negate_expr_p (arg1)
	  && reorder_operands_p (arg0, arg1))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				fold_convert_loc (loc, type,
						  negate_expr (arg1)),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));
      /* Convert -A - 1 to ~A.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (arg0) == NEGATE_EXPR
	  && integer_onep (arg1)
	  && !TYPE_OVERFLOW_TRAPS (type))
	return fold_build1_loc (loc, BIT_NOT_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));

      /* Convert -1 - A to ~A.  */
      if (INTEGRAL_TYPE_P (type)
	  && integer_all_onesp (arg0))
	return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);

      /* X - (X / CST) * CST is X % CST.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (arg1) == MULT_EXPR
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
	  && operand_equal_p (arg0,
			      TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
			      TREE_OPERAND (arg1, 1), 0))
	return
	  fold_convert_loc (loc, type,
			    fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
					     arg0, TREE_OPERAND (arg1, 1)));
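
      /* For example, with X = 17 and CST = 5, "17 - (17/5)*5" is
	 17 - 15 = 2, which is exactly "17 % 5".  */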
      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg0))
	    return negate_expr (fold_convert_loc (loc, type, arg1));
	  if (integer_zerop (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* Fold A - (A & B) into ~B & A.  */
	  if (!TREE_SIDE_EFFECTS (arg0)
	      && TREE_CODE (arg1) == BIT_AND_EXPR)
	    {
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
		{
		  tree arg10 = fold_convert_loc (loc, type,
						 TREE_OPERAND (arg1, 0));
		  return fold_build2_loc (loc, BIT_AND_EXPR, type,
					  fold_build1_loc (loc, BIT_NOT_EXPR,
							   type, arg10),
					  fold_convert_loc (loc, type, arg0));
		}
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
		{
		  tree arg11 = fold_convert_loc (loc,
						 type, TREE_OPERAND (arg1, 1));
		  return fold_build2_loc (loc, BIT_AND_EXPR, type,
					  fold_build1_loc (loc, BIT_NOT_EXPR,
							   type, arg11),
					  fold_convert_loc (loc, type, arg0));
		}
	    }

	  /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
	     any power of 2 minus 1.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0))
	    {
	      tree mask0 = TREE_OPERAND (arg0, 1);
	      tree mask1 = TREE_OPERAND (arg1, 1);
	      tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);

	      if (operand_equal_p (tem, mask1, 0))
		{
		  tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
					 TREE_OPERAND (arg0, 0), mask1);
		  return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
		}
	    }
	}
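
      /* For example, with B = 15 (a power of 2 minus 1) and A = 0x6d,
	 "(A & ~15) - (A & 15)" is 0x60 - 0x0d and "(A ^ 15) - 15" is
	 0x62 - 0x0f; both equal 0x53.  */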
      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
	 ARG0 is zero and X + ARG0 reduces to X, since that would mean
	 (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	return negate_expr (fold_convert_loc (loc, type, arg1));
      /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
	 __complex__ ( x, -y ).  This is not the same for SNaNs or if
	 signed zeros are involved.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	{
	  tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	  tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
	  tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
	  bool arg0rz = false, arg0iz = false;
	  if ((arg0r && (arg0rz = real_zerop (arg0r)))
	      || (arg0i && (arg0iz = real_zerop (arg0i))))
	    {
	      tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
	      tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
	      if (arg0rz && arg1i && real_zerop (arg1i))
		{
		  tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
					     arg1r ? arg1r
					     : build1 (REALPART_EXPR, rtype, arg1));
		  tree ip = arg0i ? arg0i
		    : build1 (IMAGPART_EXPR, rtype, arg0);
		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		}
	      else if (arg0iz && arg1r && real_zerop (arg1r))
		{
		  tree rp = arg0r ? arg0r
		    : build1 (REALPART_EXPR, rtype, arg0);
		  tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
					     arg1i ? arg1i
					     : build1 (IMAGPART_EXPR, rtype, arg1));
		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		}
	    }
	}
      /* Fold &x - &x.  This can happen from &x.foo - &x.
	 This is unsafe for certain floats even in non-IEEE formats.
	 In IEEE, it is unsafe because it does wrong for NaNs.
	 Also note that operand_equal_p is always false if an operand
	 is volatile.  */

      if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
	  && operand_equal_p (arg0, arg1, 0))
	return build_zero_cst (type);

      /* A - B -> A + (-B) if B is easily negatable.  */
      if (negate_expr_p (arg1)
	  && ((FLOAT_TYPE_P (type)
	       /* Avoid this transformation if B is a positive REAL_CST.  */
	       && (TREE_CODE (arg1) != REAL_CST
		   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
	      || INTEGRAL_TYPE_P (type)))
	return fold_build2_loc (loc, PLUS_EXPR, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type,
						  negate_expr (arg1)));

      /* Try folding difference of addresses.  */
      {
	HOST_WIDE_INT diff;

	if ((TREE_CODE (arg0) == ADDR_EXPR
	     || TREE_CODE (arg1) == ADDR_EXPR)
	    && ptr_difference_const (arg0, arg1, &diff))
	  return build_int_cst_type (type, diff);
      }

      /* Fold &a[i] - &a[j] to i-j.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
	{
	  tree tem = fold_addr_of_array_ref_difference (loc, type,
							TREE_OPERAND (arg0, 0),
							TREE_OPERAND (arg1, 0));
	  if (tem)
	    return tem;
	}

      if (FLOAT_TYPE_P (type)
	  && flag_unsafe_math_optimizations
	  && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
	  && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
	  && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
	return tem;

      /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
	 one.  Make sure the type is not saturating and has the signedness of
	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
	   || TREE_CODE (arg1) == MULT_EXPR)
	  && !TYPE_SATURATING (type)
	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
	{
	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
	  if (tem)
	    return tem;
	}

      goto associate;

    case MULT_EXPR:
      /* (-A) * (-B) -> A * B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold_build2_loc (loc, MULT_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)),
				fold_convert_loc (loc, type,
						  negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold_build2_loc (loc, MULT_EXPR, type,
				fold_convert_loc (loc, type,
						  negate_expr (arg0)),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));

      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return omit_one_operand_loc (loc, type, arg1, arg0);
	  if (integer_onep (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
	  /* Transform x * -1 into -x.  Make sure to do the negation
	     on the original operand with conversions not stripped
	     because we can only strip non-sign-changing conversions.  */
	  if (integer_all_onesp (arg1))
	    return fold_convert_loc (loc, type, negate_expr (op0));
	  /* Transform x * -C into -x * C if x is easily negatable.  */
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && tree_int_cst_sgn (arg1) == -1
	      && negate_expr_p (arg0)
	      && (tem = negate_expr (arg1)) != arg1
	      && !TREE_OVERFLOW (tem))
	    return fold_build2_loc (loc, MULT_EXPR, type,
				    fold_convert_loc (loc, type,
						      negate_expr (arg0)),
				    tem);

	  /* (a * (1 << b)) is (a << b)  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg1, 0)))
	    return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
				    TREE_OPERAND (arg1, 1));
	  if (TREE_CODE (arg0) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg0, 0)))
	    return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
				    TREE_OPERAND (arg0, 1));

	  /* (A + A) * C -> A * 2 * C  */
	  if (TREE_CODE (arg0) == PLUS_EXPR
	      && TREE_CODE (arg1) == INTEGER_CST
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg0, 1), 0))
	    return fold_build2_loc (loc, MULT_EXPR, type,
				    omit_one_operand_loc (loc, type,
							  TREE_OPERAND (arg0, 0),
							  TREE_OPERAND (arg0, 1)),
				    fold_build2_loc (loc, MULT_EXPR, type,
						     build_int_cst (type, 2), arg1));

	  strict_overflow_p = false;
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					     &strict_overflow_p)))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying "
					"multiplication"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_convert_loc (loc, type, tem);
	    }

	  /* Optimize z * conj(z) for integer complex numbers.  */
	  if (TREE_CODE (arg0) == CONJ_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    return fold_mult_zconjz (loc, type, arg1);
	  if (TREE_CODE (arg1) == CONJ_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    return fold_mult_zconjz (loc, type, arg0);
	}
      else
	{
	  /* Maybe fold x * 0 to 0.  The expressions aren't the same
	     when x is NaN, since x * 0 is also NaN.  Nor are they the
	     same in modes with signed zeros, since multiplying a
	     negative value by 0 gives -0, not +0.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_zerop (arg1))
	    return omit_one_operand_loc (loc, type, arg1, arg0);
	  /* In IEEE floating point, x*1 is not equivalent to x for snans.
	     Likewise for complex arithmetic with signed zeros.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
		  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	      && real_onep (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* Transform x * -1.0 into -x.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
		  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	      && real_minus_onep (arg1))
	    return fold_convert_loc (loc, type, negate_expr (arg0));

	  /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may change
	     the result for floating point types due to rounding so it is applied
	     only if -fassociative-math was specified.  */
	  if (flag_associative_math
	      && TREE_CODE (arg0) == RDIV_EXPR
	      && TREE_CODE (arg1) == REAL_CST
	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
	    {
	      tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
				      arg1);
	      if (tem)
		return fold_build2_loc (loc, RDIV_EXPR, type, tem,
					TREE_OPERAND (arg0, 1));
	    }

	  /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
	  if (operand_equal_p (arg0, arg1, 0))
	    {
	      tree tem = fold_strip_sign_ops (arg0);
	      if (tem != NULL_TREE)
		{
		  tem = fold_convert_loc (loc, type, tem);
		  return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
		}
	    }

	  /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
	     This is not the same for NaNs or if signed zeros are
	     involved.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && TREE_CODE (arg1) == COMPLEX_CST
	      && real_zerop (TREE_REALPART (arg1)))
	    {
	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	      if (real_onep (TREE_IMAGPART (arg1)))
		return
		  fold_build2_loc (loc, COMPLEX_EXPR, type,
				   negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
								 rtype, arg0)),
				   fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
	      else if (real_minus_onep (TREE_IMAGPART (arg1)))
		return
		  fold_build2_loc (loc, COMPLEX_EXPR, type,
				   fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
				   negate_expr (fold_build1_loc (loc, REALPART_EXPR,
								 rtype, arg0)));
	    }

	  /* Optimize z * conj(z) for floating point complex numbers.
	     Guarded by flag_unsafe_math_optimizations as non-finite
	     imaginary components don't produce scalar results.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == CONJ_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    return fold_mult_zconjz (loc, type, arg1);
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == CONJ_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    return fold_mult_zconjz (loc, type, arg0);
	  if (flag_unsafe_math_optimizations)
	    {
	      enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	      enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	      /* Optimizations of root(...)*root(...).  */
	      if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
		{
		  tree rootfn, arg;
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg10 = CALL_EXPR_ARG (arg1, 0);

		  /* Optimize sqrt(x)*sqrt(x) as x.  */
		  if (BUILTIN_SQRT_P (fcode0)
		      && operand_equal_p (arg00, arg10, 0)
		      && ! HONOR_SNANS (TYPE_MODE (type)))
		    return arg00;

		  /* Optimize root(x)*root(y) as root(x*y).  */
		  rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		  arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
		  return build_call_expr_loc (loc, rootfn, 1, arg);
		}

	      /* Optimize expN(x)*expN(y) as expN(x+y).  */
	      if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
		{
		  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		  tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
					      CALL_EXPR_ARG (arg0, 0),
					      CALL_EXPR_ARG (arg1, 0));
		  return build_call_expr_loc (loc, expfn, 1, arg);
		}

	      /* Optimizations of pow(...)*pow(...).  */
	      if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
		  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
		  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
		{
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg01 = CALL_EXPR_ARG (arg0, 1);
		  tree arg10 = CALL_EXPR_ARG (arg1, 0);
		  tree arg11 = CALL_EXPR_ARG (arg1, 1);

		  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
		  if (operand_equal_p (arg01, arg11, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		      tree arg = fold_build2_loc (loc, MULT_EXPR, type,
						  arg00, arg10);
		      return build_call_expr_loc (loc, powfn, 2, arg, arg01);
		    }

		  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
		  if (operand_equal_p (arg00, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		      tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
						  arg01, arg11);
		      return build_call_expr_loc (loc, powfn, 2, arg00, arg);
		    }
		}

	      /* Optimize tan(x)*cos(x) as sin(x).  */
	      if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
		   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
		   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
		   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
		   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
		   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
		  && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				      CALL_EXPR_ARG (arg1, 0), 0))
		{
		  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);

		  if (sinfn != NULL_TREE)
		    return build_call_expr_loc (loc, sinfn, 1,
						CALL_EXPR_ARG (arg0, 0));
		}

	      /* Optimize x*pow(x,c) as pow(x,c+1).  */
	      if (fcode1 == BUILT_IN_POW
		  || fcode1 == BUILT_IN_POWF
		  || fcode1 == BUILT_IN_POWL)
		{
		  tree arg10 = CALL_EXPR_ARG (arg1, 0);
		  tree arg11 = CALL_EXPR_ARG (arg1, 1);
		  if (TREE_CODE (arg11) == REAL_CST
		      && !TREE_OVERFLOW (arg11)
		      && operand_equal_p (arg0, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
		      REAL_VALUE_TYPE c;
		      tree arg;

		      c = TREE_REAL_CST (arg11);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
		    }
		}

	      /* Optimize pow(x,c)*x as pow(x,c+1).  */
	      if (fcode0 == BUILT_IN_POW
		  || fcode0 == BUILT_IN_POWF
		  || fcode0 == BUILT_IN_POWL)
		{
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg01 = CALL_EXPR_ARG (arg0, 1);
		  if (TREE_CODE (arg01) == REAL_CST
		      && !TREE_OVERFLOW (arg01)
		      && operand_equal_p (arg1, arg00, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		      REAL_VALUE_TYPE c;
		      tree arg;

		      c = TREE_REAL_CST (arg01);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      return build_call_expr_loc (loc, powfn, 2, arg1, arg);
		    }
		}

	      /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x.  */
	      if (!in_gimple_form
		  && optimize
		  && operand_equal_p (arg0, arg1, 0))
		{
		  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

		  if (powfn)
		    {
		      tree arg = build_real (type, dconst2);
		      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
		    }
		}
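
	      /* The folds above each rewrite one math call in terms of
		 another, e.g. sqrt(x)*sqrt(y) as sqrt(x*y), exp(x)*exp(y)
		 as exp(x+y), pow(x,y)*pow(x,z) as pow(x,y+z) and
		 tan(x)*cos(x) as sin(x); all are guarded by
		 flag_unsafe_math_optimizations because rounding and the
		 treatment of NaNs and infinities may differ.  */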
	    }
	}
      goto associate;

    case BIT_IOR_EXPR:
    bit_ior:
      if (integer_all_onesp (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* ~X | X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  t1 = build_zero_cst (type);
	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
	  return omit_one_operand_loc (loc, type, t1, arg1);
	}

      /* X | ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  t1 = build_zero_cst (type);
	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
	  return omit_one_operand_loc (loc, type, t1, arg0);
	}

      /* Canonicalize (X & C1) | C2.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  double_int c1, c2, c3, msk;
	  int width = TYPE_PRECISION (type), w;
	  c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
	  c2 = tree_to_double_int (arg1);

	  /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
	  if ((c1 & c2) == c1)
	    return omit_one_operand_loc (loc, type, arg1,
					 TREE_OPERAND (arg0, 0));

	  msk = double_int::mask (width);

	  /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
	  if (msk.and_not (c1 | c2).is_zero ())
	    return fold_build2_loc (loc, BIT_IOR_EXPR, type,
				    TREE_OPERAND (arg0, 0), arg1);

	  /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
	     unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
	     mode which allows further optimizations.  */
	  c1 &= msk;
	  c2 &= msk;
	  c3 = c1.and_not (c2);
	  for (w = BITS_PER_UNIT;
	       w <= width && w <= HOST_BITS_PER_WIDE_INT;
	       w <<= 1)
	    {
	      unsigned HOST_WIDE_INT mask
		= (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
	      if (((c1.low | c2.low) & mask) == mask
		  && (c1.low & ~mask) == 0 && c1.high == 0)
		{
		  c3 = double_int::from_uhwi (mask);
		  break;
		}
	    }
	  if (c3 != c1)
	    return fold_build2_loc (loc, BIT_IOR_EXPR, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, type,
						     TREE_OPERAND (arg0, 0),
						     double_int_to_tree (type,
									 c3)),
				    arg1);
	}
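
      /* For example, in an 8-bit unsigned type "(x & 0x0f) | 0x1f"
	 becomes just 0x1f because C2 covers every bit of C1, while
	 "(x & 0xf0) | 0x0f" becomes "x | 0x0f" because C1|C2 is ~0.  */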
      /* (X & Y) | Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
      /* (X & Y) | X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
      /* X | (X & Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
      /* X | (Y & X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));

      /* (X & ~Y) | (~X & Y) is X ^ Y */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR)
	{
	  tree a0, a1, l0, l1, n0, n1;

	  a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));

	  l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));

	  n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
	  n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);

	  if ((operand_equal_p (n0, a0, 0)
	       && operand_equal_p (n1, a1, 0))
	      || (operand_equal_p (n0, a1, 0)
		  && operand_equal_p (n1, a0, 0)))
	    return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
	}
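
      /* For example, with X = 0b1100 and Y = 0b1010, X & ~Y is 0b0100
	 and ~X & Y is 0b0010; their OR is 0b0110, which is X ^ Y.  */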
      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;

      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

	 This results in more efficient code for machines without a NAND
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NAND instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return
	    fold_build1_loc (loc, BIT_NOT_EXPR, type,
			     build2 (BIT_AND_EXPR, type,
				     fold_convert_loc (loc, type,
						       TREE_OPERAND (arg0, 0)),
				     fold_convert_loc (loc, type,
						       TREE_OPERAND (arg1, 0))));
	}

      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_all_onesp (arg1))
	return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* ~X ^ X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  t1 = build_zero_cst (type);
	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
	  return omit_one_operand_loc (loc, type, t1, arg1);
	}

      /* X ^ ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  t1 = build_zero_cst (type);
	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
	  return omit_one_operand_loc (loc, type, t1, arg0);
	}

      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
	 with a constant, and the two constants have no bits in common,
	 we should treat this as a BIT_IOR_EXPR since this may produce more
	 simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (BIT_AND_EXPR,
					 TREE_OPERAND (arg0, 1),
					 TREE_OPERAND (arg1, 1))))
	{
	  code = BIT_IOR_EXPR;
	  goto bit_ior;
	}
      /* (X | Y) ^ X -> Y & ~X  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  tree t2 = TREE_OPERAND (arg0, 1);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
				arg1);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				fold_convert_loc (loc, type, t2),
				fold_convert_loc (loc, type, t1));
	  return t1;
	}

      /* (Y | X) ^ X -> Y & ~X  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tree t2 = TREE_OPERAND (arg0, 0);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
				arg1);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				fold_convert_loc (loc, type, t2),
				fold_convert_loc (loc, type, t1));
	  return t1;
	}

      /* X ^ (X | Y) -> Y & ~X  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
	{
	  tree t2 = TREE_OPERAND (arg1, 1);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
				arg0);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				fold_convert_loc (loc, type, t2),
				fold_convert_loc (loc, type, t1));
	  return t1;
	}

      /* X ^ (Y | X) -> Y & ~X  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
	{
	  tree t2 = TREE_OPERAND (arg1, 0);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
				arg0);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				fold_convert_loc (loc, type, t2),
				fold_convert_loc (loc, type, t1));
	  return t1;
	}
      /* Convert ~X ^ ~Y to X ^ Y.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));

      /* Convert ~X ^ C to X ^ ~C.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)),
				fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));

      /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_onep (arg1))
	return fold_build2_loc (loc, EQ_EXPR, type, arg0,
				build_zero_cst (TREE_TYPE (arg0)));

      /* Fold (X & Y) ^ Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg1));
	}
      /* Fold (X & Y) ^ X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg1));
	}
      /* Fold X ^ (X & Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_convert_loc (loc, type, arg0),
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
	}
      /* Fold X ^ (Y & X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg0));
	}

      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_AND_EXPR:
))
11440 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11441 if (integer_zerop (arg1
))
11442 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
11443 if (operand_equal_p (arg0
, arg1
, 0))
11444 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11446 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11447 if ((TREE_CODE (arg0
) == BIT_NOT_EXPR
11448 || TREE_CODE (arg0
) == TRUTH_NOT_EXPR
11449 || (TREE_CODE (arg0
) == EQ_EXPR
11450 && integer_zerop (TREE_OPERAND (arg0
, 1))))
11451 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11452 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg1
);
11454 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11455 if ((TREE_CODE (arg1
) == BIT_NOT_EXPR
11456 || TREE_CODE (arg1
) == TRUTH_NOT_EXPR
11457 || (TREE_CODE (arg1
) == EQ_EXPR
11458 && integer_zerop (TREE_OPERAND (arg1
, 1))))
11459 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11460 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
11462 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11463 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
11464 && TREE_CODE (arg1
) == INTEGER_CST
11465 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11467 tree tmp1
= fold_convert_loc (loc
, type
, arg1
);
11468 tree tmp2
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11469 tree tmp3
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11470 tmp2
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
, tmp2
, tmp1
);
11471 tmp3
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
, tmp3
, tmp1
);
11473 fold_convert_loc (loc
, type
,
11474 fold_build2_loc (loc
, BIT_IOR_EXPR
,
11475 type
, tmp2
, tmp3
));
11478 /* (X | Y) & Y is (X, Y). */
11479 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
11480 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11481 return omit_one_operand_loc (loc
, type
, arg1
, TREE_OPERAND (arg0
, 0));
11482 /* (X | Y) & X is (Y, X). */
11483 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
11484 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
11485 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
11486 return omit_one_operand_loc (loc
, type
, arg1
, TREE_OPERAND (arg0
, 1));
11487 /* X & (X | Y) is (Y, X). */
11488 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
11489 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0)
11490 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 1)))
11491 return omit_one_operand_loc (loc
, type
, arg0
, TREE_OPERAND (arg1
, 1));
11492 /* X & (Y | X) is (Y, X). */
11493 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
11494 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
11495 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
11496 return omit_one_operand_loc (loc
, type
, arg0
, TREE_OPERAND (arg1
, 0));
      /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_onep (arg1))
        {
          tree tem2;
          tem = TREE_OPERAND (arg0, 0);
          tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
          tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
                                  tem, tem2);
          return fold_build2_loc (loc, EQ_EXPR, type, tem2,
                                  build_zero_cst (TREE_TYPE (tem)));
        }

      /* Fold ~X & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && integer_onep (arg1))
        {
          tree tem2;
          tem = TREE_OPERAND (arg0, 0);
          tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
          tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
                                  tem, tem2);
          return fold_build2_loc (loc, EQ_EXPR, type, tem2,
                                  build_zero_cst (TREE_TYPE (tem)));
        }

      /* Fold !X & 1 as X == 0.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && integer_onep (arg1))
        {
          tem = TREE_OPERAND (arg0, 0);
          return fold_build2_loc (loc, EQ_EXPR, type, tem,
                                  build_zero_cst (TREE_TYPE (tem)));
        }
      /* Fold (X ^ Y) & Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg1));
        }

      /* Fold (X ^ Y) & X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg1));
        }

      /* Fold X & (X ^ Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_convert_loc (loc, type, arg0),
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
        }

      /* Fold X & (Y ^ X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg0));
        }
      /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
         multiple of 1 << CST.  */
      if (TREE_CODE (arg1) == INTEGER_CST)
        {
          double_int cst1 = tree_to_double_int (arg1);
          double_int ncst1 = (-cst1).ext (TYPE_PRECISION (TREE_TYPE (arg1)),
                                          TYPE_UNSIGNED (TREE_TYPE (arg1)));
          if ((cst1 & ncst1) == ncst1
              && multiple_of_p (type, arg0,
                                double_int_to_tree (TREE_TYPE (arg1), ncst1)))
            return fold_convert_loc (loc, type, arg0);
        }
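      /* E.g. (x * 4) & -4 folds to x * 4: with CST == 2 the mask -4 only
         clears the low two bits, and x * 4 is always a multiple of 4, so
         the AND cannot change the value.  */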
      /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
         bits from CST2.  */
      if (TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) == MULT_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          int arg1tz
            = tree_to_double_int (TREE_OPERAND (arg0, 1)).trailing_zeros ();
          if (arg1tz > 0)
            {
              double_int arg1mask, masked;
              arg1mask = ~double_int::mask (arg1tz);
              arg1mask = arg1mask.ext (TYPE_PRECISION (type),
                                       TYPE_UNSIGNED (type));
              masked = arg1mask & tree_to_double_int (arg1);
              if (masked.is_zero ())
                return omit_two_operands_loc (loc, type, build_zero_cst (type),
                                              arg0, arg1);
              else if (masked != tree_to_double_int (arg1))
                return fold_build2_loc (loc, code, type, op0,
                                        double_int_to_tree (type, masked));
            }
        }
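      /* E.g. x * 4 has two known trailing zero bits, so (x * 4) & 3
         folds to 0, and (x * 4) & 0xff becomes (x * 4) & 0xfc.  */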
      /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
         ((A & N) + B) & M -> (A + B) & M
         Similarly if (N & M) == 0,
         ((A | N) + B) & M -> (A + B) & M
         and for - instead of + (or unary - instead of +)
         and/or ^ instead of |.
         If B is constant and (B & M) == 0, fold into A & M.  */
      if (host_integerp (arg1, 1))
        {
          unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
          if (~cst1 && (cst1 & (cst1 + 1)) == 0
              && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
              && (TREE_CODE (arg0) == PLUS_EXPR
                  || TREE_CODE (arg0) == MINUS_EXPR
                  || TREE_CODE (arg0) == NEGATE_EXPR)
              && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
                  || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
            {
              tree pmop[2];
              int which = 0;
              unsigned HOST_WIDE_INT cst0;

              /* Now we know that arg0 is (C + D) or (C - D) or
                 -C and arg1 (M) is == (1LL << cst) - 1.
                 Store C into PMOP[0] and D into PMOP[1].  */
              pmop[0] = TREE_OPERAND (arg0, 0);
              pmop[1] = NULL;
              if (TREE_CODE (arg0) != NEGATE_EXPR)
                {
                  pmop[1] = TREE_OPERAND (arg0, 1);
                  which = 1;
                }

              if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
                  || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
                      & cst1) != cst1)
                which = -1;

              for (; which >= 0; which--)
                switch (TREE_CODE (pmop[which]))
                  {
                  case BIT_AND_EXPR:
                  case BIT_IOR_EXPR:
                  case BIT_XOR_EXPR:
                    if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
                        != INTEGER_CST)
                      break;
                    /* tree_low_cst not used, because we don't care about
                       the upper bits.  */
                    cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
                    cst0 &= cst1;
                    if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
                      {
                        if (cst0 != cst1)
                          break;
                      }
                    else if (cst0 != 0)
                      break;
                    /* If C or D is of the form (A & N) where
                       (N & M) == M, or of the form (A | N) or
                       (A ^ N) where (N & M) == 0, replace it with A.  */
                    pmop[which] = TREE_OPERAND (pmop[which], 0);
                    break;
                  case INTEGER_CST:
                    /* If C or D is a N where (N & M) == 0, it can be
                       omitted (assumed 0).  */
                    if ((TREE_CODE (arg0) == PLUS_EXPR
                         || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
                        && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
                      pmop[which] = NULL;
                    break;
                  default:
                    break;
                  }

              /* Only build anything new if we optimized one or both arguments
                 above.  */
              if (pmop[0] != TREE_OPERAND (arg0, 0)
                  || (TREE_CODE (arg0) != NEGATE_EXPR
                      && pmop[1] != TREE_OPERAND (arg0, 1)))
                {
                  tree utype = TREE_TYPE (arg0);
                  if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
                    {
                      /* Perform the operations in a type that has defined
                         overflow behavior.  */
                      utype = unsigned_type_for (TREE_TYPE (arg0));
                      if (pmop[0] != NULL)
                        pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
                      if (pmop[1] != NULL)
                        pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
                    }

                  if (TREE_CODE (arg0) == NEGATE_EXPR)
                    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
                  else if (TREE_CODE (arg0) == PLUS_EXPR)
                    {
                      if (pmop[0] != NULL && pmop[1] != NULL)
                        tem = fold_build2_loc (loc, PLUS_EXPR, utype,
                                               pmop[0], pmop[1]);
                      else if (pmop[0] != NULL)
                        tem = pmop[0];
                      else if (pmop[1] != NULL)
                        tem = pmop[1];
                      else
                        return build_int_cst (type, 0);
                    }
                  else if (pmop[0] == NULL)
                    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
                  else
                    tem = fold_build2_loc (loc, MINUS_EXPR, utype,
                                           pmop[0], pmop[1]);
                  /* TEM is now the new binary +, - or unary - replacement.  */
                  tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
                                         fold_convert_loc (loc, utype, arg1));
                  return fold_convert_loc (loc, type, tem);
                }
            }
        }
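      /* E.g. with M == 0xff and N == 0xff, ((a & 0xff) + b) & 0xff
         becomes (a + b) & 0xff: bits of A above the mask cannot affect
         the masked bits of the sum.  */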
      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
        return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
          && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
        {
          unsigned int prec
            = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

          if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
              && (~TREE_INT_CST_LOW (arg1)
                  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
            return
              fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
        }
      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

         This results in more efficient code for machines without a NOR
         instruction.  Combine will canonicalize to the first form
         which will allow use of NOR instructions provided by the
         backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        {
          return fold_build1_loc (loc, BIT_NOT_EXPR, type,
                                  build2 (BIT_IOR_EXPR, type,
                                          fold_convert_loc (loc, type,
                                                            TREE_OPERAND (arg0, 0)),
                                          fold_convert_loc (loc, type,
                                                            TREE_OPERAND (arg1, 0))));
        }
      /* If arg0 is derived from the address of an object or function, we may
         be able to fold this expression using the object or function's
         alignment.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
        {
          unsigned HOST_WIDE_INT modulus, residue;
          unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);

          modulus = get_pointer_modulus_and_residue (arg0, &residue,
                                                     integer_onep (arg1));

          /* This works because modulus is a power of 2.  If this weren't the
             case, we'd have to replace it by its greatest power-of-2
             divisor: modulus & -modulus.  */
          if (low < modulus)
            return build_int_cst (type, residue & low);
        }
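      /* E.g. if arg0 is the address of a variable with 8-byte alignment,
         the modulus is 8 and the residue 0, so ANDing that address with 7
         folds to the constant 0.  */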
      /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
              (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
         if the new mask might be further optimized.  */
      if ((TREE_CODE (arg0) == LSHIFT_EXPR
           || TREE_CODE (arg0) == RSHIFT_EXPR)
          && host_integerp (TREE_OPERAND (arg0, 1), 1)
          && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
          && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
             < TYPE_PRECISION (TREE_TYPE (arg0))
          && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
          && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
        {
          unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
          unsigned HOST_WIDE_INT mask
            = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
          unsigned HOST_WIDE_INT newmask, zerobits = 0;
          tree shift_type = TREE_TYPE (arg0);

          if (TREE_CODE (arg0) == LSHIFT_EXPR)
            zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
          else if (TREE_CODE (arg0) == RSHIFT_EXPR
                   && TYPE_PRECISION (TREE_TYPE (arg0))
                      == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
            {
              unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
              tree arg00 = TREE_OPERAND (arg0, 0);
              /* See if more bits can be proven as zero because of
                 zero extension.  */
              if (TREE_CODE (arg00) == NOP_EXPR
                  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
                {
                  tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
                  if (TYPE_PRECISION (inner_type)
                      == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
                      && TYPE_PRECISION (inner_type) < prec)
                    {
                      prec = TYPE_PRECISION (inner_type);
                      /* See if we can shorten the right shift.  */
                      if (shiftc < prec)
                        shift_type = inner_type;
                    }
                }
              zerobits = ~(unsigned HOST_WIDE_INT) 0;
              zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
              zerobits <<= prec - shiftc;
              /* For arithmetic shift if sign bit could be set, zerobits
                 can contain actually sign bits, so no transformation is
                 possible, unless MASK masks them all away.  In that
                 case the shift needs to be converted into logical shift.  */
              if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
                  && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
                {
                  if ((mask & zerobits) == 0)
                    shift_type = unsigned_type_for (TREE_TYPE (arg0));
                  else
                    zerobits = 0;
                }
            }

          /* ((X << 16) & 0xff00) is (X, 0).  */
          if ((mask & zerobits) == mask)
            return omit_one_operand_loc (loc, type,
                                         build_int_cst (type, 0), arg0);

          newmask = mask | zerobits;
          if (newmask != mask && (newmask & (newmask + 1)) == 0)
            {
              unsigned int prec;

              /* Only do the transformation if NEWMASK is some integer
                 mode's mask.  */
              for (prec = BITS_PER_UNIT;
                   prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
                if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
                  break;
              if (prec < HOST_BITS_PER_WIDE_INT
                  || newmask == ~(unsigned HOST_WIDE_INT) 0)
                {
                  tree newmaskt;

                  if (shift_type != TREE_TYPE (arg0))
                    {
                      tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
                                             fold_convert_loc (loc, shift_type,
                                                               TREE_OPERAND (arg0, 0)),
                                             TREE_OPERAND (arg0, 1));
                      tem = fold_convert_loc (loc, type, tem);
                    }
                  else
                    tem = op0;
                  newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
                  if (!tree_int_cst_equal (newmaskt, arg1))
                    return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
                }
            }
        }
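      /* E.g. for a 32-bit unsigned x, (x >> 24) & 0xff00 folds to 0: the
         logical shift leaves only the low 8 bits possibly nonzero, and
         the mask selects none of them.  */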
      goto associate;

    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
         of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
          && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
          && real_zerop (arg1))
        return NULL_TREE;

      /* Optimize A / A to 1.0 if we don't care about
         NaNs or Infinities.  Skip the transformation
         for non-real operands.  */
      if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
          && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
          && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
          && operand_equal_p (arg0, arg1, 0))
        {
          tree r = build_real (TREE_TYPE (arg0), dconst1);

          return omit_two_operands_loc (loc, type, r, arg0, arg1);
        }

      /* The complex version of the above A / A optimization.  */
      if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
          && operand_equal_p (arg0, arg1, 0))
        {
          tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
          if (! HONOR_NANS (TYPE_MODE (elem_type))
              && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
            {
              tree r = build_real (elem_type, dconst1);
              /* omit_two_operands will call fold_convert for us.  */
              return omit_two_operands_loc (loc, type, r, arg0, arg1);
            }
        }
      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold_build2_loc (loc, RDIV_EXPR, type,
                                TREE_OPERAND (arg0, 0),
                                negate_expr (arg1));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold_build2_loc (loc, RDIV_EXPR, type,
                                negate_expr (arg0),
                                TREE_OPERAND (arg1, 0));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && real_onep (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && real_minus_onep (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type,
                                                      negate_expr (arg0)));
      /* If ARG1 is a constant, we can convert this to a multiply by the
         reciprocal.  This does not have the same rounding properties,
         so only do this if -freciprocal-math.  We can actually
         always safely do it if ARG1 is a power of two, but it's hard to
         tell if it is or not in a portable manner.  */
      if (optimize
          && (TREE_CODE (arg1) == REAL_CST
              || (TREE_CODE (arg1) == COMPLEX_CST
                  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
              || (TREE_CODE (arg1) == VECTOR_CST
                  && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
        {
          if (flag_reciprocal_math
              && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
            return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
          /* Find the reciprocal if optimizing and the result is exact.
             TODO: Complex reciprocal not implemented.  */
          if (TREE_CODE (arg1) != COMPLEX_CST)
            {
              tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);

              if (inverse)
                return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
            }
        }
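      /* E.g. x / 8.0 becomes x * 0.125, since that reciprocal is exact;
         x / 3.0 becomes x * (1.0/3.0) only under -freciprocal-math,
         because the inexact reciprocal changes the rounding.  */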
      /* Convert A/B/C to A/(B*C).  */
      if (flag_reciprocal_math
          && TREE_CODE (arg0) == RDIV_EXPR)
        return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
                                fold_build2_loc (loc, MULT_EXPR, type,
                                                 TREE_OPERAND (arg0, 1), arg1));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_reciprocal_math
          && TREE_CODE (arg1) == RDIV_EXPR)
        return fold_build2_loc (loc, MULT_EXPR, type,
                                fold_build2_loc (loc, RDIV_EXPR, type, arg0,
                                                 TREE_OPERAND (arg1, 0)),
                                TREE_OPERAND (arg1, 1));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_reciprocal_math
          && TREE_CODE (arg1) == MULT_EXPR
          && TREE_CODE (arg0) == REAL_CST
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
        {
          tree tem = const_binop (RDIV_EXPR, arg0,
                                  TREE_OPERAND (arg1, 1));
          if (tem)
            return fold_build2_loc (loc, RDIV_EXPR, type, tem,
                                    TREE_OPERAND (arg1, 0));
        }
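      /* E.g. under -freciprocal-math, 10.0 / (x * 2.0) is rewritten
         as 5.0 / x.  */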
      if (flag_unsafe_math_optimizations)
        {
          enum built_in_function fcode0 = builtin_mathfn_code (arg0);
          enum built_in_function fcode1 = builtin_mathfn_code (arg1);

          /* Optimize sin(x)/cos(x) as tan(x).  */
          if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
               || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
               || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
              && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                  CALL_EXPR_ARG (arg1, 0), 0))
            {
              tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

              if (tanfn != NULL_TREE)
                return build_call_expr_loc (loc, tanfn, 1,
                                            CALL_EXPR_ARG (arg0, 0));
            }

          /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
          if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
               || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
               || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
              && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                  CALL_EXPR_ARG (arg1, 0), 0))
            {
              tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

              if (tanfn != NULL_TREE)
                {
                  tree tmp = build_call_expr_loc (loc, tanfn, 1,
                                                  CALL_EXPR_ARG (arg0, 0));
                  return fold_build2_loc (loc, RDIV_EXPR, type,
                                          build_real (type, dconst1), tmp);
                }
            }

          /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
             NaNs or Infinities.  */
          if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
               || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
               || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
            {
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
              tree arg01 = CALL_EXPR_ARG (arg1, 0);

              if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
                  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
                  && operand_equal_p (arg00, arg01, 0))
                {
                  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

                  if (cosfn != NULL_TREE)
                    return build_call_expr_loc (loc, cosfn, 1, arg00);
                }
            }

          /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
             NaNs or Infinities.  */
          if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
               || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
               || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
            {
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
              tree arg01 = CALL_EXPR_ARG (arg1, 0);

              if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
                  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
                  && operand_equal_p (arg00, arg01, 0))
                {
                  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

                  if (cosfn != NULL_TREE)
                    {
                      tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
                      return fold_build2_loc (loc, RDIV_EXPR, type,
                                              build_real (type, dconst1),
                                              tmp);
                    }
                }
            }

          /* Optimize pow(x,c)/x as pow(x,c-1).  */
          if (fcode0 == BUILT_IN_POW
              || fcode0 == BUILT_IN_POWF
              || fcode0 == BUILT_IN_POWL)
            {
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
              tree arg01 = CALL_EXPR_ARG (arg0, 1);
              if (TREE_CODE (arg01) == REAL_CST
                  && !TREE_OVERFLOW (arg01)
                  && operand_equal_p (arg1, arg00, 0))
                {
                  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  REAL_VALUE_TYPE c;
                  tree arg;

                  c = TREE_REAL_CST (arg01);
                  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
                  arg = build_real (type, c);
                  return build_call_expr_loc (loc, powfn, 2, arg1, arg);
                }
            }

          /* Optimize a/root(b/c) into a*root(c/b).  */
          if (BUILTIN_ROOT_P (fcode1))
            {
              tree rootarg = CALL_EXPR_ARG (arg1, 0);

              if (TREE_CODE (rootarg) == RDIV_EXPR)
                {
                  tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
                  tree b = TREE_OPERAND (rootarg, 0);
                  tree c = TREE_OPERAND (rootarg, 1);

                  tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);

                  tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
                  return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
                }
            }

          /* Optimize x/expN(y) into x*expN(-y).  */
          if (BUILTIN_EXPONENT_P (fcode1))
            {
              tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
              tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
              arg1 = build_call_expr_loc (loc,
                                          expfn, 1,
                                          fold_convert_loc (loc, type, arg));
              return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
            }

          /* Optimize x/pow(y,z) into x*pow(y,-z).  */
          if (fcode1 == BUILT_IN_POW
              || fcode1 == BUILT_IN_POWF
              || fcode1 == BUILT_IN_POWL)
            {
              tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
              tree arg10 = CALL_EXPR_ARG (arg1, 0);
              tree arg11 = CALL_EXPR_ARG (arg1, 1);
              tree neg11 = fold_convert_loc (loc, type,
                                             negate_expr (arg11));
              arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
              return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
            }
        }
      return NULL_TREE;
    case TRUNC_DIV_EXPR:
      /* Optimize (X & (-A)) / A where A is a power of 2,
         to X >> log2(A).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
          && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
        {
          tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
                                      arg1, TREE_OPERAND (arg0, 1));
          if (sum && integer_zerop (sum))
            {
              unsigned long pow2;

              if (TREE_INT_CST_LOW (arg1))
                pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
              else
                pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
                       + HOST_BITS_PER_WIDE_INT;

              return fold_build2_loc (loc, RSHIFT_EXPR, type,
                                      TREE_OPERAND (arg0, 0),
                                      build_int_cst (integer_type_node, pow2));
            }
        }
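      /* E.g. for signed x, (x & -16) / 16 becomes x >> 4: the AND makes
         the dividend an exact multiple of 16, so truncating division and
         the arithmetic shift agree even for negative x.  */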
      /* Fall through */

    case FLOOR_DIV_EXPR:
      /* Simplify A / (B << N) where A and B are positive and B is
         a power of 2, to A >> (N + log2(B)).  */
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == LSHIFT_EXPR
          && (TYPE_UNSIGNED (type)
              || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
        {
          tree sval = TREE_OPERAND (arg1, 0);
          if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
            {
              tree sh_cnt = TREE_OPERAND (arg1, 1);
              unsigned long pow2;

              if (TREE_INT_CST_LOW (sval))
                pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
              else
                pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
                       + HOST_BITS_PER_WIDE_INT;

              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying A / (B << N)"),
                                       WARN_STRICT_OVERFLOW_MISC);

              sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
                                        sh_cnt,
                                        build_int_cst (TREE_TYPE (sh_cnt),
                                                       pow2));
              return fold_build2_loc (loc, RSHIFT_EXPR, type,
                                      fold_convert_loc (loc, type, arg0),
                                      sh_cnt);
            }
        }

      /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
         TRUNC_DIV_EXPR.  Rewrite into the latter in this case.  */
      if (INTEGRAL_TYPE_P (type)
          && TYPE_UNSIGNED (type)
          && code == FLOOR_DIV_EXPR)
        return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);

      /* Fall through */
    case ROUND_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg1))
        return NULL_TREE;

      /* X / -1 is -X.  */
      if (!TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
          && TREE_INT_CST_HIGH (arg1) == -1)
        return fold_convert_loc (loc, type, negate_expr (arg0));

      /* Convert -A / -B to A / B when the type is signed and overflow is
         undefined.  */
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
          && TREE_CODE (arg0) == NEGATE_EXPR
          && negate_expr_p (arg1))
        {
          if (INTEGRAL_TYPE_P (type))
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when distributing negation across "
                                    "division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_build2_loc (loc, code, type,
                                  fold_convert_loc (loc, type,
                                                    TREE_OPERAND (arg0, 0)),
                                  fold_convert_loc (loc, type,
                                                    negate_expr (arg1)));
        }
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
          && TREE_CODE (arg1) == NEGATE_EXPR
          && negate_expr_p (arg0))
        {
          if (INTEGRAL_TYPE_P (type))
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when distributing negation across "
                                    "division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_build2_loc (loc, code, type,
                                  fold_convert_loc (loc, type,
                                                    negate_expr (arg0)),
                                  fold_convert_loc (loc, type,
                                                    TREE_OPERAND (arg1, 0)));
        }

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
         operation, EXACT_DIV_EXPR.

         Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
         At one time others generated faster code, it's not clear if they do
         after the last round to changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
          && multiple_of_p (type, arg0, arg1))
        return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                         &strict_overflow_p)))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_convert_loc (loc, type, tem);
        }

      return NULL_TREE;
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* X % 1 is always zero, but be sure to preserve any side
         effects in X.  */
      if (integer_onep (arg1))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* X % 0, return X % 0 unchanged so that we can get the
         proper warnings and errors.  */
      if (integer_zerop (arg1))
        return NULL_TREE;

      /* 0 % X is always zero, but be sure to preserve any side
         effects in X.  Place this after checking for X == 0.  */
      if (integer_zerop (arg0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

      /* X % -1 is zero.  */
      if (!TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
          && TREE_INT_CST_HIGH (arg1) == -1)
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* X % -C is the same as X % C.  */
      if (code == TRUNC_MOD_EXPR
          && !TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && !TREE_OVERFLOW (arg1)
          && TREE_INT_CST_HIGH (arg1) < 0
          && !TYPE_OVERFLOW_TRAPS (type)
          /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
          && !sign_bit_p (arg1, arg1))
        return fold_build2_loc (loc, code, type,
                                fold_convert_loc (loc, type, arg0),
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg1)));

      /* X % -Y is the same as X % Y.  */
      if (code == TRUNC_MOD_EXPR
          && !TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == NEGATE_EXPR
          && !TYPE_OVERFLOW_TRAPS (type))
        return fold_build2_loc (loc, code, type,
                                fold_convert_loc (loc, type, arg0),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                         &strict_overflow_p)))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying modulus"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_convert_loc (loc, type, tem);
        }

      /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
         i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
      if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
          && (TYPE_UNSIGNED (type)
              || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
        {
          tree c = arg1;
          /* Also optimize A % (C << N) where C is a power of 2,
             to A & ((C << N) - 1).  */
          if (TREE_CODE (arg1) == LSHIFT_EXPR)
            c = TREE_OPERAND (arg1, 0);

          if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
            {
              tree mask
                = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
                                   build_int_cst (TREE_TYPE (arg1), 1));
              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying "
                                        "X % (power of two)"),
                                       WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                      fold_convert_loc (loc, type, arg0),
                                      fold_convert_loc (loc, type, mask));
            }
        }
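      /* E.g. for unsigned x, x % 8 becomes x & 7, and x % (4 << n)
         becomes x & ((4 << n) - 1).  */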
      return NULL_TREE;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_all_onesp (arg0))
        return omit_one_operand_loc (loc, type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
          && tree_expr_nonnegative_p (arg1))
        return omit_one_operand_loc (loc, type, arg0, arg1);
      /* ... fall through ...  */

    case LSHIFT_EXPR:
    shift:
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg0))
        return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Since negative shift count is not well-defined,
         don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
        return NULL_TREE;
      /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
      if (TREE_CODE (op0) == code && host_integerp (arg1, false)
          && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
          && host_integerp (TREE_OPERAND (arg0, 1), false)
          && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
        {
          HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
                               + TREE_INT_CST_LOW (arg1));

          /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
             being well defined.  */
          if (low >= TYPE_PRECISION (type))
            {
              if (code == LROTATE_EXPR || code == RROTATE_EXPR)
                low = low % TYPE_PRECISION (type);
              else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
                return omit_one_operand_loc (loc, type,
                                             build_int_cst (type, 0),
                                             TREE_OPERAND (arg0, 0));
              else
                low = TYPE_PRECISION (type) - 1;
            }

          return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                                  build_int_cst (type, low));
        }
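      /* E.g. (x << 3) << 2 becomes x << 5, and for signed 32-bit x,
         (x >> 20) >> 20 becomes x >> 31, since the combined count is
         clamped to the precision minus one for arithmetic shifts.  */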
      /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
         into x & ((unsigned)-1 >> c) for unsigned types.  */
      if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
           || (TYPE_UNSIGNED (type)
               && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
          && host_integerp (arg1, false)
          && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
          && host_integerp (TREE_OPERAND (arg0, 1), false)
          && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
        {
          HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
          HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
          tree lshift;
          tree arg00;

          if (low0 == low1)
            {
              arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));

              lshift = build_int_cst (type, -1);
              lshift = int_const_binop (code, lshift, arg1);

              return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
            }
        }
      /* Rewrite an LROTATE_EXPR by a constant into an
         RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
        {
          tree tem = build_int_cst (TREE_TYPE (arg1),
                                    TYPE_PRECISION (type));
          tem = const_binop (MINUS_EXPR, tem, arg1);
          return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
        }

      /* If we have a rotate of a bit operation with the rotate count and
         the second operand of the bit operation both constant,
         permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
          && (TREE_CODE (arg0) == BIT_AND_EXPR
              || TREE_CODE (arg0) == BIT_IOR_EXPR
              || TREE_CODE (arg0) == BIT_XOR_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return fold_build2_loc (loc, TREE_CODE (arg0), type,
                                fold_build2_loc (loc, code, type,
                                                 TREE_OPERAND (arg0, 0), arg1),
                                fold_build2_loc (loc, code, type,
                                                 TREE_OPERAND (arg0, 1), arg1));

      /* Two consecutive rotates adding up to the precision of the
         type can be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) == RROTATE_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (arg1) == 0
          && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
          && ((TREE_INT_CST_LOW (arg1)
               + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
              == (unsigned int) TYPE_PRECISION (type)))
        return TREE_OPERAND (arg0, 0);
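      /* E.g. in a 32-bit type, rotating x left by 5 is rewritten as
         rotating right by 27, and two right-rotates by 5 and by 27
         cancel back to x because the counts sum to the precision.  */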
      /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
              (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
         if the latter can be further optimized.  */
      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
          && TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree mask = fold_build2_loc (loc, code, type,
                                       fold_convert_loc (loc, type,
                                                         TREE_OPERAND (arg0, 1)),
                                       arg1);
          tree shift = fold_build2_loc (loc, code, type,
                                        fold_convert_loc (loc, type,
                                                          TREE_OPERAND (arg0, 0)),
                                        arg1);
          tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
          if (tem)
            return tem;
        }

      return NULL_TREE;

    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand_loc (loc, type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
          && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
      if (tem)
        return tem;
      goto associate;

    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand_loc (loc, type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
          && TYPE_MAX_VALUE (type)
          && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
      if (tem)
        return tem;
      goto associate;
    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
        return fold_convert_loc (loc, type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
          /* Preserve sequence points.  */
          && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
         must be evaluated.  */
      if (integer_zerop (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
         case will be handled here.  */
      if (integer_zerop (arg0))
        return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
         means A >= Y && A != MAX, but in this case we know that
         A < X <= MAX.  */

      if (!TREE_SIDE_EFFECTS (arg0)
          && !TREE_SIDE_EFFECTS (arg1))
        {
          tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
          if (tem && !operand_equal_p (tem, arg0, 0))
            return fold_build2_loc (loc, code, type, tem, arg1);

          tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
          if (tem && !operand_equal_p (tem, arg1, 0))
            return fold_build2_loc (loc, code, type, arg0, tem);
        }
      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
          != NULL_TREE)
        return tem;

      return NULL_TREE;
    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or true.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return fold_convert_loc (loc, type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
          /* Preserve sequence points.  */
          && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant true, result is true, but we must
         evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
         TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X || X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg1);
      /* X || !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* (X && !Y) || (!X && Y) is X ^ Y */
      if (TREE_CODE (arg0) == TRUTH_AND_EXPR
          && TREE_CODE (arg1) == TRUTH_AND_EXPR)
        {
          tree a0, a1, l0, l1, n0, n1;

          a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
          a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));

          l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));

          n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
          n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);

          if ((operand_equal_p (n0, a0, 0)
               && operand_equal_p (n1, a1, 0))
              || (operand_equal_p (n0, a1, 0)
                  && operand_equal_p (n1, a0, 0)))
            return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
        }
      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
          != NULL_TREE)
        return tem;

      return NULL_TREE;
    case TRUTH_XOR_EXPR:
      /* If the second arg is constant zero, drop it.  */
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If the second arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg1))
        {
          /* Only call invert_truthvalue if operand is a truth value.  */
          if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
            tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
          else
            tem = invert_truthvalue_loc (loc, arg0);
          return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
        }
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* !X ^ X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg1);

      /* X ^ !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      return NULL_TREE;

    case EQ_EXPR:
    case NE_EXPR:
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
        return tem;

      /* bool_var != 0 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
          && code == NE_EXPR)
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var == 1 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
          && code == EQ_EXPR)
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var != 1 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
          && code == NE_EXPR)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, TRUTH_NOT_EXPR,
                                                  TREE_TYPE (arg0), arg0));

      /* bool_var == 0 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
          && code == EQ_EXPR)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, TRUTH_NOT_EXPR,
                                                  TREE_TYPE (arg0), arg0));

      /* !exp != 0 becomes !exp */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
          && code == NE_EXPR)
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If this is an equality comparison of the address of two non-weak,
         unaliased symbols neither of which are extern (since we do not
         have access to attributes for externs), then we know the result.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
          && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
          && ! lookup_attribute ("alias",
                                 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
          && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
          && TREE_CODE (arg1) == ADDR_EXPR
          && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
          && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
          && ! lookup_attribute ("alias",
                                 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
          && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
        {
          /* We know that we're looking at the address of two
             non-weak, unaliased, static _DECL nodes.

             It is both wasteful and incorrect to call operand_equal_p
             to compare the two ADDR_EXPR nodes.  It is wasteful in that
             all we need to do is test pointer equality for the arguments
             to the two ADDR_EXPR nodes.  It is incorrect to use
             operand_equal_p as that function is NOT equivalent to a
             C equality test.  It can in fact return false for two
             objects which would test as equal using the C equality
             operator.  */
          bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
          return constant_boolean_node (equal
                                        ? code == EQ_EXPR : code != EQ_EXPR,
                                        type);
        }
      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
         a MINUS_EXPR of a constant, we can convert it into a comparison with
         a revised constant as long as no overflow occurs.  */
      if (TREE_CODE (arg1) == INTEGER_CST
          && (TREE_CODE (arg0) == PLUS_EXPR
              || TREE_CODE (arg0) == MINUS_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
                                      ? MINUS_EXPR : PLUS_EXPR,
                                      fold_convert_loc (loc, TREE_TYPE (arg0),
                                                        arg1),
                                      TREE_OPERAND (arg0, 1)))
          && !TREE_OVERFLOW (tem))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a NEGATE_EXPR.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
                                                        arg1)))
          && TREE_CODE (tem) == INTEGER_CST
          && !TREE_OVERFLOW (tem))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
      /* Similarly for a BIT_XOR_EXPR;  X ^ C1 == C2 is X == (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                                fold_build2_loc (loc, BIT_XOR_EXPR,
                                                 TREE_TYPE (arg0),
                                                 fold_convert_loc (loc,
                                                                   TREE_TYPE (arg0),
                                                                   arg1),
                                                 TREE_OPERAND (arg0, 1)));
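      /* E.g. (x ^ 5) == 3 becomes x == 6, because 5 ^ 3 == 6.  */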
      /* Transform comparisons of the form X +- Y CMP X to Y CMP 0.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR
           || TREE_CODE (arg0) == POINTER_PLUS_EXPR
           || TREE_CODE (arg0) == MINUS_EXPR)
          && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
                                                                        0)),
                              arg1, 0)
          && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
              || POINTER_TYPE_P (TREE_TYPE (arg0))))
        {
          tree val = TREE_OPERAND (arg0, 1);
          return omit_two_operands_loc (loc, type,
                                        fold_build2_loc (loc, code, type,
                                                         val,
                                                         build_int_cst (TREE_TYPE (val),
                                                                        0)),
                                        TREE_OPERAND (arg0, 0), arg1);
        }

      /* Transform comparisons of the form C - X CMP X if C % 2 == 1.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
          && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
                                                                        1)),
                              arg1, 0)
          && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
        {
          return omit_two_operands_loc (loc, type,
                                        code == NE_EXPR
                                        ? boolean_true_node : boolean_false_node,
                                        TREE_OPERAND (arg0, 1), arg1);
        }
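      /* The odd-constant case is safe even with wrapping arithmetic:
         C - x == x would require 2*x == C, and twice an integer is
         always even modulo a power of two, so e.g. 7 - x != x holds
         for every x.  */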
      /* If we have X - Y == 0, we can convert that to X == Y and similarly
         for !=.  Don't do this for ordered comparisons due to overflow.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
          && integer_zerop (arg1))
        return fold_build2_loc (loc, code, type,
                                TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg0, 1));

      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
      if (TREE_CODE (arg0) == ABS_EXPR
          && (integer_zerop (arg1) || real_zerop (arg1)))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
      /* If this is an EQ or NE comparison with zero and ARG0 is
         (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
         two operations, but the latter can be done in one less insn
         on machines that have only two-operand insns or on which a
         constant cannot be the first operand.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_zerop (arg1))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          if (TREE_CODE (arg00) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg00, 0)))
            {
              tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
                                          arg01, TREE_OPERAND (arg00, 1));
              tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
                                     build_int_cst (TREE_TYPE (arg0), 1));
              return fold_build2_loc (loc, code, type,
                                      fold_convert_loc (loc, TREE_TYPE (arg1),
                                                        tem),
                                      arg1);
            }
          else if (TREE_CODE (arg01) == LSHIFT_EXPR
                   && integer_onep (TREE_OPERAND (arg01, 0)))
            {
              tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
                                          arg00, TREE_OPERAND (arg01, 1));
              tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
                                     build_int_cst (TREE_TYPE (arg0), 1));
              return fold_build2_loc (loc, code, type,
                                      fold_convert_loc (loc, TREE_TYPE (arg1),
                                                        tem),
                                      arg1);
            }
        }
      /* If this is an NE or EQ comparison of zero against the result of a
         signed MOD operation whose second operand is a power of 2, make
         the MOD operation unsigned since it is simpler and equivalent.  */
      if (integer_zerop (arg1)
          && !TYPE_UNSIGNED (TREE_TYPE (arg0))
          && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
              || TREE_CODE (arg0) == CEIL_MOD_EXPR
              || TREE_CODE (arg0) == FLOOR_MOD_EXPR
              || TREE_CODE (arg0) == ROUND_MOD_EXPR)
          && integer_pow2p (TREE_OPERAND (arg0, 1)))
        {
          tree newtype = unsigned_type_for (TREE_TYPE (arg0));
          tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
                                         fold_convert_loc (loc, newtype,
                                                           TREE_OPERAND (arg0, 0)),
                                         fold_convert_loc (loc, newtype,
                                                           TREE_OPERAND (arg0, 1)));

          return fold_build2_loc (loc, code, type, newmod,
                                  fold_convert_loc (loc, newtype, arg1));
        }
      /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
         C1 is a valid shift constant, and C2 is a power of two, i.e.
         a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
             == INTEGER_CST
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && integer_zerop (arg1))
        {
          tree itype = TREE_TYPE (arg0);
          unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
          tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);

          /* Check for a valid shift count.  */
          if (TREE_INT_CST_HIGH (arg001) == 0
              && TREE_INT_CST_LOW (arg001) < prec)
            {
              tree arg01 = TREE_OPERAND (arg0, 1);
              tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
              unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
              /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
                 can be rewritten as (X & (C2 << C1)) != 0.  */
              if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
                {
                  tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
                  tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
                  return fold_build2_loc (loc, code, type, tem,
                                          fold_convert_loc (loc, itype, arg1));
                }
              /* Otherwise, for signed (arithmetic) shifts,
                 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
                 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
              else if (!TYPE_UNSIGNED (itype))
                return fold_build2_loc (loc,
                                        code == EQ_EXPR ? GE_EXPR : LT_EXPR,
                                        type,
                                        arg000, build_int_cst (itype, 0));
              /* Otherwise, for unsigned (logical) shifts,
                 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
                 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
              else
                return omit_one_operand_loc (loc, type,
                                             code == EQ_EXPR ? integer_one_node
                                                             : integer_zero_node,
                                             arg000);
            }
        }
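      /* E.g. ((x >> 4) & 1) != 0 becomes (x & 0x10) != 0, and for signed
         32-bit x, ((x >> 30) & 4) != 0 becomes x < 0, since the tested
         bit is a copy of the sign bit.  */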
      /* If we have (A & C) == C where C is a power of 2, convert this into
         (A & C) != 0.  Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
                                arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
                                                        integer_zero_node));
      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
         bit, then fold the expression into A < 0 or A >= 0.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
      if (tem)
        return tem;
      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
         Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
                                       TREE_TYPE (TREE_OPERAND (arg0, 1)),
                                       TREE_OPERAND (arg0, 1));
          tree dandnotc
            = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
                               fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
                               notc);
          tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
          if (integer_nonzerop (dandnotc))
            return omit_one_operand_loc (loc, type, rslt, arg0);
        }

      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
         Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
          tree candnotd
            = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
                               TREE_OPERAND (arg0, 1),
                               fold_convert_loc (loc, TREE_TYPE (arg0), notd));
          tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
          if (integer_nonzerop (candnotd))
            return omit_one_operand_loc (loc, type, rslt, arg0);
        }
      /* If this is a comparison of a field, we may be able to simplify it.  */
      if ((TREE_CODE (arg0) == COMPONENT_REF
           || TREE_CODE (arg0) == BIT_FIELD_REF)
          /* Handle the constant case even without -O
             to make sure the warnings are given.  */
          && (optimize || TREE_CODE (arg1) == INTEGER_CST))
        {
          t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
          if (t1)
            return t1;
        }
      /* Optimize comparisons of strlen vs zero to a compare of the
         first character of the string vs zero.  To wit,
                strlen(ptr) == 0   =>  *ptr == 0
                strlen(ptr) != 0   =>  *ptr != 0
         Other cases should reduce to one of these two (or a constant)
         due to the return value of strlen being unsigned.  */
      if (TREE_CODE (arg0) == CALL_EXPR
          && integer_zerop (arg1))
        {
          tree fndecl = get_callee_fndecl (arg0);

          if (fndecl
              && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
              && call_expr_nargs (arg0) == 1
              && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
            {
              tree iref = build_fold_indirect_ref_loc (loc,
                                                       CALL_EXPR_ARG (arg0, 0));
              return fold_build2_loc (loc, code, type, iref,
                                      build_int_cst (TREE_TYPE (iref), 0));
            }
        }
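      /* E.g. strlen (p) == 0 becomes *p == 0: a string has length zero
         exactly when its first character is the terminating NUL.  */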
      /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
         of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
      if (TREE_CODE (arg0) == RSHIFT_EXPR
          && integer_zerop (arg1)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree itype = TREE_TYPE (arg00);
          if (TREE_INT_CST_HIGH (arg01) == 0
              && TREE_INT_CST_LOW (arg01)
                 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
            {
              if (TYPE_UNSIGNED (itype))
                {
                  itype = signed_type_for (itype);
                  arg00 = fold_convert_loc (loc, itype, arg00);
                }
              return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
                                      type, arg00, build_zero_cst (itype));
            }
        }
      /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
      if (integer_zerop (arg1)
          && TREE_CODE (arg0) == BIT_XOR_EXPR)
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg0, 1));

      /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                                build_zero_cst (TREE_TYPE (arg0)));
      /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
                                build_zero_cst (TREE_TYPE (arg0)));

      /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                                fold_build2_loc (loc, BIT_XOR_EXPR,
                                                 TREE_TYPE (arg1),
                                                 TREE_OPERAND (arg0, 1), arg1));
      /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
         (X & C) == 0 when C is a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
          && integer_zerop (arg1)
          && integer_pow2p (TREE_OPERAND (arg0, 1)))
        {
          tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
                                 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
                                 TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
                                  type, tem,
                                  fold_convert_loc (loc, TREE_TYPE (arg0),
                                                    arg1));
        }
      /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
	 constant C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type, arg00,
				  build_int_cst (TREE_TYPE (arg00), 0));
	}
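      /* Illustration (editorial note): with the single bit C == 8,
	 "((x & 8) ^ 8) == 0" holds exactly when bit 3 of x is set, so it
	 folds to the simpler "(x & 8) != 0".  */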
      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
	 when C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
				 arg000, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type, tem,
				  build_int_cst (TREE_TYPE (tem), 0));
	}
      if (integer_zerop (arg1)
	  && tree_expr_nonzero_p (arg0))
	{
	  tree res = constant_boolean_node (code == NE_EXPR, type);
	  return omit_one_operand_loc (loc, type, res, arg0);
	}
      /* Fold -X op -Y as X op Y, where op is eq/ne.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, code, type,
				TREE_OPERAND (arg0, 0),
				fold_convert_loc (loc, TREE_TYPE (arg0),
						  TREE_OPERAND (arg1, 0)));
      /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0, and symmetries.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
								      BIT_XOR_EXPR,
								      itype,
								      arg00,
								      arg10),
						     arg01),
				    build_zero_cst (itype));

	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
								      BIT_XOR_EXPR,
								      itype,
								      arg00,
								      arg11),
						     arg01),
				    build_zero_cst (itype));

	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
								      BIT_XOR_EXPR,
								      itype,
								      arg01,
								      arg10),
						     arg00),
				    build_zero_cst (itype));

	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
								      BIT_XOR_EXPR,
								      itype,
								      arg01,
								      arg11),
						     arg00),
				    build_zero_cst (itype));
	}
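      /* Illustration (editorial note): "(x & 7) == (y & 7)" holds exactly
	 when x and y agree in their low three bits, which is
	 "((x ^ y) & 7) == 0"; the four symmetric forms above handle the
	 shared mask appearing as either operand of either BIT_AND_EXPR.  */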
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == BIT_XOR_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
	     operand_equal_p guarantees no side-effects so we don't need
	     to use omit_one_operand on Z.  */
	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2_loc (loc, code, type, arg00,
				    fold_convert_loc (loc, TREE_TYPE (arg00),
						      arg10));
	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2_loc (loc, code, type, arg00,
				    fold_convert_loc (loc, TREE_TYPE (arg00),
						      arg11));
	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2_loc (loc, code, type, arg01,
				    fold_convert_loc (loc, TREE_TYPE (arg01),
						      arg10));
	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2_loc (loc, code, type, arg01,
				    fold_convert_loc (loc, TREE_TYPE (arg01),
						      arg11));

	  /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
	  if (TREE_CODE (arg01) == INTEGER_CST
	      && TREE_CODE (arg11) == INTEGER_CST)
	    {
	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
				     fold_convert_loc (loc, itype, arg11));
	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
	      return fold_build2_loc (loc, code, type, tem,
				      fold_convert_loc (loc, itype, arg10));
	    }
	}
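      /* Illustration (editorial note): cancelling the common term,
	 "(x ^ z) == (y ^ z)" folds to "x == y"; with two constants,
	 "(x ^ 3) == (y ^ 5)" folds to "(x ^ (3 ^ 5)) == y",
	 i.e. "(x ^ 6) == y".  */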
      /* Attempt to simplify equality/inequality comparisons of complex
	 values.  Only lower the comparison if the result is known or
	 can be simplified to a single scalar comparison.  */
      if ((TREE_CODE (arg0) == COMPLEX_EXPR
	   || TREE_CODE (arg0) == COMPLEX_CST)
	  && (TREE_CODE (arg1) == COMPLEX_EXPR
	      || TREE_CODE (arg1) == COMPLEX_CST))
	{
	  tree real0, imag0, real1, imag1;
	  tree rcond, icond;

	  if (TREE_CODE (arg0) == COMPLEX_EXPR)
	    {
	      real0 = TREE_OPERAND (arg0, 0);
	      imag0 = TREE_OPERAND (arg0, 1);
	    }
	  else
	    {
	      real0 = TREE_REALPART (arg0);
	      imag0 = TREE_IMAGPART (arg0);
	    }

	  if (TREE_CODE (arg1) == COMPLEX_EXPR)
	    {
	      real1 = TREE_OPERAND (arg1, 0);
	      imag1 = TREE_OPERAND (arg1, 1);
	    }
	  else
	    {
	      real1 = TREE_REALPART (arg1);
	      imag1 = TREE_IMAGPART (arg1);
	    }

	  rcond = fold_binary_loc (loc, code, type, real0, real1);
	  if (rcond && TREE_CODE (rcond) == INTEGER_CST)
	    {
	      if (integer_zerop (rcond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_false_node,
						  imag0, imag1);
		  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_true_node,
						  imag0, imag1);
		  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
		}
	    }

	  icond = fold_binary_loc (loc, code, type, imag0, imag1);
	  if (icond && TREE_CODE (icond) == INTEGER_CST)
	    {
	      if (integer_zerop (icond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_false_node,
						  real0, real1);
		  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_true_node,
						  real0, real1);
		  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
		}
	    }
	}

      return NULL_TREE;

    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;
      /* Transform comparisons of the form X +- C CMP X.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	       && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
	      || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  enum tree_code code0 = TREE_CODE (arg0);
	  int is_positive;

	  if (TREE_CODE (arg01) == REAL_CST)
	    is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
	  else
	    is_positive = tree_int_cst_sgn (arg01);

	  /* (X - c) > X becomes false.  */
	  if (code == GT_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that (X - c) > X "
					"is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Likewise (X + c) < X becomes false.  */
	  if (code == LT_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) < X is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Convert (X - c) <= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == LE_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X - c) <= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  /* Convert (X + c) >= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == GE_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) >= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  if (TREE_CODE (arg01) == INTEGER_CST)
	    {
	      /* Convert X + c > X and X - c < X to true for integers.  */
	      if (code == GT_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) > X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      if (code == LT_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) < X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      /* Convert X + c <= X and X - c >= X to false for integers.  */
	      if (code == LE_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) <= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}

	      if (code == GE_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) >= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}
	    }
	}
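      /* Illustration (editorial note): with signed overflow undefined
	 (the default without -fwrapv), "x + 1 > x" folds to 1 and
	 "x + 1 <= x" folds to 0; the fold_overflow_warning calls above
	 let -Wstrict-overflow report that this assumption was used.  */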
      /* Comparisons with the highest or lowest possible integer of
	 the specified precision will have known values.  */
      {
	tree arg1_type = TREE_TYPE (arg1);
	unsigned int width = TYPE_PRECISION (arg1_type);

	if (TREE_CODE (arg1) == INTEGER_CST
	    && width <= HOST_BITS_PER_DOUBLE_INT
	    && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
	  {
	    HOST_WIDE_INT signed_max_hi;
	    unsigned HOST_WIDE_INT signed_max_lo;
	    unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;

	    if (width <= HOST_BITS_PER_WIDE_INT)
	      {
		signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				- 1;
		signed_max_hi = 0;
		max_hi = 0;

		if (TYPE_UNSIGNED (arg1_type))
		  {
		    max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		    min_lo = 0;
		    min_hi = 0;
		  }
		else
		  {
		    max_lo = signed_max_lo;
		    min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
		    min_hi = -1;
		  }
	      }
	    else
	      {
		width -= HOST_BITS_PER_WIDE_INT;
		signed_max_lo = -1;
		signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				- 1;
		max_lo = -1;
		min_lo = 0;

		if (TYPE_UNSIGNED (arg1_type))
		  {
		    max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		    min_hi = 0;
		  }
		else
		  {
		    max_hi = signed_max_hi;
		    min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
		  }
	      }

	    if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
		&& TREE_INT_CST_LOW (arg1) == max_lo)
	      switch (code)
		{
		case GT_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_zero_node, arg0);

		case GE_EXPR:
		  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

		case LE_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_one_node, arg0);

		case LT_EXPR:
		  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

		/* The GE_EXPR and LT_EXPR cases above are not normally
		   reached because of previous transformations.  */

		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == max_hi
		     && TREE_INT_CST_LOW (arg1) == max_lo - 1)
	      switch (code)
		{
		case GT_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1));
		  return fold_build2_loc (loc, EQ_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);

		case LE_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1));
		  return fold_build2_loc (loc, NE_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);

		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == min_hi
		     && TREE_INT_CST_LOW (arg1) == min_lo)
	      switch (code)
		{
		case LT_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_zero_node, arg0);

		case LE_EXPR:
		  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

		case GE_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_one_node, arg0);

		case GT_EXPR:
		  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == min_hi
		     && TREE_INT_CST_LOW (arg1) == min_lo + 1)
	      switch (code)
		{
		case GE_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
		  return fold_build2_loc (loc, NE_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);

		case LT_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
		  return fold_build2_loc (loc, EQ_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);

		default:
		  break;
		}

	    else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
		     && TREE_INT_CST_LOW (arg1) == signed_max_lo
		     && TYPE_UNSIGNED (arg1_type)
		     /* We will flip the signedness of the comparison operator
			associated with the mode of arg1, so the sign bit is
			specified by this mode.  Check that arg1 is the signed
			max associated with this sign bit.  */
		     && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
		     /* signed_type does not work on pointer types.  */
		     && INTEGRAL_TYPE_P (arg1_type))
	      {
		/* The following case also applies to X < signed_max+1
		   and X >= signed_max+1 because of previous
		   transformations.  */
		if (code == LE_EXPR || code == GT_EXPR)
		  {
		    tree st = signed_type_for (TREE_TYPE (arg1));
		    return fold_build2_loc (loc,
					    code == LE_EXPR
					    ? GE_EXPR : LT_EXPR,
					    type,
					    fold_convert_loc (loc, st, arg0),
					    build_zero_cst (st));
		  }
	      }
	  }
      }
      /* If we are comparing an ABS_EXPR with a constant, we can
	 convert all the cases into explicit comparisons, but they may
	 well not be faster than doing the ABS and one comparison.
	 But ABS (X) <= C is a range comparison, which becomes a subtraction
	 and a comparison, and is probably faster.  */
      if (code == LE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == ABS_EXPR
	  && ! TREE_SIDE_EFFECTS (arg0)
	  && (0 != (tem = negate_expr (arg1)))
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				build2 (GE_EXPR, type,
					TREE_OPERAND (arg0, 0), tem),
				build2 (LE_EXPR, type,
					TREE_OPERAND (arg0, 0), arg1));
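      /* Illustration (editorial note): "abs (x) <= 5" becomes the range
	 test "x >= -5 && x <= 5", which needs no ABS computation; the
	 negate_expr/TREE_OVERFLOW checks above guard against -C
	 overflowing, e.g. when C has INT_MIN's magnitude.  */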
      /* Convert ABS_EXPR<x> >= 0 to true.  */
      strict_overflow_p = false;
      if (code == GE_EXPR
	  && (integer_zerop (arg1)
	      || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
		  && real_zerop (arg1)))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand_loc (loc, type,
				       constant_boolean_node (true, type),
				       arg0);
	}

      /* Convert ABS_EXPR<x> < 0 to false.  */
      strict_overflow_p = false;
      if (code == LT_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand_loc (loc, type,
				       constant_boolean_node (false, type),
				       arg0);
	}
      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
	 and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TREE_CODE (arg1) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (arg1, 0)))
	return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
			   build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
				   TREE_OPERAND (arg1, 1)),
			   build_zero_cst (TREE_TYPE (arg0)));
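      /* Illustration (editorial note): for unsigned x, "x < (1U << y)"
	 says x has no bits set at or above position y, so it folds to
	 "(x >> y) == 0"; "x >= (1U << y)" becomes "(x >> y) != 0".  */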
      /* Similarly for X < (cast) (1 << Y).  But cast can't be narrowing,
	 otherwise Y might be >= # of bits in X's type and thus e.g.
	 (unsigned char) (1 << Y) for Y 15 might be 0.
	 If the cast is widening, then 1 << Y should have unsigned type,
	 otherwise if Y is number of bits in the signed shift type minus 1,
	 we can't optimize this.  E.g. (unsigned long long) (1 << Y) for Y
	 31 might be 0xffffffff80000000.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && CONVERT_EXPR_P (arg1)
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
	  && (TYPE_PRECISION (TREE_TYPE (arg1))
	      >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
	  && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
	      || (TYPE_PRECISION (TREE_TYPE (arg1))
		  == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
	  && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
	{
	  tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
			TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
	  return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
			     fold_convert_loc (loc, TREE_TYPE (arg0), tem),
			     build_zero_cst (TREE_TYPE (arg0)));
	}

      return NULL_TREE;
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
	{
	  t1 = fold_relational_const (code, type, arg0, arg1);
	  if (t1 != NULL_TREE)
	    return t1;
	}

      /* If the first operand is NaN, the result is constant.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand_loc (loc, type, t1, arg1);
	}

      /* If the second operand is NaN, the result is constant.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand_loc (loc, type, t1, arg0);
	}

      /* Simplify unordered comparison of something with itself.  */
      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (1, type);

      if (code == LTGT_EXPR
	  && !flag_trapping_math
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (0, type);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
	tree targ0 = strip_float_extensions (arg0);
	tree targ1 = strip_float_extensions (arg1);
	tree newtype = TREE_TYPE (targ0);

	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	  newtype = TREE_TYPE (targ1);

	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, newtype, targ0),
				  fold_convert_loc (loc, newtype, targ1));
      }

      return NULL_TREE;
    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
	 nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
	return NULL_TREE;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
				 : fold_convert_loc (loc, type, arg1);
      return pedantic_non_lvalue_loc (loc, tem);
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg0) == REAL_CST
	   && TREE_CODE (arg1) == REAL_CST)
	  || (TREE_CODE (arg0) == INTEGER_CST
	      && TREE_CODE (arg1) == INTEGER_CST))
	return build_complex (type, arg0, arg1);
      if (TREE_CODE (arg0) == REALPART_EXPR
	  && TREE_CODE (arg1) == IMAGPART_EXPR
	  && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
	  && operand_equal_p (TREE_OPERAND (arg0, 0),
			      TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
				     TREE_OPERAND (arg1, 0));
      return NULL_TREE;
    case ASSERT_EXPR:
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
      gcc_unreachable ();
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
	if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg0, elts)
	    || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
	  return NULL_TREE;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
					  ? NOP_EXPR : FIX_TRUNC_EXPR,
					  TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }
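      /* Illustration (editorial note): packing two constant V4SI vectors
	 into a V8HI runs fold_convert_const over each of the eight
	 elements, so { 1, 2, 3, 4 } and { 5, 6, 7, 8 } become the single
	 vector { 1, 2, 3, 4, 5, 6, 7, 8 } of shorts; any non-constant
	 element makes the whole fold bail out with NULL_TREE.  */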
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
	unsigned int out, ofs, scale;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
	if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 4);
	if (!vec_cst_ctor_to_array (arg0, elts)
	    || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
	  return NULL_TREE;

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	for (out = 0; out < nelts; out++)
	  {
	    unsigned int in1 = (out << scale) + ofs;
	    unsigned int in2 = in1 + nelts * 2;
	    tree t1, t2;

	    t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
	    t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    elts[out] = const_binop (MULT_EXPR, t1, t2);
	    if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
   a LABEL_EXPR; otherwise return NULL_TREE.  Do not check the subtrees
   of GOTO_EXPR.  */

static tree
contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      return *tp;

    case GOTO_EXPR:
      *walk_subtrees = 0;

      /* ... fall through ...  */

    default:
      return NULL_TREE;
    }
}

/* Return whether the sub-tree ST contains a label which is accessible from
   outside the sub-tree.  */

static bool
contains_label_p (tree st)
{
  return
   (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
}
/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_ternary_loc (location_t loc, enum tree_code code, tree type,
		  tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 3);

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any cases,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */
  if (op0)
    {
      arg0 = op0;
      STRIP_NOPS (arg0);
    }

  if (op1)
    {
      arg1 = op1;
      STRIP_NOPS (arg1);
    }

  if (op2)
    {
      arg2 = op2;
      STRIP_NOPS (arg2);
    }

  switch (code)
    {
    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
	{
	  unsigned HOST_WIDE_INT idx;
	  tree field, value;
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx,
				    field, value)
	    if (field == arg1)
	      return value;
	}
      return NULL_TREE;
    case COND_EXPR:
    case VEC_COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
	 so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
	  tem = integer_zerop (arg0) ? op2 : op1;
	  /* Only optimize constant conditions when the selected branch
	     has the same type as the COND_EXPR.  This avoids optimizing
	     away "c ? x : throw", where the throw has a void type.
	     Avoid throwing away that operand which contains label.  */
	  if ((!TREE_SIDE_EFFECTS (unused_op)
	       || !contains_label_p (unused_op))
	      && (! VOID_TYPE_P (TREE_TYPE (tem))
		  || VOID_TYPE_P (type)))
	    return pedantic_non_lvalue_loc (loc, tem);
	  return NULL_TREE;
	}
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  if (integer_all_onesp (arg0))
	    return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
	  if (integer_zerop (arg0))
	    return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);

	  if ((TREE_CODE (arg1) == VECTOR_CST
	       || TREE_CODE (arg1) == CONSTRUCTOR)
	      && (TREE_CODE (arg2) == VECTOR_CST
		  || TREE_CODE (arg2) == CONSTRUCTOR))
	    {
	      unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	      unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
	      gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
	      for (i = 0; i < nelts; i++)
		{
		  tree val = VECTOR_CST_ELT (arg0, i);
		  if (integer_all_onesp (val))
		    sel[i] = i;
		  else if (integer_zerop (val))
		    sel[i] = nelts + i;
		  else /* Currently unreachable.  */
		    return NULL_TREE;
		}
	      tree t = fold_vec_perm (type, arg1, arg2, sel);
	      if (t != NULL_TREE)
		return t;
	    }
	}

      if (operand_equal_p (arg1, op2, 0))
	return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
      /* If we have A op B ? A : C, we may be able to convert this to a
	 simpler expression, depending on the operation and the values
	 of B and C.  Signed zeros prevent all of these transformations,
	 for reasons given above each one.

	 Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     arg1, TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
	{
	  tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
	  if (tem)
	    return tem;
	}

      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     op2,
					     TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  tem = fold_truth_not_expr (loc0, arg0);
	  if (tem && COMPARISON_CLASS_P (tem))
	    {
	      tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
	      if (tem)
		return tem;
	    }
	}
      /* ??? Fixup the code below for VEC_COND_EXPR.  */
      if (code == VEC_COND_EXPR)
	return NULL_TREE;

      /* If the second operand is simpler than the third, swap them
	 since that produces better jump optimization results.  */
      if (truth_value_p (TREE_CODE (arg0))
	  && tree_swap_operands_p (op1, op2, false))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* See if this can be inverted.  If it can't, possibly because
	     it was a floating-point inequality comparison, don't do
	     anything.  */
	  tem = fold_truth_not_expr (loc0, arg0);
	  if (tem)
	    return fold_build3_loc (loc, code, type, tem, op2, op1);
	}
      /* Convert A ? 1 : 0 to simply A.  */
      if (integer_onep (op1)
	  && integer_zerop (op2)
	  /* If we try to convert OP0 to our type, the
	     call to fold will try to move the conversion inside
	     a COND, which will recurse.  In that case, the COND_EXPR
	     is probably the best choice, so leave it alone.  */
	  && type == TREE_TYPE (arg0))
	return pedantic_non_lvalue_loc (loc, arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
	 over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
	  && integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0)))
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
							  invert_truthvalue_loc (loc,
										 arg0)));
      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
      if (TREE_CODE (arg0) == LT_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
	{
	  /* sign_bit_p only checks ARG1 bits within A's precision.
	     If <sign bit of A> has wider type than A, bits outside
	     of A's precision in <sign bit of A> need to be checked.
	     If they are all 0, this optimization needs to be done
	     in unsigned A's type, if they are all 1 in signed A's type,
	     otherwise this can't be done.  */
	  if (TYPE_PRECISION (TREE_TYPE (tem))
	      < TYPE_PRECISION (TREE_TYPE (arg1))
	      && TYPE_PRECISION (TREE_TYPE (tem))
		 < TYPE_PRECISION (type))
	    {
	      unsigned HOST_WIDE_INT mask_lo;
	      HOST_WIDE_INT mask_hi;
	      int inner_width, outer_width;
	      tree tem_type;

	      inner_width = TYPE_PRECISION (TREE_TYPE (tem));
	      outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
	      if (outer_width > TYPE_PRECISION (type))
		outer_width = TYPE_PRECISION (type);

	      if (outer_width > HOST_BITS_PER_WIDE_INT)
		{
		  mask_hi = ((unsigned HOST_WIDE_INT) -1
			     >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
		  mask_lo = -1;
		}
	      else
		{
		  mask_hi = 0;
		  mask_lo = ((unsigned HOST_WIDE_INT) -1
			     >> (HOST_BITS_PER_WIDE_INT - outer_width));
		}
	      if (inner_width > HOST_BITS_PER_WIDE_INT)
		{
		  mask_hi &= ~((unsigned HOST_WIDE_INT) -1
			       >> (HOST_BITS_PER_WIDE_INT - inner_width));
		  mask_lo = 0;
		}
	      else
		mask_lo &= ~((unsigned HOST_WIDE_INT) -1
			     >> (HOST_BITS_PER_WIDE_INT - inner_width));

	      if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
		  && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
		{
		  tem_type = signed_type_for (TREE_TYPE (tem));
		  tem = fold_convert_loc (loc, tem_type, tem);
		}
	      else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
		       && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
		{
		  tem_type = unsigned_type_for (TREE_TYPE (tem));
		  tem = fold_convert_loc (loc, tem_type, tem);
		}
	      else
		tem = NULL_TREE;
	    }

	  if (tem)
	    return
	      fold_convert_loc (loc, type,
				fold_build2_loc (loc, BIT_AND_EXPR,
						 TREE_TYPE (tem), tem,
						 fold_convert_loc (loc,
								   TREE_TYPE (tem),
								   arg1)));
	}
      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
	 already handled above.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && integer_pow2p (arg1))
	{
	  tree tem = TREE_OPERAND (arg0, 0);
	  STRIP_NOPS (tem);
	  if (TREE_CODE (tem) == RSHIFT_EXPR
	      && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
	      && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
		 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
	    return fold_build2_loc (loc, BIT_AND_EXPR, type,
				    TREE_OPERAND (tem, 0), arg1);
	}
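      /* Illustration (editorial note): "(a >> 3) & 1 ? 8 : 0" extracts
	 bit 3 of a and then scales it back up, so the whole conditional
	 folds to "a & 8"; the tree_log2 check verifies that the shift
	 count matches the power of two being selected.  */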
      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
	 is probably obsolete because the first operand should be a
	 truth value (that's why we have the two cases above), but let's
	 leave it in until we can confirm this for all front-ends.  */
      if (integer_zerop (op2)
	  && TREE_CODE (arg0) == NE_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_pow2p (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      arg1, OEP_ONLY_CONST))
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
							  TREE_OPERAND (arg0, 0)));
      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				fold_convert_loc (loc, type, arg0),
				arg1);

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_truth_not_expr (loc0, arg0);
	  if (tem)
	    return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
				    fold_convert_loc (loc, type, tem),
				    arg1);
	}

      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
      if (integer_zerop (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2)))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_truth_not_expr (loc0, arg0);
	  if (tem)
	    return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				    fold_convert_loc (loc, type, tem),
				    op2);
	}

      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if (integer_onep (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2)))
	return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
				fold_convert_loc (loc, type, arg0),
				op2);

      return NULL_TREE;
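      /* Illustration (editorial note): for truth values a and b,
	 "a ? b : 0" folds to "a && b" and "a ? 1 : b" to "a || b"; the
	 inverted forms only fire when fold_truth_not_expr can negate a
	 cheaply, e.g. by flipping a comparison code.  */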
    case CALL_EXPR:
      /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
	 of fold_ternary on them.  */
      gcc_unreachable ();
    case BIT_FIELD_REF:
      if ((TREE_CODE (arg0) == VECTOR_CST
	   || (TREE_CODE (arg0) == CONSTRUCTOR
	       && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
	  && (type == TREE_TYPE (TREE_TYPE (arg0))
	      || (TREE_CODE (type) == VECTOR_TYPE
		  && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
	{
	  tree eltype = TREE_TYPE (TREE_TYPE (arg0));
	  unsigned HOST_WIDE_INT width = tree_low_cst (TYPE_SIZE (eltype), 1);
	  unsigned HOST_WIDE_INT n = tree_low_cst (arg1, 1);
	  unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);

	  if (n != 0
	      && (idx % width) == 0
	      && (n % width) == 0
	      && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
	    {
	      idx = idx / width;
	      n = n / width;

	      if (TREE_CODE (arg0) == VECTOR_CST)
		{
		  if (n == 1)
		    return VECTOR_CST_ELT (arg0, idx);

		  tree *vals = XALLOCAVEC (tree, n);
		  for (unsigned i = 0; i < n; ++i)
		    vals[i] = VECTOR_CST_ELT (arg0, idx + i);
		  return build_vector (type, vals);
		}

	      /* Constructor elements can be subvectors.  */
	      unsigned HOST_WIDE_INT k = 1;
	      if (CONSTRUCTOR_NELTS (arg0) != 0)
		{
		  tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
		  if (TREE_CODE (cons_elem) == VECTOR_TYPE)
		    k = TYPE_VECTOR_SUBPARTS (cons_elem);
		}

	      /* We keep an exact subset of the constructor elements.  */
	      if ((idx % k) == 0 && (n % k) == 0)
		{
		  if (CONSTRUCTOR_NELTS (arg0) == 0)
		    return build_constructor (type, NULL);
		  idx /= k;
		  n /= k;
		  if (n == 1)
		    {
		      if (idx < CONSTRUCTOR_NELTS (arg0))
			return CONSTRUCTOR_ELT (arg0, idx)->value;
		      return build_zero_cst (type);
		    }

		  vec<constructor_elt, va_gc> *vals;
		  vec_alloc (vals, n);
		  for (unsigned i = 0;
		       i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
		       ++i)
		    CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
					    CONSTRUCTOR_ELT
					      (arg0, idx + i)->value);
		  return build_constructor (type, vals);
		}
	      /* The bitfield references a single constructor element.  */
	      else if (idx + n <= (idx / k + 1) * k)
		{
		  if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
		    return build_zero_cst (type);
		  else if (n == k)
		    return CONSTRUCTOR_ELT (arg0, idx / k)->value;
		  else
		    return fold_build3_loc (loc, code, type,
		      CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
		      build_int_cst (TREE_TYPE (op2), (idx % k) * width));
		}
	    }
	}
      /* A bit-field-ref that referenced the full argument can be stripped.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	  && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
	  && integer_zerop (op2))
	return fold_convert_loc (loc, type, arg0);
      /* On constants we can use native encode/interpret to constant
	 fold (nearly) all BIT_FIELD_REFs.  */
      if (CONSTANT_CLASS_P (arg0)
	  && can_native_interpret_type_p (type)
	  && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1)
	  /* This limitation should not be necessary, we just need to
	     round this up to mode size.  */
	  && tree_low_cst (op1, 1) % BITS_PER_UNIT == 0
	  /* Need bit-shifting of the buffer to relax the following.  */
	  && tree_low_cst (op2, 1) % BITS_PER_UNIT == 0)
	{
	  unsigned HOST_WIDE_INT bitpos = tree_low_cst (op2, 1);
	  unsigned HOST_WIDE_INT bitsize = tree_low_cst (op1, 1);
	  unsigned HOST_WIDE_INT clen;
	  clen = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1);
	  /* ??? We cannot tell native_encode_expr to start at
	     some random byte only.  So limit us to a reasonable amount
	     of work.  */
	  if (clen <= 4096)
	    {
	      unsigned char *b = XALLOCAVEC (unsigned char, clen);
	      unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
	      if (len > 0
		  && len * BITS_PER_UNIT >= bitpos + bitsize)
		{
		  tree v = native_interpret_expr (type,
						  b + bitpos / BITS_PER_UNIT,
						  bitsize / BITS_PER_UNIT);
		  if (v)
		    return v;
		}
	    }
	}

      return NULL_TREE;
    case FMA_EXPR:
      /* For integers we can decompose the FMA if possible.  */
      if (TREE_CODE (arg0) == INTEGER_CST
	  && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2_loc (loc, PLUS_EXPR, type,
				const_binop (MULT_EXPR, arg0, arg1), arg2);
      if (integer_zerop (arg2))
	return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);

      return fold_fma (loc, type, arg0, arg1, arg2);
    case VEC_PERM_EXPR:
      if (TREE_CODE (arg2) == VECTOR_CST)
	{
	  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
	  unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
	  tree t;
	  bool need_mask_canon = false;
	  bool all_in_vec0 = true;
	  bool all_in_vec1 = true;
	  bool maybe_identity = true;
	  bool single_arg = (op0 == op1);
	  bool changed = false;

	  mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
	  gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
	  for (i = 0; i < nelts; i++)
	    {
	      tree val = VECTOR_CST_ELT (arg2, i);
	      if (TREE_CODE (val) != INTEGER_CST)
		return NULL_TREE;

	      sel[i] = TREE_INT_CST_LOW (val) & mask;
	      if (TREE_INT_CST_HIGH (val)
		  || ((unsigned HOST_WIDE_INT)
		      TREE_INT_CST_LOW (val) != sel[i]))
		need_mask_canon = true;

	      if (sel[i] < nelts)
		all_in_vec1 = false;
	      else
		all_in_vec0 = false;

	      if ((sel[i] & (nelts - 1)) != i)
		maybe_identity = false;
	    }

	  if (maybe_identity)
	    {
	      if (all_in_vec0)
		return op0;
	      if (all_in_vec1)
		return op1;
	    }

	  if (all_in_vec0)
	    op1 = op0;
	  else if (all_in_vec1)
	    {
	      op0 = op1;
	      for (i = 0; i < nelts; i++)
		sel[i] -= nelts;
	      need_mask_canon = true;
	    }

	  if ((TREE_CODE (op0) == VECTOR_CST
	       || TREE_CODE (op0) == CONSTRUCTOR)
	      && (TREE_CODE (op1) == VECTOR_CST
		  || TREE_CODE (op1) == CONSTRUCTOR))
	    {
	      t = fold_vec_perm (type, op0, op1, sel);
	      if (t != NULL_TREE)
		return t;
	    }

	  if (op0 == op1 && !single_arg)
	    changed = true;

	  if (need_mask_canon && arg2 == op2)
	    {
	      tree *tsel = XALLOCAVEC (tree, nelts);
	      tree eltype = TREE_TYPE (TREE_TYPE (arg2));
	      for (i = 0; i < nelts; i++)
		tsel[i] = build_int_cst (eltype, sel[i]);
	      op2 = build_vector (TREE_TYPE (arg2), tsel);
	      changed = true;
	    }

	  if (changed)
	    return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;
  location_t loc = EXPR_LOCATION (expr);

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  /* CALL_EXPR-like objects with variable numbers of operands are
     treated specially.  */
  if (kind == tcc_vl_exp)
    {
      if (code == CALL_EXPR)
	{
	  tem = fold_call_expr (loc, expr, false);
	  return tem ? tem : expr;
	}
      return expr;
    }

  if (IS_EXPR_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);
      tree op0, op1, op2;

      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  op0 = TREE_OPERAND (t, 0);
	  tem = fold_unary_loc (loc, code, type, op0);
	  return tem ? tem : expr;
	case 2:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  tem = fold_binary_loc (loc, code, type, op0, op1);
	  return tem ? tem : expr;
	case 3:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  op2 = TREE_OPERAND (t, 2);
	  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
	  return tem ? tem : expr;
	default:
	  break;
	}
    }

  switch (code)
    {
    case ARRAY_REF:
      {
	tree op0 = TREE_OPERAND (t, 0);
	tree op1 = TREE_OPERAND (t, 1);

	if (TREE_CODE (op1) == INTEGER_CST
	    && TREE_CODE (op0) == CONSTRUCTOR
	    && ! type_contains_placeholder_p (TREE_TYPE (op0)))
	  {
	    vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
	    unsigned HOST_WIDE_INT end = vec_safe_length (elts);
	    unsigned HOST_WIDE_INT begin = 0;

	    /* Find a matching index by means of a binary search.  */
	    while (begin != end)
	      {
		unsigned HOST_WIDE_INT middle = (begin + end) / 2;
		tree index = (*elts)[middle].index;

		if (TREE_CODE (index) == INTEGER_CST
		    && tree_int_cst_lt (index, op1))
		  begin = middle + 1;
		else if (TREE_CODE (index) == INTEGER_CST
			 && tree_int_cst_lt (op1, index))
		  end = middle;
		else if (TREE_CODE (index) == RANGE_EXPR
			 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
		  begin = middle + 1;
		else if (TREE_CODE (index) == RANGE_EXPR
			 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
		  end = middle;
		else
		  return (*elts)[middle].value;
	      }
	  }

	return t;
      }
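      /* Illustration (editorial note): the binary search above treats the
	 constructor's index list as sorted, so for indices
	 { 0, 1, RANGE_EXPR (2, 7), 8 } a lookup of index 5 probes the
	 middle entry, finds 5 inside the range [2,7], and returns that
	 element's value without a linear scan.  */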
      /* Return a VECTOR_CST if possible.  */
    case CONSTRUCTOR:
      {
	tree type = TREE_TYPE (t);
	if (TREE_CODE (type) != VECTOR_TYPE)
	  return t;

	tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
	unsigned HOST_WIDE_INT idx, pos = 0;
	tree value;

	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
	  {
	    if (!CONSTANT_CLASS_P (value))
	      return t;
	    if (TREE_CODE (value) == VECTOR_CST)
	      {
		for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
		  vec[pos++] = VECTOR_CST_ELT (value, i);
	      }
	    else
	      vec[pos++] = value;
	  }
	for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
	  vec[pos] = build_zero_cst (TREE_TYPE (type));

	return build_vector (type, vec);
      }

    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    default:
      return t;
    } /* switch (code) */
}
#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (const_tree, struct md5_ctx *,
				hash_table <pointer_hash <tree_node> >);
static void fold_check_failed (const_tree, const_tree);
void print_fold_checksum (const_tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */
tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  hash_table <pointer_hash <tree_node> > ht;

  ht.create (32);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  ht.empty ();

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  ht.dispose ();

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}
void
print_fold_checksum (const_tree expr)
{
  struct md5_ctx ctx;
  unsigned char checksum[16], cnt;
  hash_table <pointer_hash <tree_node> > ht;

  ht.create (32);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  ht.dispose ();
  for (cnt = 0; cnt < 16; ++cnt)
    fprintf (stderr, "%02x", checksum[cnt]);
  putc ('\n', stderr);
}

static void
fold_check_failed (const_tree expr ATTRIBUTE_UNUSED,
		   const_tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}
static void
fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
		    hash_table <pointer_hash <tree_node> > ht)
{
  tree_node **slot;
  enum tree_code code;
  union tree_node buf;
  int i, len;

 recursive_label:
  if (expr == NULL)
    return;
  slot = ht.find_slot (expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = CONST_CAST_TREE (expr);
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
      expr = (tree) &buf;
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
	   && (TYPE_POINTER_TO (expr)
	       || TYPE_REFERENCE_TO (expr)
	       || TYPE_CACHED_VALUES_P (expr)
	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
	       || TYPE_NEXT_VARIANT (expr)))
    {
      /* Allow these fields to be modified.  */
      tree tmp;
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = tmp = (tree) &buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
      TYPE_POINTER_TO (tmp) = NULL;
      TYPE_REFERENCE_TO (tmp) = NULL;
      TYPE_NEXT_VARIANT (tmp) = NULL;
      if (TYPE_CACHED_VALUES_P (tmp))
	{
	  TYPE_CACHED_VALUES_P (tmp) = 0;
	  TYPE_CACHED_VALUES (tmp) = NULL;
	}
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST
      && code != SSA_NAME
      && CODE_CONTAINS_STRUCT (code, TS_COMMON))
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
	{
	case STRING_CST:
	  md5_process_bytes (TREE_STRING_POINTER (expr),
			     TREE_STRING_LENGTH (expr), ctx);
	  break;
	case COMPLEX_CST:
	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
	  break;
	case VECTOR_CST:
	  for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
	    fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_exceptional:
      switch (code)
	{
	case TREE_LIST:
	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
	  expr = TREE_CHAIN (expr);
	  goto recursive_label;
	case TREE_VEC:
	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
    case tcc_vl_exp:
      len = TREE_OPERAND_LENGTH (expr);
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
	{
	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
	}
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
	fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);

      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
	{
	  fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
	  fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
	}
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
	  || SCALAR_FLOAT_TYPE_P (expr))
	{
	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
	}
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
	  || TREE_CODE (expr) == UNION_TYPE
	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}
/* Helper function for outputting the checksum of a tree T.  When
   debugging with gdb, you can "define mynext" to be "next" followed
   by "call debug_fold_checksum (op0)", then just trace down till the
   outermost call.  */

DEBUG_FUNCTION void
debug_fold_checksum (const_tree t)
{
  int i;
  unsigned char checksum[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <tree_node> > ht;
  ht.create (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (t, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  ht.empty ();

  for (i = 0; i < 16; i++)
    fprintf (stderr, "%d ", checksum[i]);

  fprintf (stderr, "\n");
}

#endif
/* Fold a unary tree expression with code CODE of type TYPE with an
   operand OP0.  LOC is the location of the resulting expression.
   Return a folded expression if successful.  Otherwise, return a tree
   expression with code CODE of type TYPE with an operand OP0.  */

tree
fold_build1_stat_loc (location_t loc,
		      enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before[16], checksum_after[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <tree_node> > ht;

  ht.create (32);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  ht.empty ();
#endif

  tem = fold_unary_loc (loc, code, type, op0);
  if (!tem)
    tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  ht.dispose ();

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (op0, tem);
#endif
  return tem;
}
/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting
   expression.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with operands
   OP0 and OP1.  */

tree
fold_build2_stat_loc (location_t loc,
		      enum tree_code code, tree type, tree op0, tree op1
		      MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
		checksum_before_op1[16],
		checksum_after_op0[16],
		checksum_after_op1[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <tree_node> > ht;

  ht.create (32);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  ht.empty ();
#endif

  tem = fold_binary_loc (loc, code, type, op0, op1);
  if (!tem)
    tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  ht.empty ();

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  ht.dispose ();

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
#endif
  return tem;
}
/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */

tree
fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
		      tree op0, tree op1, tree op2 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
		checksum_before_op1[16],
		checksum_before_op2[16],
		checksum_after_op0[16],
		checksum_after_op1[16],
		checksum_after_op2[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <tree_node> > ht;

  ht.create (32);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  ht.empty ();
#endif

  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
  if (!tem)
    tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  ht.empty ();

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  ht.empty ();

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op2);
  ht.dispose ();

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}
/* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
   arguments in ARGARRAY, and a null static chain.
   Return a folded expression if successful.  Otherwise, return a CALL_EXPR
   of type TYPE from the given operands as constructed by build_call_array.  */

tree
fold_build_call_array_loc (location_t loc, tree type, tree fn,
			   int nargs, tree *argarray)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_fn[16],
		checksum_before_arglist[16],
		checksum_after_fn[16],
		checksum_after_arglist[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <tree_node> > ht;
  int i;

  ht.create (32);
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_fn);
  ht.empty ();

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_arglist);
  ht.empty ();
#endif

  tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_fn);
  ht.empty ();

  if (memcmp (checksum_before_fn, checksum_after_fn, 16))
    fold_check_failed (fn, tem);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_arglist);
  ht.dispose ();

  if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
    fold_check_failed (NULL_TREE, tem);
#endif
  return tem;
}
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;

tree
fold_build1_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build1_loc (loc, code, type, op);

  END_FOLD_INIT;
  return result;
}

tree
fold_build2_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op0, tree op1)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build2_loc (loc, code, type, op0, op1);

  END_FOLD_INIT;
  return result;
}

tree
fold_build3_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op0, tree op1, tree op2)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build3_loc (loc, code, type, op0, op1, op2);

  END_FOLD_INIT;
  return result;
}

tree
fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
				       int nargs, tree *argarray)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);

  END_FOLD_INIT;
  return result;
}

#undef START_FOLD_INIT
#undef END_FOLD_INIT
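/* Usage sketch (hypothetical caller, not part of this file): folding a
   static initializer such as 1.0 / 3.0 without regard for run-time
   rounding or trapping behaviour, where ONE and THREE are assumed to
   be REAL_CST nodes:

     tree t = fold_build2_initializer_loc (input_location, RDIV_EXPR,
					   double_type_node, one, three);

   A plain fold_build2_loc would decline the same inexact
   simplification when -frounding-math is in effect; the macros above
   temporarily clear those flags and set folding_initializer.  */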
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */

static int
multiple_of_p (tree type, const_tree top, const_tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
	return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
	{
	  tree op1, t1;

	  op1 = TREE_OPERAND (top, 1);
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
	      > TREE_INT_CST_LOW (op1)
	      && TREE_INT_CST_HIGH (op1) == 0
	      && 0 != (t1 = fold_convert (type,
					  const_binop (LSHIFT_EXPR,
						       size_one_node,
						       op1)))
	      && !TREE_OVERFLOW (t1))
	    return multiple_of_p (type, t1, bottom);
	}
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
	  || (TYPE_PRECISION (type)
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
	return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case COND_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
	  || integer_zerop (bottom)
	  || (TYPE_UNSIGNED (type)
	      && (tree_int_cst_sgn (top) < 0
		  || tree_int_cst_sgn (bottom) < 0)))
	return 0;
      return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
					     top, bottom));

    default:
      return 0;
    }
}
/* Return true if CODE or TYPE is known to be non-negative. */

static bool
tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
{
  if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
      && truth_value_p (code))
    /* Truth values evaluate to 0 or 1, which is nonnegative unless we
       have a signed:1 type (where the value is -1 and 0).  */
    return true;

  return false;
}
/* Return true if (CODE OP0) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
				bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
	 ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!INTEGRAL_TYPE_P (type))
	return true;
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  *strict_overflow_p = true;
	  return true;
	}
      break;

    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
					    strict_overflow_p);

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (op0);
	tree outer_type = type;

	if (TREE_CODE (outer_type) == REAL_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_warnv_p (op0,
						    strict_overflow_p);
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      {
		if (TYPE_UNSIGNED (inner_type))
		  return true;
		return tree_expr_nonnegative_warnv_p (op0,
						      strict_overflow_p);
	      }
	  }
	else if (TREE_CODE (outer_type) == INTEGER_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_warnv_p (op0,
						    strict_overflow_p);
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
		     && TYPE_UNSIGNED (inner_type);
	  }
      }
      break;

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
				 tree op1, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type))
	return (tree_expr_nonnegative_warnv_p (op0,
					       strict_overflow_p)
		&& tree_expr_nonnegative_warnv_p (op1,
						  strict_overflow_p));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
	 both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TREE_CODE (op0) == NOP_EXPR
	  && TREE_CODE (op1) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    {
	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
				       TYPE_PRECISION (inner2)) + 1;
	      return prec < TYPE_PRECISION (type);
	    }
	}
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* x * x is always non-negative for floating point x
	     or without overflow.  */
	  if (operand_equal_p (op0, op1, 0)
	      || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
		  && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (type))
		*strict_overflow_p = true;
	      return true;
	    }
	}

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
	  && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
	{
	  tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
	    ? TREE_TYPE (TREE_OPERAND (op0, 0))
	    : TREE_TYPE (op0);
	  tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
	    ? TREE_TYPE (TREE_OPERAND (op1, 0))
	    : TREE_TYPE (op1);

	  bool unsigned0 = TYPE_UNSIGNED (inner0);
	  bool unsigned1 = TYPE_UNSIGNED (inner1);

	  if (TREE_CODE (op0) == INTEGER_CST)
	    unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;

	  if (TREE_CODE (op1) == INTEGER_CST)
	    unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;

	  if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
	      && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
	    {
	      unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
		? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
		: TYPE_PRECISION (inner0);

	      unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
		? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
		: TYPE_PRECISION (inner1);

	      return precision0 + precision1 < TYPE_PRECISION (type);
	    }
	}
      return false;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
					     strict_overflow_p)
	      || tree_expr_nonnegative_warnv_p (op1,
						strict_overflow_p));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
					     strict_overflow_p)
	      && tree_expr_nonnegative_warnv_p (op1,
						strict_overflow_p));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
					    strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case FIXED_CST:
      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));

    case COND_EXPR:
      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
					     strict_overflow_p)
	      && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
						strict_overflow_p));

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_call_nonnegative_warnv_p (tree type, tree fndecl,
			       tree arg0, tree arg1, bool *strict_overflow_p)
{
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
	CASE_FLT_FN (BUILT_IN_ACOS):
	CASE_FLT_FN (BUILT_IN_ACOSH):
	CASE_FLT_FN (BUILT_IN_CABS):
	CASE_FLT_FN (BUILT_IN_COSH):
	CASE_FLT_FN (BUILT_IN_ERFC):
	CASE_FLT_FN (BUILT_IN_EXP):
	CASE_FLT_FN (BUILT_IN_EXP10):
	CASE_FLT_FN (BUILT_IN_EXP2):
	CASE_FLT_FN (BUILT_IN_FABS):
	CASE_FLT_FN (BUILT_IN_FDIM):
	CASE_FLT_FN (BUILT_IN_HYPOT):
	CASE_FLT_FN (BUILT_IN_POW10):
	CASE_INT_FN (BUILT_IN_FFS):
	CASE_INT_FN (BUILT_IN_PARITY):
	CASE_INT_FN (BUILT_IN_POPCOUNT):
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
	/* Always true.  */
	return true;

	CASE_FLT_FN (BUILT_IN_SQRT):
	/* sqrt(-0.0) is -0.0.  */
	if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	  return true;
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_ASINH):
	CASE_FLT_FN (BUILT_IN_ATAN):
	CASE_FLT_FN (BUILT_IN_ATANH):
	CASE_FLT_FN (BUILT_IN_CBRT):
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_ERF):
	CASE_FLT_FN (BUILT_IN_EXPM1):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_FMOD):
	CASE_FLT_FN (BUILT_IN_FREXP):
	CASE_FLT_FN (BUILT_IN_ICEIL):
	CASE_FLT_FN (BUILT_IN_IFLOOR):
	CASE_FLT_FN (BUILT_IN_IRINT):
	CASE_FLT_FN (BUILT_IN_IROUND):
	CASE_FLT_FN (BUILT_IN_LCEIL):
	CASE_FLT_FN (BUILT_IN_LDEXP):
	CASE_FLT_FN (BUILT_IN_LFLOOR):
	CASE_FLT_FN (BUILT_IN_LLCEIL):
	CASE_FLT_FN (BUILT_IN_LLFLOOR):
	CASE_FLT_FN (BUILT_IN_LLRINT):
	CASE_FLT_FN (BUILT_IN_LLROUND):
	CASE_FLT_FN (BUILT_IN_LRINT):
	CASE_FLT_FN (BUILT_IN_LROUND):
	CASE_FLT_FN (BUILT_IN_MODF):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_SCALB):
	CASE_FLT_FN (BUILT_IN_SCALBLN):
	CASE_FLT_FN (BUILT_IN_SCALBN):
	CASE_FLT_FN (BUILT_IN_SIGNBIT):
	CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
	CASE_FLT_FN (BUILT_IN_SINH):
	CASE_FLT_FN (BUILT_IN_TANH):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	/* True if the 1st argument is nonnegative.  */
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_FMAX):
	/* True if the 1st OR 2nd arguments are nonnegative.  */
	return (tree_expr_nonnegative_warnv_p (arg0,
					       strict_overflow_p)
		|| (tree_expr_nonnegative_warnv_p (arg1,
						   strict_overflow_p)));

	CASE_FLT_FN (BUILT_IN_FMIN):
	/* True if the 1st AND 2nd arguments are nonnegative.  */
	return (tree_expr_nonnegative_warnv_p (arg0,
					       strict_overflow_p)
		&& (tree_expr_nonnegative_warnv_p (arg1,
						   strict_overflow_p)));

	CASE_FLT_FN (BUILT_IN_COPYSIGN):
	/* True if the 2nd argument is nonnegative.  */
	return tree_expr_nonnegative_warnv_p (arg1,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_POWI):
	/* True if the 1st argument is nonnegative or the second
	   argument is an even integer.  */
	if (TREE_CODE (arg1) == INTEGER_CST
	    && (TREE_INT_CST_LOW (arg1) & 1) == 0)
	  return true;
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_POW):
	/* True if the 1st argument is nonnegative or the second
	   argument is an even integer valued real.  */
	if (TREE_CODE (arg1) == REAL_CST)
	  {
	    REAL_VALUE_TYPE c;
	    HOST_WIDE_INT n;

	    c = TREE_REAL_CST (arg1);
	    n = real_to_integer (&c);
	    if ((n & 1) == 0)
	      {
		REAL_VALUE_TYPE cint;
		real_from_integer (&cint, VOIDmode, n,
				   n < 0 ? -1 : 0, 0);
		if (real_identical (&c, &cint))
		  return true;
	      }
	  }
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

      default:
	break;
      }
  return tree_simple_nonnegative_warnv_p (CALL_EXPR,
					  type);
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

static bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code = TREE_CODE (t);
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (code)
    {
    case TARGET_EXPR:
      {
	tree temp = TARGET_EXPR_SLOT (t);
	t = TARGET_EXPR_INITIAL (t);

	/* If the initializer is non-void, then it's a normal expression
	   that will be assigned to the slot.  */
	if (!VOID_TYPE_P (t))
	  return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);

	/* Otherwise, the initializer sets the slot in some way.  One common
	   way is an assignment statement at the end of the initializer.  */
	while (1)
	  {
	    if (TREE_CODE (t) == BIND_EXPR)
	      t = expr_last (BIND_EXPR_BODY (t));
	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
		     || TREE_CODE (t) == TRY_CATCH_EXPR)
	      t = expr_last (TREE_OPERAND (t, 0));
	    else if (TREE_CODE (t) == STATEMENT_LIST)
	      t = expr_last (t);
	    else
	      break;
	  }
	if (TREE_CODE (t) == MODIFY_EXPR
	    && TREE_OPERAND (t, 0) == temp)
	  return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
						strict_overflow_p);

	return false;
      }

    case CALL_EXPR:
      {
	tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
	tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;

	return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
					      get_callee_fndecl (t),
					      arg0,
					      arg1,
					      strict_overflow_p);
      }
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
					    strict_overflow_p);
    case BIND_EXPR:
      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
					    strict_overflow_p);
    case SAVE_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
					    strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code;
  if (t == error_mark_node)
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p);

    case tcc_unary:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p);
    case TRUTH_NOT_EXPR:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
    }
}
/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */

bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-negative"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
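/* Usage sketch (hypothetical, not part of the original file): for a
   signed expression such as x * x,

     bool nn = tree_expr_nonnegative_p (expr);

   returns true via the MULT_EXPR case of
   tree_binary_nonnegative_warnv_p, assuming signed overflow is treated
   as undefined (no -fwrapv); because the answer rests on that
   assumption, the -Wstrict-overflow note above is emitted first.  */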
/* Return true when (CODE OP0) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
			    bool *strict_overflow_p)
{
  switch (code)
    {
    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
					strict_overflow_p);

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (op0);
	tree outer_type = type;

	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
		&& tree_expr_nonzero_warnv_p (op0,
					      strict_overflow_p));
      }
      break;

    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
					strict_overflow_p);

    default:
      break;
    }

  return false;
}
/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonzero_warnv_p (enum tree_code code,
			     tree type,
			     tree op0,
			     tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* With the presence of negative values it is hard
	     to say something.  */
	  sub_strict_overflow_p = false;
	  if (!tree_expr_nonnegative_warnv_p (op0,
					      &sub_strict_overflow_p)
	      || !tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	    return false;
	  /* One of operands must be positive and the other non-negative.  */
	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
	     overflows, on a twos-complement machine the sum of two
	     nonnegative numbers can never be zero.  */
	  return (tree_expr_nonzero_warnv_p (op0,
					     strict_overflow_p)
		  || tree_expr_nonzero_warnv_p (op1,
						strict_overflow_p));
	}
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  if (tree_expr_nonzero_warnv_p (op0,
					 strict_overflow_p)
	      && tree_expr_nonzero_warnv_p (op1,
					    strict_overflow_p))
	    {
	      *strict_overflow_p = true;
	      return true;
	    }
	}
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (op1,
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;

	  /* When both operands are nonzero, then MAX must be too.  */
	  if (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p))
	    return true;

	  /* MAX where operand 0 is positive is positive.  */
	  return tree_expr_nonnegative_warnv_p (op0,
						strict_overflow_p);
	}
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1,
					  &sub_strict_overflow_p)
	       && tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p)
	      || tree_expr_nonzero_warnv_p (op0,
					    strict_overflow_p));

    default:
      break;
    }

  return false;
}
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
	tree base = TREE_OPERAND (t, 0);
	if (!DECL_P (base))
	  base = get_base_address (base);

	if (!base)
	  return false;

	/* Weak declarations may link to NULL.  Other things may also be NULL
	   so protect with -fdelete-null-pointer-checks; but not variables
	   allocated on the stack.  */
	if (DECL_P (base)
	    && (flag_delete_null_pointer_checks
		|| (DECL_CONTEXT (base)
		    && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
		    && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
	  return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);

	/* Constants are never weak.  */
	if (CONSTANT_CLASS_P (base))
	  return true;

	return false;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    default:
      break;
    }

  return false;
}
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					strict_overflow_p);

    case CALL_EXPR:
      return alloca_call_p (t);

    default:
      break;
    }
  return false;
}
/* Return true when T is an address and is known to be nonzero.
   Handle warnings about undefined signed overflow.  */

static bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-zero"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
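/* Usage sketch (hypothetical, not part of the original file): for the
   address of a non-weak local variable,

     bool nz = tree_expr_nonzero_p (addr);

   returns true through the ADDR_EXPR case of
   tree_single_nonzero_warnv_p, which is what lets callers delete
   null-pointer checks against such an address.  */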
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
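/* Usage sketch (hypothetical, not part of the original file): these
   wrappers suit callers that only care about fully constant results,
   e.g.

     tree sum = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
					 build_int_cst (integer_type_node, 2),
					 build_int_cst (integer_type_node, 3));

   yields an INTEGER_CST of 5, while folding 2 + x would return
   NULL_TREE here rather than a non-constant PLUS_EXPR.  */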
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;
      location_t loc = EXPR_LOCATION (exp);

      if (TREE_CODE (exp) == INDIRECT_REF)
	string = string_constant (exp1, &index);
      else
	{
	  tree low_bound = array_ref_low_bound (exp);
	  index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));

	  /* Optimize the special-case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion, (ARRAY
	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
	  if (! integer_zerop (low_bound))
	    index = size_diffop_loc (loc, index,
				     fold_convert_loc (loc, sizetype,
						       low_bound));

	  string = exp1;
	}

      if (string
	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
	  && TREE_CODE (string) == STRING_CST
	  && TREE_CODE (index) == INTEGER_CST
	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
	      == MODE_INT)
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
	return build_int_cst_type (TREE_TYPE (exp),
				   (TREE_STRING_POINTER (string)
				    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}
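/* Worked example (an illustrative sketch, not part of the original
   file): for the C expression "abc"[1], EXP is an ARRAY_REF of a
   STRING_CST with index 1.  The checks above all pass (an in-bounds
   INTEGER_CST index into a single-byte integer-mode element type), so
   the call returns build_int_cst_type of the character 'b'.  */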
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	double_int val = tree_to_double_int (arg0);
	bool overflow;
	val = val.neg_with_overflow (&overflow);
	t = force_fit_type_double (type, val, 1,
				   (overflow | TREE_OVERFLOW (arg0))
				   && !TYPE_UNSIGNED (type));
	break;
      }

    case REAL_CST:
      t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
	FIXED_VALUE_TYPE f;
	bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
					    &(TREE_FIXED_CST (arg0)), NULL,
					    TYPE_SATURATING (type));
	t = build_fixed (type, f);
	/* Propagate overflow flags.  */
	if (overflow_p | TREE_OVERFLOW (arg0))
	  TREE_OVERFLOW (t) = 1;
	break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	double_int val = tree_to_double_int (arg0);

	/* If the value is unsigned or non-negative, then the absolute value
	   is the same as the ordinary value.  */
	if (TYPE_UNSIGNED (type)
	    || !val.is_negative ())
	  t = arg0;

	/* If the value is negative, then the absolute value is
	   its negation.  */
	else
	  {
	    bool overflow;
	    val = val.neg_with_overflow (&overflow);
	    t = force_fit_type_double (type, val, -1,
				       overflow | TREE_OVERFLOW (arg0));
	  }
      }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
	t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      else
	t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (const_tree arg0, tree type)
{
  double_int val;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  val = ~tree_to_double_int (arg0);
  return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
}
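/* Worked example (an illustrative sketch, not part of the original
   file): for a 32-bit unsigned ARG0 holding 0x0000ffff,

     tree t = fold_not_const (arg0, uint32_type_node);

   computes ~0x0000ffff in double_int arithmetic and force-fits the
   result back into the 32-bit type, yielding 0xffff0000.  */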
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case ORDERED_EXPR:
	      result = 0;
	      break;

	    case NE_EXPR:
	    case UNORDERED_EXPR:
	    case UNLT_EXPR:
	    case UNLE_EXPR:
	    case UNGT_EXPR:
	    case UNGE_EXPR:
	    case UNEQ_EXPR:
	      result = 1;
	      break;

	    case LT_EXPR:
	    case LE_EXPR:
	    case GT_EXPR:
	    case GE_EXPR:
	    case LTGT_EXPR:
	      if (flag_trapping_math)
		return NULL_TREE;
	      result = 0;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  return constant_boolean_node (result, type);
	}

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
					  TREE_REALPART (op0),
					  TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
					  TREE_IMAGPART (op0),
					  TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
	return NULL_TREE;
    }

  if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
    {
      unsigned count = VECTOR_CST_NELTS (op0);
      tree *elts = XALLOCAVEC (tree, count);
      gcc_assert (VECTOR_CST_NELTS (op1) == count
		  && TYPE_VECTOR_SUBPARTS (type) == count);

      for (unsigned i = 0; i < count; i++)
	{
	  tree elem_type = TREE_TYPE (type);
	  tree elem0 = VECTOR_CST_ELT (op0, i);
	  tree elem1 = VECTOR_CST_ELT (op1, i);

	  tree tem = fold_relational_const (code, elem_type,
					    elem0, elem1);

	  if (tem == NULL_TREE)
	    return NULL_TREE;

	  elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
	}

      return build_vector (type, elts);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     Otherwise return T.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
	result = INT_CST_LT_UNSIGNED (op0, op1);
      else
	result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
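/* Worked example (an illustrative sketch, not part of the original
   file): folding 5 >= 3 first rewrites GE to LT with INVERT set, then
   INT_CST_LT (5, 3) yields 0, and the final inversion produces
   constant_boolean_node (1, type), i.e. true.  Only LT and EQ ever
   reach the INTEGER_CST code above.  */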
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check the expression inside the
     return and the right hand side of the modify expression inside the
     return.  If either doesn't have side effects set, we don't need to
     wrap the expression in a cleanup point expression.  Note we don't
     check the left hand side of the modify because it should always be
     a return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
	return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
	return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
	return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
	{
	  tree fop = fold_read_from_constant_string (op);
	  if (fop)
	    return fop;
	  else
	    return op;
	}
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && type == TREE_TYPE (optype)
	       && (!in_gimple_form
		   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (in_gimple_form
	      && TREE_CODE (min_val) != INTEGER_CST)
	    return NULL_TREE;
	  return build4_loc (loc, ARRAY_REF, type, op, min_val,
			     NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && type == TREE_TYPE (optype))
	return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
				  index);
	}
    }

  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
	{
	  tree op00type;
	  op00 = TREE_OPERAND (op00, 0);
	  op00type = TREE_TYPE (op00);

	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
	  if (TREE_CODE (op00type) == VECTOR_TYPE
	      && type == TREE_TYPE (op00type))
	    {
	      HOST_WIDE_INT offset = tree_low_cst (op01, 0);
	      tree part_width = TYPE_SIZE (type);
	      unsigned HOST_WIDE_INT part_widthi
		= tree_low_cst (part_width, 0) / BITS_PER_UNIT;
	      unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	      tree index = bitsize_int (indexi);

	      if (offset / part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
		return fold_build3_loc (loc,
					BIT_FIELD_REF, type, op00,
					part_width, index);
	    }
	  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree size = TYPE_SIZE_UNIT (type);
	      if (tree_int_cst_equal (size, op01))
		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
	    }
	  /* ((foo *)&fooarray)[1] => fooarray[1] */
	  else if (TREE_CODE (op00type) == ARRAY_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree type_domain = TYPE_DOMAIN (op00type);
	      tree min_val = size_zero_node;
	      if (type_domain && TYPE_MIN_VALUE (type_domain))
		min_val = TYPE_MIN_VALUE (type_domain);
	      op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
				     TYPE_SIZE_UNIT (type));
	      op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
	      return build4_loc (loc, ARRAY_REF, type, op00, op01,
				 NULL_TREE, NULL_TREE);
	    }
	}
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
	  || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
	  && TREE_CODE (min_val) != INTEGER_CST)
	return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
			 NULL_TREE);
    }

  return NULL_TREE;
}
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
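/* Usage sketch (hypothetical, not part of the original file): given a
   tree P of type "double *" that is literally the address expression
   &a[0],

     tree deref = build_fold_indirect_ref_loc (input_location, p);

   goes through fold_indirect_ref_1, hits the "*&p => p" case, and
   returns the ARRAY_REF a[0] directly instead of wrapping P in a new
   INDIRECT_REF.  */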
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
	{
	  double_int val = tree_to_double_int (value);
	  bool overflow_p;

	  if ((val.low & (divisor - 1)) == 0)
	    return value;

	  overflow_p = TREE_OVERFLOW (value);
	  val.low &= ~(divisor - 1);
	  val.low += divisor;
	  if (val.low == 0)
	    {
	      val.high++;
	      if (val.high == 0)
		overflow_p = true;
	    }

	  return force_fit_type_double (TREE_TYPE (value), val,
					-1, overflow_p);
	}
      else
	{
	  tree t;

	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
	  t = build_int_cst (TREE_TYPE (value), -divisor);
	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
	}
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
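/* Worked example (an illustrative sketch, not part of the original
   file): rounding a non-constant sizetype value V up to 8 bytes takes
   the power-of-two path and builds the usual bit trick

     (V + 7) & -8

   as two size_binop_loc calls.  A constant V such as 13 is instead
   rounded directly to 16 through the double_int arithmetic above.  */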
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep,
				  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
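/* Usage sketch (hypothetical, not part of the original file): for
   E1 = &a[4] and E2 = &a[1] with 4-byte elements,

     HOST_WIDE_INT diff;
     if (ptr_difference_const (e1, e2, &diff))
       gcc_assert (diff == 12);

   both addresses split to the same core &a with constant bit
   positions, so the byte difference falls out of the bitpos
   arithmetic above.  */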
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
	return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
				arg0 ? arg0 : TREE_OPERAND (exp, 0),
				arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
	return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp),
				arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
	return fold_build3_loc (loc,
				COND_EXPR, TREE_TYPE (exp),
				TREE_OPERAND (exp, 0),
				arg0 ? arg0 : TREE_OPERAND (exp, 1),
				arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
	const enum built_in_function fcode = builtin_mathfn_code (exp);
	switch (fcode)
	  {
	  CASE_FLT_FN (BUILT_IN_COPYSIGN):
	    /* Strip copysign function call, return the 1st argument. */
	    arg0 = CALL_EXPR_ARG (exp, 0);
	    arg1 = CALL_EXPR_ARG (exp, 1);
	    return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

	  default:
	    /* Strip sign ops from the argument of "odd" math functions.  */
	    if (negate_mathfn_p (fcode))
	      {
		arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
		if (arg0)
		  return build_call_expr_loc (loc, get_callee_fndecl (exp),
					      1, arg0);
	      }
	    break;
	  }
      }
      break;

    default:
      break;
    }
  return NULL;
}