/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
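/* Illustrative aside, not part of GCC: the core idea of this file --
   collapsing a constant subtree into a single node -- shown on a toy
   expression type.  All names below (toy_expr, toy_fold, ...) are
   hypothetical and exist only for this sketch; the block is compiled
   out with #if 0.  */
#if 0
#include <stddef.h>

enum toy_code { TOY_CONST, TOY_PLUS, TOY_MULT };

struct toy_expr
{
  enum toy_code code;
  long value;                   /* Valid when code == TOY_CONST.  */
  struct toy_expr *op0, *op1;   /* Valid for the binary codes.  */
};

/* Fold EXPR: once both operands have folded to constants, rewrite
   EXPR in place into a single TOY_CONST node, the way fold rewrites
   a GCC tree.  */
static void
toy_fold (struct toy_expr *expr)
{
  if (expr->code == TOY_CONST)
    return;
  toy_fold (expr->op0);
  toy_fold (expr->op1);
  if (expr->op0->code == TOY_CONST && expr->op1->code == TOY_CONST)
    {
      expr->value = (expr->code == TOY_PLUS
                     ? expr->op0->value + expr->op1->value
                     : expr->op0->value * expr->op1->value);
      expr->code = TOY_CONST;
      expr->op0 = expr->op1 = NULL;
    }
}
#endif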
#include "coretypes.h"
#include "diagnostic-core.h"
#include "hash-table.h"
#include "langhooks.h"
#include "tree-flow.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
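/* Illustrative aside, not part of GCC: with this encoding -- LT, EQ
   and GT (and UNORD when NaNs exist) as independent bits -- the
   conjunction or disjunction of two comparisons on the same operands
   is just the bitwise AND or OR of their codes.  E.g. (a < b) ||
   (a == b) is LT|EQ, i.e. LE.  A self-contained sketch with
   hypothetical names, compiled out with #if 0:  */
#if 0
#include <assert.h>

enum { CMP_LT = 1, CMP_EQ = 2, CMP_GT = 4 };
#define CMP_LE (CMP_LT | CMP_EQ)
#define CMP_NE (CMP_LT | CMP_GT)

static int
classify (int a, int b)
{
  return a < b ? CMP_LT : a == b ? CMP_EQ : CMP_GT;
}

static void
compcode_demo (void)
{
  /* (a <= b) && (a != b) collapses to (a < b): LE & NE == LT.  */
  assert ((CMP_LE & CMP_NE) == CMP_LT);
  /* (a < b) || (a == b) collapses to (a <= b): LT | EQ == LE.  */
  assert ((CMP_LT | CMP_EQ) == CMP_LE);
  /* A comparison holds iff its bit mask covers the actual ordering.  */
  assert (classify (1, 2) & CMP_LE);
}
#endif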
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
				HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
				    HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
				    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
					tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_mathfn_compare (location_t,
				 enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
/* Similar to protected_set_expr_location, but never modify x in place,
   if location can and needs to be set, unshare it.  */

static tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is according to operand two, that
     does the correct thing for POINTER_PLUS_EXPR where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));

  quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
					  uns, code, &rem);

  if (rem.is_zero ())
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
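/* Illustrative aside, not part of GCC: the "divide only when the
   remainder is zero" pattern above, on plain integers.  The helper
   name is hypothetical; the block is compiled out.  It returns 1 and
   stores the quotient only for an exact division, mirroring the
   NULL_TREE result otherwise.  */
#if 0
static int
div_if_zero_remainder_demo (long a, long b, long *quot)
{
  if (b == 0 || a % b != 0)
    return 0;			/* Not exact: caller keeps the original.  */
  *quot = a / b;
  return 1;
}
#endif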
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}
/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
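/* Illustrative aside, not part of GCC: the deferral machinery above is
   just a nesting counter plus a single saved message, keeping only the
   most severe pending warning.  A minimal self-contained sketch of the
   same pattern with hypothetical names, compiled out:  */
#if 0
#include <stdio.h>

static int deferring;			/* Nesting depth, like the counter above.  */
static const char *pending_msg;		/* At most one saved warning.  */
static int pending_level;

static void
emit_warning_now (const char *msg)
{
  fprintf (stderr, "warning: %s\n", msg);
}

static void
defer_or_emit_warning (const char *msg, int level)
{
  if (deferring > 0)
    {
      /* Remember only the lowest (most important) level seen.  */
      if (pending_msg == NULL || level < pending_level)
	{
	  pending_msg = msg;
	  pending_level = level;
	}
    }
  else
    emit_warning_now (msg);
}
#endif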
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
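/* Illustrative aside, not part of GCC: "odd" here is the usual
   mathematical sense, f(-x) == -f(x), which is what lets the folder
   rewrite -f(x) as f(-x).  A quick libm self-check with a tolerance
   (actual libm results may not be bit-identical), compiled out:  */
#if 0
#include <math.h>
#include <assert.h>

static void
odd_function_demo (void)
{
  double x = 0.73;
  assert (fabs (sin (-x) + sin (x)) < 1e-15);	/* sin is odd.  */
  assert (trunc (-x) == -trunc (x));		/* trunc is odd too.  */
  /* cos is NOT odd -- cos (-x) == cos (x) -- so it is absent from the
     list above.  */
}
#endif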
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
	return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
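/* Illustrative aside, not part of GCC: for an N-bit two's complement
   type the only value whose negation overflows is the minimum,
   -2^(N-1), which is exactly the "val != 1 << (prec - 1)" test above.
   The plain-C equivalent for a 32-bit int, compiled out:  */
#if 0
#include <limits.h>

static int
may_negate_int_without_overflow (int v)
{
  /* -INT_MIN is not representable; every other int negates safely.  */
  return v != INT_MIN;
}
#endif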
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	     && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
	break;
      return negate_expr_p (TREE_OPERAND (t, 1))
	     || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || !TYPE_OVERFLOW_TRAPS (type))
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
	return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  const char * const warnmsg = G_("assuming signed overflow does not "
					  "occur when negating a division");
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || integer_onep (tem)))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				      TREE_OPERAND (t, 0), negate_expr (tem));
	    }
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				      negate_expr (tem), TREE_OPERAND (t, 1));
	    }
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
		   int overflowable)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case RSHIFT_EXPR:
      res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      res = op1.add_with_sign (op2, false, &overflow);
      break;

    case MINUS_EXPR:
      res = op1.sub_with_overflow (op2, &overflow);
      break;

    case MULT_EXPR:
      res = op1.mul_with_sign (op2, false, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
	{
	  bool dummy_overflow;
	  if (TYPE_PRECISION (type) != 2 * HOST_BITS_PER_WIDE_INT)
	    return NULL_TREE;
	  op1.wide_mul_with_sign (op2, uns, &res, &dummy_overflow);
	}
      else
	{
	  bool dummy_overflow;
	  /* MULT_HIGHPART_EXPR can't ever overflow, as the multiplication
	     is performed in twice the precision of arguments.  */
	  tmp = op1.mul_with_sign (op2, false, &dummy_overflow);
	  res = tmp.rshift (TYPE_PRECISION (type),
			    2 * TYPE_PRECISION (type), !uns);
	}
      break;
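      /* Illustrative aside, not part of GCC: the reason no overflow
	 check is needed above -- the product of two N-bit values always
	 fits in 2*N bits, and the high part is just the top half.  The
	 same trick for 32-bit operands via 64-bit arithmetic
	 (hypothetical helper, compiled out):  */
#if 0
#include <stdint.h>

static int32_t
mult_highpart_32 (int32_t a, int32_t b)
{
  /* The 64-bit product cannot overflow for 32-bit inputs; the shift
     keeps the upper 32 bits, i.e. the MULT_HIGHPART result.  */
  return (int32_t) (((int64_t) a * (int64_t) b) >> 32);
}
#endif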
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    op1.low += op2.low - 1;

	  res.low = op1.low / op2.low, res.high = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (op2.is_zero ())
	return NULL_TREE;
      if (op2.is_one ())
	{
	  res = op1;
	  break;
	}
      if (op1 == op2 && !op1.is_zero ())
	{
	  res = double_int_one;
	  break;
	}
      res = op1.divmod_with_overflow (op2, uns, code, &tmp, &overflow);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    op1.low += op2.low - 1;
	  res.low = op1.low % op2.low, res.high = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (op2.is_zero ())
	return NULL_TREE;
      tmp = op1.divmod_with_overflow (op2, uns, code, &res, &overflow);
      break;

    case MIN_EXPR:
      res = op1.min (op2, uns);
      break;

    case MAX_EXPR:
      res = op1.max (op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
			     (!uns && overflow)
			     | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  f2.data.high = TREE_INT_CST_HIGH (arg2);
	  f2.data.low = TREE_INT_CST_LOW (arg2);
	  f2.mode = SImode;
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fall through.  */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to minimize
		 overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }
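  /* Illustrative aside, not part of GCC: the "wide" branch above is
     essentially Smith's algorithm -- divide through by the
     larger-magnitude component of the divisor so that intermediate
     products stay in range.  The same computation on plain doubles
     (hypothetical helper, compiled out):  */
#if 0
#include <math.h>

static void
complex_div_wide (double ar, double ai, double br, double bi,
		  double *tr, double *ti)
{
  if (fabs (br) >= fabs (bi))
    {
      double ratio = bi / br;		/* |ratio| <= 1 by construction.  */
      double div = br + bi * ratio;
      *tr = (ar + ai * ratio) / div;
      *ti = (ai - ar * ratio) / div;
    }
  else
    {
      double ratio = br / bi;
      double div = bi + br * ratio;
      *tr = (ar * ratio + ai) / div;
      *ti = (ai * ratio - ar) / div;
    }
}
#endif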
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  elts[i] = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }
  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      if (code == VEC_LSHIFT_EXPR
	  || code == VEC_RSHIFT_EXPR)
	{
	  if (!host_integerp (arg2, 1))
	    return NULL_TREE;

	  unsigned HOST_WIDE_INT shiftc = tree_low_cst (arg2, 1);
	  unsigned HOST_WIDE_INT outerc = tree_low_cst (TYPE_SIZE (type), 1);
	  unsigned HOST_WIDE_INT innerc
	    = tree_low_cst (TYPE_SIZE (TREE_TYPE (type)), 1);
	  if (shiftc >= outerc || (shiftc % innerc) != 0)
	    return NULL_TREE;
	  int offset = shiftc / innerc;
	  if (code == VEC_LSHIFT_EXPR)
	    offset = -offset;
	  tree zero = build_zero_cst (TREE_TYPE (type));
	  for (i = 0; i < count; i++)
	    {
	      if (i + offset < 0 || i + offset >= count)
		elts[i] = zero;
	      else
		elts[i] = VECTOR_CST_ELT (arg1, i + offset);
	    }
	}
      else
	for (i = 0; i < count; i++)
	  {
	    tree elem1 = VECTOR_CST_ELT (arg1, i);

	    elts[i] = const_binop (code, elem1, arg2);

	    /* It is possible that const_binop cannot handle the given
	       code and returns NULL_TREE.  */
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}

/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
			     !POINTER_TYPE_P (TREE_TYPE (arg1)),
			     (TREE_INT_CST_HIGH (arg1) < 0
			      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  double_int val;
  REAL_VALUE_TYPE r;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	{
	  overflow = 1;
	  val = tree_to_double_int (lt);
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	    {
	      overflow = 1;
	      val = tree_to_double_int (ut);
	    }
	}
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
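/* Illustrative aside, not part of GCC: the Java-style semantics
   described above, expressed directly on doubles (hypothetical helper,
   compiled out).  NaN maps to zero, out-of-range values clamp to the
   nearest representable bound, everything else truncates toward
   zero.  */
#if 0
#include <stdint.h>
#include <math.h>

static int32_t
double_to_int32_saturating (double x)
{
  if (isnan (x))
    return 0;			/* NaN is mapped to zero.  */
  if (x <= (double) INT32_MIN)
    return INT32_MIN;		/* Saturate downward.  */
  if (x >= (double) INT32_MAX)
    return INT32_MAX;		/* Saturate upward.  */
  return (int32_t) x;		/* In range: truncate toward zero.  */
}
#endif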
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  enum machine_mode mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
			  HOST_BITS_PER_DOUBLE_INT,
			  SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
				HOST_BITS_PER_DOUBLE_INT,
				SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     Check whether the fractional bits are nonzero, and if so add 1
     to temp.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
			     (temp.is_negative ()
			      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	   && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
	   && REAL_VALUE_ISINF (value)
	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
			      TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
				       TREE_INT_CST (arg1),
				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
				       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
					&TREE_REAL_CST (arg1),
					TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}
/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
	      && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
	  && (TYPE_ADDR_SPACE (TREE_TYPE (type))
	      != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
	return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* Fall through.  */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, REALPART_EXPR,
						  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == FIXED_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

	case REAL_TYPE:
	  return fold_build1_loc (loc, NOP_EXPR, type, arg);

	case FIXED_POINT_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
	  || TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    goto fold_convert_exit;
	}

      switch (TREE_CODE (orig))
	{
	case FIXED_POINT_TYPE:
	case INTEGER_TYPE:
	case ENUMERAL_TYPE:
	case BOOLEAN_TYPE:
	case REAL_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	case REAL_TYPE:
	case FIXED_POINT_TYPE:
	  return fold_build2_loc (loc, COMPLEX_EXPR, type,
				  fold_convert_loc (loc, TREE_TYPE (type), arg),
				  fold_convert_loc (loc, TREE_TYPE (type),
						    integer_zero_node));
	case COMPLEX_TYPE:
	  {
	    tree rpart, ipart;

	    if (TREE_CODE (arg) == COMPLEX_EXPR)
	      {
		rpart = fold_convert_loc (loc, TREE_TYPE (type),
					  TREE_OPERAND (arg, 0));
		ipart = fold_convert_loc (loc, TREE_TYPE (type),
					  TREE_OPERAND (arg, 1));
		return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	      }

	    arg = save_expr (arg);
	    rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	    ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
	    rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
	    ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
	    return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	  }

	default:
	  gcc_unreachable ();
	}

    case VECTOR_TYPE:
      if (integer_zerop (arg))
	return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
		  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
	return fold_build1_loc (loc, NOP_EXPR, type, arg);
      gcc_unreachable ();
    }
 fold_convert_exit:
  return protected_set_expr_location_unshare (tem, loc);
}
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (const_tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case LABEL_DECL:
    case FUNCTION_DECL:
    case SSA_NAME:

    case COMPONENT_REF:
    case MEM_REF:
    case INDIRECT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
    case OBJ_TYPE_REF:

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case SAVE_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case TARGET_EXPR:
    case COND_EXPR:
    case BIND_EXPR:
      break;

    default:
      /* Assume the worst for front-end tree codes.  */
      if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
	break;
      return false;
    }

  return true;
}
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue_loc (location_t loc, tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;
  return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
}
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue_loc (location_t loc, tree x)
{
  if (pedantic_lvalues)
    return non_lvalue_loc (loc, x);

  return protected_set_expr_location_unshare (x, loc);
}
/* Given a tree comparison code, return the code that is the logical inverse.
   It is generally not safe to do this for floating-point comparisons, except
   for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
   ERROR_MARK in this case.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
      && code != ORDERED_EXPR && code != UNORDERED_EXPR)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
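/* Illustrative aside, not part of GCC: why the NaN-aware inverses
   matter.  With IEEE NaNs, !(x < y) is NOT x >= y; it is the
   unordered variant UNGE, "x >= y or unordered".  A compiled-out
   self-check:  */
#if 0
#include <math.h>
#include <assert.h>

static void
invert_comparison_demo (void)
{
  double x = NAN, y = 1.0;
  /* Both orderings are false for a NaN operand...  */
  assert (!(x < y) && !(x >= y));
  /* ...so inverting LT to GE would change the result here; the correct
     inverse of LT is "not less than", i.e. UNGE, which is true.  */
  assert (!(x < y));
  /* isunordered is how C spells the UNORD bit.  */
  assert (isunordered (x, y));
}
#endif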
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case LTGT_EXPR:
    case UNEQ_EXPR:
      return code;

    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    case UNGT_EXPR:
      return UNLT_EXPR;
    case UNGE_EXPR:
      return UNLE_EXPR;
    case UNLT_EXPR:
      return UNGT_EXPR;
    case UNLE_EXPR:
      return UNGE_EXPR;

    default:
      gcc_unreachable ();
    }
}
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

tree
combine_comparisons (location_t loc,
                     enum tree_code code, enum tree_code lcode,
                     enum tree_code rcode, tree truth_type,
                     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  int compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
         which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
        compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
        compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
         under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
                   && (lcompcode != COMPCODE_EQ)
                   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
                   && (rcompcode != COMPCODE_EQ)
                   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
                  && (compcode != COMPCODE_EQ)
                  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
         such that the RHS, if evaluated, will never trap.  For
         example, in ORD (x, y) && (x < y), we evaluate the RHS only
         if neither x nor y is NaN.  (This is a mixed blessing: for
         example, the expression above will never trap, hence
         optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
          || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
        rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
         trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
          && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
        return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
        return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    {
      enum tree_code tcode;

      tcode = compcode_to_comparison ((enum comparison_code) compcode);
      return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
    }
}
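/* Worked example (illustrative, not part of the original sources): given
   identical operands,

     combine_comparisons (loc, TRUTH_ORIF_EXPR, LT_EXPR, EQ_EXPR,
                          boolean_type_node, x, y)

   computes COMPCODE_LT | COMPCODE_EQ = COMPCODE_LE and so folds
   "x < y || x == y" into "x <= y", while an AND of LT and GT yields
   COMPCODE_FALSE and folds to constant false.  When the operands are
   floating-point and -ftrapping-math is active, the trap checks above
   may instead force a NULL_TREE (no folding) result.  */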
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */

int
operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
      || TREE_TYPE (arg0) == error_mark_node
      || TREE_TYPE (arg1) == error_mark_node)
    return 0;

  /* Similar, if either does not have a type (like a released SSA name),
     they aren't equal.  */
  if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
    return 0;

  /* Check equality of integer constants before bailing out due to
     precision differences.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    return tree_int_cst_equal (arg0, arg1);

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  As pointers
     strictly don't have a signedness, require either two pointers or
     two non-pointers as well.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
      || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* We cannot consider pointers to different address space equal.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
      && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
          != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
    return 0;

  /* If both types don't have the same precision, then it is not safe
     to strip NOPs.  */
  if (element_precision (TREE_TYPE (arg0))
      != element_precision (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* In case both args are comparisons but with different comparison
     code, try to swap the comparison operands of one arg to produce
     a match and compare that variant.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      && COMPARISON_CLASS_P (arg0)
      && COMPARISON_CLASS_P (arg1))
    {
      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));

      if (TREE_CODE (arg0) == swap_code)
        return operand_equal_p (TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg1, 1), flags)
               && operand_equal_p (TREE_OPERAND (arg0, 1),
                                   TREE_OPERAND (arg1, 0), flags);
    }

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
         Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.
     If we are taking an invariant address of two identical objects
     they are necessarily equal as well.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
          || (flags & OEP_CONSTANT_ADDRESS_OF)
          || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
        return tree_int_cst_equal (arg0, arg1);

      case FIXED_CST:
        return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
                                       TREE_FIXED_CST (arg1));

      case REAL_CST:
        if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
                                   TREE_REAL_CST (arg1)))
          return 1;

        if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
          {
            /* If we do not distinguish between signed and unsigned zero,
               consider them equal.  */
            if (real_zerop (arg0) && real_zerop (arg1))
              return 1;
          }
        return 0;

      case VECTOR_CST:
        {
          unsigned i;

          if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
            return 0;

          for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
            if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
                                  VECTOR_CST_ELT (arg1, i), flags))
              return 0;

          return 1;
        }

      case COMPLEX_CST:
        return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
                                 flags)
                && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
                                    flags));

      case STRING_CST:
        return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
                && ! memcmp (TREE_STRING_POINTER (arg0),
                             TREE_STRING_POINTER (arg1),
                             TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
        return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
                                TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
                                ? OEP_CONSTANT_ADDRESS_OF : 0);
      default:
        break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;

/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, then both
   must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
                                    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N) \
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
        {
        CASE_CONVERT:
        case FIX_TRUNC_EXPR:
          if (TYPE_UNSIGNED (TREE_TYPE (arg0))
              != TYPE_UNSIGNED (TREE_TYPE (arg1)))
            return 0;
          break;
        default:
          break;
        }

      return OP_SAME (0);

    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
        return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 1), flags)
              && operand_equal_p (TREE_OPERAND (arg0, 1),
                                  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
         dereferencing contain a side effect, these cannot be equal,
         but their addresses can be.  */
      if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
          && (TREE_SIDE_EFFECTS (arg0)
              || TREE_SIDE_EFFECTS (arg1)))
        return 0;

      switch (TREE_CODE (arg0))
        {
        case INDIRECT_REF:
          flags &= ~OEP_CONSTANT_ADDRESS_OF;
          return OP_SAME (0);

        case REALPART_EXPR:
        case IMAGPART_EXPR:
          return OP_SAME (0);

        case TARGET_MEM_REF:
          flags &= ~OEP_CONSTANT_ADDRESS_OF;
          /* Require equal extra operands and then fall through to MEM_REF
             handling of the two common operands.  */
          if (!OP_SAME_WITH_NULL (2)
              || !OP_SAME_WITH_NULL (3)
              || !OP_SAME_WITH_NULL (4))
            return 0;
          /* Fallthru.  */
        case MEM_REF:
          flags &= ~OEP_CONSTANT_ADDRESS_OF;
          /* Require equal access sizes, and similar pointer types.
             We can have incomplete types for array references of
             variable-sized arrays from the Fortran frontend
             though.  Also verify the types are compatible.  */
          return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
                   || (TYPE_SIZE (TREE_TYPE (arg0))
                       && TYPE_SIZE (TREE_TYPE (arg1))
                       && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
                                           TYPE_SIZE (TREE_TYPE (arg1)), flags)))
                  && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
                  && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
                      == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
                  && OP_SAME (0) && OP_SAME (1));

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          /* Operands 2 and 3 may be null.
             Compare the array index by value if it is constant first as we
             may have different types but same value here.  */
          if (!OP_SAME (0))
            return 0;
          flags &= ~OEP_CONSTANT_ADDRESS_OF;
          return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
                                       TREE_OPERAND (arg1, 1))
                   || OP_SAME (1))
                  && OP_SAME_WITH_NULL (2)
                  && OP_SAME_WITH_NULL (3));

        case COMPONENT_REF:
          /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
             may be NULL when we're called to compare MEM_EXPRs.  */
          if (!OP_SAME_WITH_NULL (0))
            return 0;
          flags &= ~OEP_CONSTANT_ADDRESS_OF;
          return OP_SAME (1) && OP_SAME_WITH_NULL (2);

        case BIT_FIELD_REF:
          if (!OP_SAME (0))
            return 0;
          flags &= ~OEP_CONSTANT_ADDRESS_OF;
          return OP_SAME (1) && OP_SAME (2);

        default:
          return 0;
        }

    case tcc_expression:
      switch (TREE_CODE (arg0))
        {
        case ADDR_EXPR:
        case TRUTH_NOT_EXPR:
          return OP_SAME (0);

        case TRUTH_ANDIF_EXPR:
        case TRUTH_ORIF_EXPR:
          return OP_SAME (0) && OP_SAME (1);

        case FMA_EXPR:
        case WIDEN_MULT_PLUS_EXPR:
        case WIDEN_MULT_MINUS_EXPR:
          if (!OP_SAME (2))
            return 0;
          /* The multiplication operands are commutative.  */
          /* FALLTHRU */

        case TRUTH_AND_EXPR:
        case TRUTH_OR_EXPR:
        case TRUTH_XOR_EXPR:
          if (OP_SAME (0) && OP_SAME (1))
            return 1;

          /* Otherwise take into account this is a commutative operation.  */
          return (operand_equal_p (TREE_OPERAND (arg0, 0),
                                   TREE_OPERAND (arg1, 1), flags)
                  && operand_equal_p (TREE_OPERAND (arg0, 1),
                                      TREE_OPERAND (arg1, 0), flags));

        case COND_EXPR:
        case VEC_COND_EXPR:
        case DOT_PROD_EXPR:
          return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

        default:
          return 0;
        }

    case tcc_vl_exp:
      switch (TREE_CODE (arg0))
        {
        case CALL_EXPR:
          /* If the CALL_EXPRs call different functions, then they
             clearly can not be equal.  */
          if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
                                 flags))
            return 0;

          {
            unsigned int cef = call_expr_flags (arg0);
            if (flags & OEP_PURE_SAME)
              cef &= ECF_CONST | ECF_PURE;
            else
              cef &= ECF_CONST;
            if (!cef)
              return 0;
          }

          /* Now see if all the arguments are the same.  */
          {
            const_call_expr_arg_iterator iter0, iter1;
            const_tree a0, a1;
            for (a0 = first_const_call_expr_arg (arg0, &iter0),
                   a1 = first_const_call_expr_arg (arg1, &iter1);
                 a0 && a1;
                 a0 = next_const_call_expr_arg (&iter0),
                   a1 = next_const_call_expr_arg (&iter1))
              if (! operand_equal_p (a0, a1, flags))
                return 0;

            /* If we get here and both argument lists are exhausted
               then the CALL_EXPRs are equal.  */
            return ! (a0 || a1);
          }
        default:
          return 0;
        }

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
              && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
              && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
              && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
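}

/* Worked example (illustrative, not part of the original sources): two
   hypothetical, distinct trees t1 and t2 for the expression "a + 1",
   built from the same VAR_DECL, compare equal here as long as neither
   has side effects, because a VAR_DECL is assumed to hold one value
   throughout the expression:

     operand_equal_p (t1, t2, 0)              == 1
     operand_equal_p (t1, t2, OEP_ONLY_CONST) == 0   (not constants)

   whereas REAL_CST nodes for -0.0 and 0.0 stay distinguishable whenever
   the mode honors signed zeros.  */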
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
         to match the longer operand.  */
      primarg1 = fold_convert (signed_or_unsigned_type_for
                               (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
        return 1;
    }

  return 0;
}
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
               || code == COMPOUND_EXPR))
    tclass = tcc_binary;

  else if (tclass == tcc_expression && code == SAVE_EXPR
           && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
         too complex to handle.  */
      if (*cval1 || *cval2)
        return 0;

      tclass = tcc_unary;
      *save_p = 1;
    }

  switch (tclass)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
              && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
        return (twoval_comparison_p (TREE_OPERAND (arg, 0),
                                     cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                        cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 2),
                                        cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
         the second operand, we know *CVAL1 can't be zero.  It must be that
         one side of the comparison is each of the values; test for the
         case where this isn't true by failing if the two operands
         are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
                           TREE_OPERAND (arg, 1), 0))
        return 0;

      if (*cval1 == 0)
        *cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
        ;
      else
        return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
        ;
      else
        return 0;

      return 1;

    default:
      return 0;
    }
}
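/* Worked example (illustrative, not part of the original sources): for
   ARG = (a < b) | (b >= a), twoval_comparison_p succeeds with
   *CVAL1 = a and *CVAL2 = b, since every comparison in ARG mentions
   only those two values; (a < b) | (c >= d) fails because it would
   need more than two.  */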
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (location_t loc, tree arg, tree old0, tree new0,
            tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    tclass = tcc_binary;

  switch (tclass)
    {
    case tcc_unary:
      return fold_build1_loc (loc, code, type,
                              eval_subst (loc, TREE_OPERAND (arg, 0),
                                          old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2_loc (loc, code, type,
                              eval_subst (loc, TREE_OPERAND (arg, 0),
                                          old0, new0, old1, new1),
                              eval_subst (loc, TREE_OPERAND (arg, 1),
                                          old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
        {
        case SAVE_EXPR:
          return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
                             old1, new1);

        case COMPOUND_EXPR:
          return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
                             old1, new1);

        case COND_EXPR:
          return fold_build3_loc (loc, code, type,
                                  eval_subst (loc, TREE_OPERAND (arg, 0),
                                              old0, new0, old1, new1),
                                  eval_subst (loc, TREE_OPERAND (arg, 1),
                                              old0, new0, old1, new1),
                                  eval_subst (loc, TREE_OPERAND (arg, 2),
                                              old0, new0, old1, new1));
        default:
          break;
        }
      /* Fall through - ???  */

    case tcc_comparison:
      {
        tree arg0 = TREE_OPERAND (arg, 0);
        tree arg1 = TREE_OPERAND (arg, 1);

        /* We need to check both for exact equality and tree equality.  The
           former will be true if the operand has a side-effect.  In that
           case, we know the operand occurred exactly once.  */

        if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
          arg0 = new0;
        else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
          arg0 = new1;

        if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
          arg1 = new0;
        else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
          arg1 = new1;

        return fold_build2_loc (loc, code, type, arg0, arg1);
      }

    default:
      return arg;
    }
}
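/* Worked example (illustrative, not part of the original sources):
   substituting OLD0 = a, NEW0 = 0, OLD1 = b, NEW1 = 1 into the tree for
   "a < b" rebuilds it as "0 < 1", which then folds to a constant; this
   is how a caller can probe what a comparison would become under a
   candidate simplification.  */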
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement casted to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1_loc (loc, NOP_EXPR, void_type_node,
                       fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2_loc (loc, COMPOUND_EXPR, type,
                       fold_ignored_result (omitted), t);

  return non_lvalue_loc (loc, t);
}
/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
                               tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement casted to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1_loc (loc, NOP_EXPR, void_type_node,
                       fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2_loc (loc, COMPOUND_EXPR, type,
                       fold_ignored_result (omitted), t);

  return pedantic_non_lvalue_loc (loc, t);
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands_loc (location_t loc, tree type, tree result,
                       tree omitted1, tree omitted2)
{
  tree t = fold_convert_loc (loc, type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
  if (TREE_SIDE_EFFECTS (omitted1))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
}
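/* Worked example (illustrative, not part of the original sources): when
   fold turns "f () * 0" into 0, the call cannot simply disappear, so
   omit_one_operand_loc produces the equivalent of "(f (), 0)"; the
   two-operand variant likewise yields "(f (), (g (), 0))", keeping the
   side effects of the omitted operands ahead of the result.  */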
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

static tree
fold_truth_not_expr (location_t loc, tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  location_t loc1, loc2;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
          && flag_trapping_math
          && code != ORDERED_EXPR && code != UNORDERED_EXPR
          && code != NE_EXPR && code != EQ_EXPR)
        return NULL_TREE;

      code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
      if (code == ERROR_MARK)
        return NULL_TREE;

      return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
                         TREE_OPERAND (arg, 1));
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_OR_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_AND_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
         unless the second operand is a TRUTH_NOT_EXPR in which case our
         result is the XOR of the first operand with the inside of the
         negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
        return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
                           TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
        return build2_loc (loc, TRUTH_XOR_EXPR, type,
                           invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
                           TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ORIF_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
        tree arg1 = TREE_OPERAND (arg, 1);
        tree arg2 = TREE_OPERAND (arg, 2);

        loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
        loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);

        /* A COND_EXPR may have a throw as one operand, which
           then has void type.  Just leave void operands
           as they are.  */
        return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
                           VOID_TYPE_P (TREE_TYPE (arg1))
                           ? arg1 : invert_truthvalue_loc (loc1, arg1),
                           VOID_TYPE_P (TREE_TYPE (arg2))
                           ? arg2 : invert_truthvalue_loc (loc2, arg2));
      }

    case COMPOUND_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, COMPOUND_EXPR, type,
                         TREE_OPERAND (arg, 0),
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));

    CASE_CONVERT:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
        return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

      /* ... fall through ...  */

    case FLOAT_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, TREE_CODE (arg), type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
        return NULL_TREE;
      return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));

    case SAVE_EXPR:
      return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, CLEANUP_POINT_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    default:
      return NULL_TREE;
    }
}
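/* Worked example (illustrative, not part of the original sources):
   De Morgan in action.  Negating "a && b" through the TRUTH_AND_EXPR
   case gives "!a || !b", and negating the comparison "x < y" (no NaNs)
   gives "x >= y"; only trapping floating-point inequalities are left
   for the caller to wrap in a TRUTH_NOT_EXPR.  */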
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

tree
invert_truthvalue_loc (location_t loc, tree arg)
{
  tree tem;

  if (TREE_CODE (arg) == ERROR_MARK)
    return arg;

  tem = fold_truth_not_expr (loc, arg);
  if (!tem)
    tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);

  return tem;
}
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
        (A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (location_t loc, enum tree_code code, tree type,
                     tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
          && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  common = fold_convert_loc (loc, type, common);
  left = fold_convert_loc (loc, type, left);
  right = fold_convert_loc (loc, type, right);
  return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
                          fold_build2_loc (loc, code, type, left, right));
}
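/* Worked example (illustrative, not part of the original sources): with
   constants involved, (x | 3) & (x | 5) distributes to x | (3 & 5),
   which further folds to x | 1, saving two operations.  */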
/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
   with code CODE.  This optimization is unsafe.  */

static tree
distribute_real_division (location_t loc, enum tree_code code, tree type,
                          tree arg0, tree arg1)
{
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
                          TREE_OPERAND (arg1, 1), 0))
    return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
                            fold_build2_loc (loc, code, type,
                                             TREE_OPERAND (arg0, 0),
                                             TREE_OPERAND (arg1, 0)),
                            TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
                       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      if (!mul0)
        real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
        real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2_loc (loc, MULT_EXPR, type,
                              TREE_OPERAND (arg0, 0),
                              build_real (type, r0));
    }

  return NULL_TREE;
}
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

static tree
make_bit_field_ref (location_t loc, tree inner, tree type,
                    HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
{
  tree result, bftype;

  if (bitpos == 0)
    {
      tree size = TYPE_SIZE (TREE_TYPE (inner));
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
           || POINTER_TYPE_P (TREE_TYPE (inner)))
          && host_integerp (size, 0)
          && tree_low_cst (size, 0) == bitsize)
        return fold_convert_loc (loc, type, inner);
    }

  bftype = type;
  if (TYPE_PRECISION (bftype) != bitsize
      || TYPE_UNSIGNED (bftype) == !unsignedp)
    bftype = build_nonstandard_integer_type (bitsize, 0);

  result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
                       size_int (bitsize), bitsize_int (bitpos));

  if (bftype != type)
    result = fold_convert_loc (loc, type, result);

  return result;
}
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (location_t loc, enum tree_code code,
                            tree compare_type, tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* In the strict volatile bitfields case, doing code changes here may prevent
     other optimizations, in particular in a SLOW_BYTE_ACCESS setting.  */
  if (flag_strict_volatile_bitfields > 0)
    return 0;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
                                &lunsignedp, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
         sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
                                    &runsignedp, &rvolatilep, false);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
          || lunsignedp != runsignedp || offset != 0
          || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
        return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  if (lvolatilep
      && GET_MODE_BITSIZE (lmode) > 0
      && flag_strict_volatile_bitfields > 0)
    nmode = lmode;
  else
    nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
                           const_p ? TYPE_ALIGN (TREE_TYPE (linner))
                           : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
                                  TYPE_ALIGN (TREE_TYPE (rinner))),
                           word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_cst_type (unsigned_type, -1);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
  mask = const_binop (RSHIFT_EXPR, mask,
                      size_int (nbitsize - lbitsize - lbitpos));

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return fold_build2_loc (loc, code, compare_type,
                            fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
                                             make_bit_field_ref (loc, linner,
                                                                 unsigned_type,
                                                                 nbitsize,
                                                                 nbitpos, 1),
                                             mask),
                            fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
                                             make_bit_field_ref (loc, rinner,
                                                                 unsigned_type,
                                                                 nbitsize,
                                                                 nbitpos, 1),
                                             mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
                                        fold_convert_loc (loc,
                                                          unsigned_type, rhs),
                                        size_int (lbitsize))))
        {
          warning (0, "comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR,
                              fold_convert_loc (loc, signed_type, rhs),
                              size_int (lbitsize - 1));
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
        {
          warning (0, "comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = build_int_cst (type, 0);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = const_binop (BIT_AND_EXPR,
                     const_binop (LSHIFT_EXPR,
                                  fold_convert_loc (loc, unsigned_type, rhs),
                                  size_int (lbitpos)),
                     mask);

  lhs = build2_loc (loc, code, compare_type,
                    build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
  return lhs;
}
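/* Worked example (illustrative, not part of the original sources): for a
   hypothetical

     struct s { unsigned a : 3; unsigned b : 5; } x;

   a test like "x.b == 7" can be rewritten to mask the containing unit
   instead of extracting the field, roughly

     (unit & MASK) == ((7 << SHIFT) & MASK)

   where "unit" is an nbitsize-wide load covering the field and MASK and
   SHIFT position the field within it, avoiding the shift implicit in a
   bit-field extraction.  */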
/* Subroutine for fold_truth_andor_1: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
                        HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
                        int *punsignedp, int *pvolatilep,
                        tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (CONVERT_EXPR_P (exp)
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
        return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
                               punsignedp, pvolatilep, false);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst_type (unsigned_type, -1);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
                            fold_convert_loc (loc, unsigned_type, and_mask),
                            mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bits.  */

static int
all_ones_mask_p (const_tree mask, int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);
  tree tmask;

  tmask = build_int_cst_type (signed_type_for (type), -1);

  return
    tree_int_cst_equal (mask,
                        const_binop (RSHIFT_EXPR,
                                     const_binop (LSHIFT_EXPR, tmask,
                                                  size_int (precision - size)),
                                     size_int (precision - size)));
}
/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, const_tree val)
{
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = ((unsigned HOST_WIDE_INT) -1
                 >> (HOST_BITS_PER_DOUBLE_INT - width));
      mask_lo = -1;
    }
  else
    {
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = ((unsigned HOST_WIDE_INT) -1
                 >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
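/* Worked example (illustrative, not part of the original sources): for a
   32-bit int X, sign_bit_p (X, 0x80000000) returns X, which lets callers
   rewrite "(x & 0x80000000) != 0" style tests as a sign comparison
   "x < 0".  */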
/* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (const_tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  STRIP_NOPS (exp);

  return (CONSTANT_CLASS_P (exp)
          || TREE_CODE (exp) == SSA_NAME
          || (DECL_P (exp)
              && ! TREE_ADDRESSABLE (exp)
              && ! TREE_THIS_VOLATILE (exp)
              && ! DECL_NONLOCAL (exp)
              /* Don't regard global variables as simple.  They may be
                 allocated in ways unknown to the compiler (shared memory,
                 #pragma weak, etc).  */
              && ! TREE_PUBLIC (exp)
              && ! DECL_EXTERNAL (exp)
              /* Loading a static variable is unduly expensive, but global
                 registers aren't expensive.  */
              && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}
/* Subroutine for fold_truth_andor: determine if an operand is simple enough
   to be evaluated unconditionally.
   In addition to simple_operand_p, we assume that comparisons, conversions,
   and logic-not operations are simple, if their operands are simple, too.  */

static bool
simple_operand_p_2 (tree exp)
{
  enum tree_code code;

  if (TREE_SIDE_EFFECTS (exp)
      || tree_could_trap_p (exp))
    return false;

  while (CONVERT_EXPR_P (exp))
    exp = TREE_OPERAND (exp, 0);

  code = TREE_CODE (exp);

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return (simple_operand_p (TREE_OPERAND (exp, 0))
            && simple_operand_p (TREE_OPERAND (exp, 1)));

  if (code == TRUTH_NOT_EXPR)
    return simple_operand_p_2 (TREE_OPERAND (exp, 0));

  return simple_operand_p (exp);
}
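/* Worked example (illustrative, not part of the original sources):
   "x < 10" with x a local, non-volatile, non-addressable variable is
   simple under simple_operand_p_2 and may be evaluated unconditionally
   when merging "a && x < 10" style tests, while "*p < 10" is rejected
   because the load could trap.  */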
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
        X == 2 || X == 3 || X == 4 || X == 5
   and
        X >= 2 && X <= 5
   are converted to
        (unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
             tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
                         arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);

  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}
/* Helper routine for make_range.  Perform one step for it, return
   new expression if the loop should continue or NULL_TREE if it should
   stop.  */

tree
make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
                 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
                 bool *strict_overflow_p)
{
  tree arg0_type = TREE_TYPE (arg0);
  tree n_low, n_high, low = *p_low, high = *p_high;
  int in_p = *p_in_p, n_in_p;

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      /* We can only do something if the range is testing for zero.  */
      if (low == NULL_TREE || high == NULL_TREE
          || ! integer_zerop (low) || ! integer_zerop (high))
        return NULL_TREE;
      *p_in_p = ! in_p;
      return arg0;

    case EQ_EXPR: case NE_EXPR:
    case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
      /* We can only do something if the range is testing for zero
         and if the second operand is an integer constant.  Note that
         saying something is "in" the range we make is done by
         complementing IN_P since it will set in the initial case of
         being not equal to zero; "out" is leaving it alone.  */
      if (low == NULL_TREE || high == NULL_TREE
          || ! integer_zerop (low) || ! integer_zerop (high)
          || TREE_CODE (arg1) != INTEGER_CST)
        return NULL_TREE;

      switch (code)
        {
        case NE_EXPR:  /* - [c, c]  */
          low = high = arg1;
          break;
        case EQ_EXPR:  /* + [c, c]  */
          in_p = ! in_p, low = high = arg1;
          break;
        case GT_EXPR:  /* - [-, c] */
          low = 0, high = arg1;
          break;
        case GE_EXPR:  /* + [c, -] */
          in_p = ! in_p, low = arg1, high = 0;
          break;
        case LT_EXPR:  /* - [c, -] */
          low = arg1, high = 0;
          break;
        case LE_EXPR:  /* + [-, c] */
          in_p = ! in_p, low = 0, high = arg1;
          break;
        default:
          gcc_unreachable ();
        }

      /* If this is an unsigned comparison, we also know that EXP is
         greater than or equal to zero.  We base the range tests we make
         on that fact, so we record it here so we can parse existing
         range tests.  We test arg0_type since often the return type
         of, e.g. EQ_EXPR, is boolean.  */
      if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
        {
          if (! merge_ranges (&n_in_p, &n_low, &n_high,
                              in_p, low, high, 1,
                              build_int_cst (arg0_type, 0),
                              NULL_TREE))
            return NULL_TREE;

          in_p = n_in_p, low = n_low, high = n_high;

          /* If the high bound is missing, but we have a nonzero low
             bound, reverse the range so it goes from zero to the low bound
             minus 1.  */
          if (high == 0 && low && ! integer_zerop (low))
            {
              in_p = ! in_p;
              high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
                                  integer_one_node, 0);
              low = build_int_cst (arg0_type, 0);
            }
        }

      *p_low = low;
      *p_high = high;
      *p_in_p = in_p;
      return arg0;

    case NEGATE_EXPR:
      /* If flag_wrapv and ARG0_TYPE is signed, make sure
         low and high are non-NULL, then normalize will DTRT.  */
      if (!TYPE_UNSIGNED (arg0_type)
          && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
        {
          if (low == NULL_TREE)
            low = TYPE_MIN_VALUE (arg0_type);
          if (high == NULL_TREE)
            high = TYPE_MAX_VALUE (arg0_type);
        }

      /* (-x) IN [a,b] -> x in [-b, -a]  */
      n_low = range_binop (MINUS_EXPR, exp_type,
                           build_int_cst (exp_type, 0),
                           0, high, 1);
      n_high = range_binop (MINUS_EXPR, exp_type,
                            build_int_cst (exp_type, 0),
                            0, low, 0);
      if (n_high != 0 && TREE_OVERFLOW (n_high))
        return NULL_TREE;
      goto normalize;

    case BIT_NOT_EXPR:
      /* ~ X -> -X - 1  */
      return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
                         build_int_cst (exp_type, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
      if (TREE_CODE (arg1) != INTEGER_CST)
        return NULL_TREE;

      /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
         move a constant to the other side.  */
      if (!TYPE_UNSIGNED (arg0_type)
          && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
        return NULL_TREE;

      /* If EXP is signed, any overflow in the computation is undefined,
         so we don't worry about it so long as our computations on
         the bounds don't overflow.  For unsigned, overflow is defined
         and this is exactly the right thing.  */
      n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                           arg0_type, low, 0, arg1, 0);
      n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                            arg0_type, high, 1, arg1, 0);
      if ((n_low != 0 && TREE_OVERFLOW (n_low))
          || (n_high != 0 && TREE_OVERFLOW (n_high)))
        return NULL_TREE;

      if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
        *strict_overflow_p = true;

    normalize:
      /* Check for an unsigned range which has wrapped around the maximum
         value thus making n_high < n_low, and normalize it.  */
      if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
        {
          low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
                             integer_one_node, 0);
          high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
                              integer_one_node, 0);

          /* If the range is of the form +/- [ x+1, x ], we won't
             be able to normalize it.  But then, it represents the
             whole range or the empty set, so make it
             +/- [ -, - ].  */
          if (tree_int_cst_equal (n_low, low)
              && tree_int_cst_equal (n_high, high))
            low = high = 0;
          else
            in_p = ! in_p;
        }
      else
        low = n_low, high = n_high;

      *p_low = low;
      *p_high = high;
      *p_in_p = in_p;
      return arg0;

    CASE_CONVERT:
    case NON_LVALUE_EXPR:
      if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
        return NULL_TREE;

      if (! INTEGRAL_TYPE_P (arg0_type)
          || (low != 0 && ! int_fits_type_p (low, arg0_type))
          || (high != 0 && ! int_fits_type_p (high, arg0_type)))
        return NULL_TREE;

      n_low = low, n_high = high;

      if (n_low != 0)
        n_low = fold_convert_loc (loc, arg0_type, n_low);

      if (n_high != 0)
        n_high = fold_convert_loc (loc, arg0_type, n_high);

      /* If we're converting arg0 from an unsigned type, to exp,
         a signed type, we will be doing the comparison as unsigned.
         The tests above have already verified that LOW and HIGH
         are both positive.

         So we have to ensure that we will handle large unsigned
         values the same way that the current signed bounds treat
         negative values.  */

      if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
        {
          tree high_positive;
          tree equiv_type;
          /* For fixed-point modes, we need to pass the saturating flag
             as the 2nd parameter.  */
          if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
            equiv_type
              = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
                                                TYPE_SATURATING (arg0_type));
          else
            equiv_type
              = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);

          /* A range without an upper bound is, naturally, unbounded.
             Since convert would have cropped a very large value, use
             the max value for the destination type.  */
          high_positive
            = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
              : TYPE_MAX_VALUE (arg0_type);

          if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
            high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
                                             fold_convert_loc (loc, arg0_type,
                                                               high_positive),
                                             build_int_cst (arg0_type, 1));

          /* If the low bound is specified, "and" the range with the
             range for which the original unsigned value will be
             positive.  */
          if (low != 0)
            {
              if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
                                  1, fold_convert_loc (loc, arg0_type,
                                                       integer_zero_node),
                                  high_positive))
                return NULL_TREE;

              in_p = (n_in_p == in_p);
            }
          else
            {
              /* Otherwise, "or" the range with the range of the input
                 that will be interpreted as negative.  */
              if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
                                  1, fold_convert_loc (loc, arg0_type,
                                                       integer_zero_node),
                                  high_positive))
                return NULL_TREE;

              in_p = (in_p != n_in_p);
            }
        }

      *p_low = n_low;
      *p_high = n_high;
      *p_in_p = in_p;
      return arg0;

    default:
      return NULL_TREE;
    }
}
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same
   type as the returned expression.  If EXP is not a comparison, we
   will most likely not be returning a useful value and range.  Set
   *STRICT_OVERFLOW_P to true if the return value is only valid
   because signed overflow is undefined; otherwise, do not change
   *STRICT_OVERFLOW_P.  */

tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
            bool *strict_overflow_p)
{
  enum tree_code code;
  tree arg0, arg1 = NULL_TREE;
  tree exp_type, nexp;
  int in_p;
  tree low, high;
  location_t loc = EXPR_LOCATION (exp);

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = build_int_cst (TREE_TYPE (exp), 0);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);
      arg0 = NULL_TREE;

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
        {
          if (TREE_OPERAND_LENGTH (exp) > 0)
            arg0 = TREE_OPERAND (exp, 0);
          if (TREE_CODE_CLASS (code) == tcc_binary
              || TREE_CODE_CLASS (code) == tcc_comparison
              || (TREE_CODE_CLASS (code) == tcc_expression
                  && TREE_OPERAND_LENGTH (exp) > 1))
            arg1 = TREE_OPERAND (exp, 1);
        }
      if (arg0 == NULL_TREE)
        break;

      nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
                              &high, &in_p, strict_overflow_p);
      if (nexp == NULL_TREE)
        break;
      exp = nexp;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
                                                 exp, 0, low, 0))
                      && integer_onep (range_binop (LE_EXPR, integer_type_node,
                                                    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
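/* Worked example (illustrative, not part of the original sources): for
   EXP = "(x - 2) <= 3" with unsigned x, the comparison step yields the
   range + [-, 3], the implicit unsignedness adds a zero lower bound
   giving + [0, 3], and the MINUS_EXPR step shifts both bounds by 2, so
   make_range returns x with *PIN_P = 1, *PLOW = 2 and *PHIGH = 5,
   i.e. "x in [2, 5]".  */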

/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.  */

tree
build_range_check (location_t loc, tree type, tree exp, int in_p,
		   tree low, tree high)
{
  tree etype = TREE_TYPE (exp), value;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (etype) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (! in_p)
    {
      value = build_range_check (loc, type, exp, 1, low, high);
      if (value != 0)
        return invert_truthvalue_loc (loc, value);

      return 0;
    }

  if (low == 0 && high == 0)
    return build_int_cst (type, 1);

  if (low == 0)
    return fold_build2_loc (loc, LE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, high));

  if (high == 0)
    return fold_build2_loc (loc, GE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  if (operand_equal_p (low, high, 0))
    return fold_build2_loc (loc, EQ_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
	{
	  etype = unsigned_type_for (etype);
	  high = fold_convert_loc (loc, etype, high);
	  exp = fold_convert_loc (loc, etype, exp);
	}
      return build_range_check (loc, type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
	{
	  hi = 0;
	  lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
	}
      else
	{
	  hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
	  lo = (unsigned HOST_WIDE_INT) -1;
	}

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
	{
	  if (TYPE_UNSIGNED (etype))
	    {
	      tree signed_etype = signed_type_for (etype);
	      if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
		etype
		  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
	      else
		etype = signed_etype;
	      exp = fold_convert_loc (loc, etype, exp);
	    }
	  return fold_build2_loc (loc, GT_EXPR, type, exp,
				  build_int_cst (etype, 0));
	}
    }

  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetics for the type of the expression.
     First make sure that arithmetics in this type is valid, then make sure
     that it wraps around.  */
  if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
    etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
					    TYPE_UNSIGNED (etype));

  if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
	 for the type in question, as we rely on this here.  */
      utype = unsigned_type_for (etype);
      maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
			  integer_one_node, 1);
      minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
				      minv, 1, maxv, 1)))
	etype = utype;
      else
	return 0;
    }

  high = fold_convert_loc (loc, etype, high);
  low = fold_convert_loc (loc, etype, low);
  exp = fold_convert_loc (loc, etype, exp);

  value = const_binop (MINUS_EXPR, high, low);

  if (POINTER_TYPE_P (etype))
    {
      if (value != 0 && !TREE_OVERFLOW (value))
	{
	  low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
	  return build_range_check (loc, type,
				    fold_build_pointer_plus_loc (loc, exp, low),
				    1, build_int_cst (etype, 0), value);
	}
      return 0;
    }

  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (loc, type,
			      fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
			      1, build_int_cst (etype, 0), value);

  return 0;
}
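
/* As an illustration, a check with IN_P = 1, LOW = '0' and HIGH = '9'
   against a plain `char' EXP goes through the wrap-around path above and
   comes out as a single unsigned comparison, roughly

     (unsigned char) (EXP - '0') <= 9

   instead of a pair of signed comparisons.  */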

/* Return the predecessor of VAL in its type, handling the infinite case.  */

static tree
range_predecessor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
    return 0;
  else
    return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}

/* Return the successor of VAL in its type, handling the infinite case.  */

static tree
range_successor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
    return 0;
  else
    return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}

/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

bool
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
	      tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (lowequal
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the second range to the end of the first.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)
	{
	  low = range_successor (high1);
	  high = high0;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* We are in the weird situation where high0 > high1 but
		 high1 has no successor.  Punt.  */
	      return 0;
	    }
	}
      else if (! subset || highequal)
	{
	  low = low0;
	  high = range_predecessor (low1);
	  in_p = 1;
	  if (high == 0)
	    {
	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
	      return 0;
	    }
	}
      else
	return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
	in_p = 0, low = high = 0;
      else
	{
	  low = range_successor (high0);
	  high = high1;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* high1 > high0 but high0 has no successor.  Punt.  */
	      return 0;
	    }
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of the
	 second.  */
      if (no_overlap)
	{
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_successor (high0),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    {
	      /* Canonicalize - [min, x] into - [-, x].  */
	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (low0)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (low0))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (low0,
					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
		      low0 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
			&& integer_zerop (low0))
		      low0 = 0;
		    break;
		  default:
		    break;
		  }

	      /* Canonicalize - [x, max] into - [x, -].  */
	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (high1)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (high1))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (high1,
					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
		      high1 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
						       high1, 1,
						       integer_one_node, 1)))
		      high1 = 0;
		    break;
		  default:
		    break;
		  }

	      /* The ranges might be also adjacent between the maximum and
		 minimum values of the given type.  For
		 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
		 return + [x + 1, y - 1].  */
	      if (low0 == 0 && high1 == 0)
		{
		  low = range_successor (high0);
		  high = range_predecessor (low1);
		  if (low == 0 || high == 0)
		    return 0;

		  in_p = 1;
		}
	      else
		return 0;
	    }
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
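
/* Two worked examples: merging +[0, 9] with +[5, 15] hits the
   "in0_p && in1_p" case (overlapping, no subset) and yields the
   intersection +[5, 9]; merging +[0, 9] with -[5, 15] hits the
   "in0_p && ! in1_p" case and yields +[0, 4].  */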

/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is being used also to optimize
   A op B ? C : A, by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (location_t loc, tree type,
				tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && (FLOAT_TYPE_P (TREE_TYPE (arg01))
	  ? real_zerop (arg01)
	  : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
	     /* In the case that A is of the form X-Y, '-A' (arg2) may
	        have already been folded to Y-X, check for that.  */
	  || (TREE_CODE (arg1) == MINUS_EXPR
	      && TREE_CODE (arg2) == MINUS_EXPR
	      && operand_equal_p (TREE_OPERAND (arg1, 0),
				  TREE_OPERAND (arg2, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg1, 1),
				  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	tem = fold_convert_loc (loc, arg1_type, arg1);
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
							  negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert_loc (loc, signed_type_for
				   (TREE_TYPE (arg1)), arg1);
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case LE_EXPR:
      case LT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert_loc (loc, signed_type_for
				   (TREE_TYPE (arg1)), arg1);
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return negate_expr (fold_convert_loc (loc, type, tem));
      default:
	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      else if (comp_code == EQ_EXPR)
	return build_zero_cst (type);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
	 as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
	  || VECTOR_TYPE_P (type)
	  || (strcmp (lang_hooks.name, "GNU C++") != 0
	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
	  || ! maybe_lvalue_p (arg1)
	  || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
	{
	  comp_type = type;
	  comp_op0 = arg1;
	  comp_op1 = arg2;
	}

      switch (comp_code)
	{
	case EQ_EXPR:
	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
	case NE_EXPR:
	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
	case LE_EXPR:
	case LT_EXPR:
	case UNLE_EXPR:
	case UNLT_EXPR:
	  /* In C++ a ?: expression can be an lvalue, so put the
	     operand which will be used if they are equal first
	     so that we can convert this back to the
	     corresponding COND_EXPR.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
		    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2_loc (loc, MIN_EXPR, comp_type,
				       comp_op1, comp_op0);
	      return pedantic_non_lvalue_loc (loc,
					      fold_convert_loc (loc, type, tem));
	    }
	  break;
	case GE_EXPR:
	case GT_EXPR:
	case UNGE_EXPR:
	case UNGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
		    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2_loc (loc, MAX_EXPR, comp_type,
				       comp_op1, comp_op0);
	      return pedantic_non_lvalue_loc (loc,
					      fold_convert_loc (loc, type, tem));
	    }
	  break;
	case UNEQ_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, arg2));
	  break;
	case LTGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, arg1));
	  break;
	default:
	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	  break;
	}
    }

  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
	if (TREE_CODE (arg1) == INTEGER_CST)
	  break;
	/* We can replace A with C1 in this case.  */
	arg1 = fold_convert_loc (loc, type, arg01);
	return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);

      case LT_EXPR:
	/* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
	   MIN_EXPR, to preserve the signedness of the comparison.  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, tem));
	  }
	break;

      case LE_EXPR:
	/* If C1 is C2 - 1, this is min(A, C2), with the same care
	   as above.  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, tem));
	  }
	break;

      case GT_EXPR:
	/* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
	   MAX_EXPR, to preserve the signedness of the comparison.  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	  }
	break;

      case GE_EXPR:
	/* If C1 is C2 + 1, this is max(A, C2), with the same care as above.  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	  }
	break;

      case NE_EXPR:
	break;
      default:
	gcc_unreachable ();
      }

  return NULL_TREE;
}
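
/* Some instances of the transformations above, valid when the mode
   honors neither signed zeros nor (where noted) NaNs:

     x > 0 ? x : -x   -->  ABS_EXPR <x>
     a >= b ? a : b   -->  MAX_EXPR <a, b>
     a < b ? a : b    -->  MIN_EXPR <b, a>  (note the operand order)
     x < 6 ? x : 5    -->  MIN_EXPR <x, 5>  (the C1 == C2 + 1 case)  */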

#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
		false) >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
	       || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  bool strict_overflow_p = false;
  tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
  tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
  tree tem;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when simplifying range test");

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (loc, type,
					 lhs != 0 ? lhs
					 : rhs != 0 ? rhs : integer_zero_node,
					 in_p, low, high))))
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
      return or_op ? invert_truthvalue_loc (loc, tem) : tem;
    }

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
	   && lhs != 0 && rhs != 0
	   && (code == TRUTH_ANDIF_EXPR
	       || code == TRUTH_ORIF_EXPR)
	   && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build2_loc (loc, code == TRUTH_ANDIF_EXPR
			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			   type, op0, op1);

      else if (!lang_hooks.decls.global_bindings_p ()
	       && !CONTAINS_PLACEHOLDER_P (lhs))
	{
	  tree common = save_expr (lhs);

	  if (0 != (lhs = build_range_check (loc, type, common,
					     or_op ? ! in0_p : in0_p,
					     low0, high0))
	      && (0 != (rhs = build_range_check (loc, type, common,
						 or_op ? ! in1_p : in1_p,
						 low1, high1))))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (warnmsg,
				       WARN_STRICT_OVERFLOW_COMPARISON);
	      return build2_loc (loc, code == TRUTH_ANDIF_EXPR
				 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
				 type, lhs, rhs);
	    }
	}
    }

  return 0;
}
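
/* The classic digit test `ch >= '0' && ch <= '9'' ends up here: both
   comparisons become ranges on CH, merge_ranges combines them into
   +['0', '9'], and build_range_check then emits one unsigned comparison
   of the form (unsigned char) (ch - '0') <= 9.  */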

/* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is signed-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1));

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (signed_type_for (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
			fold_convert (TREE_TYPE (c), mask));
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
}
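
/* Example, with an 8-bit mode, P = 4, UNSIGNEDP = 0 and MASK = 0:
   C = 12 is 1100 in the 4-bit field, so its sign bit is set and the
   field sign-extends to 11111100 when widened.  unextend (12, 4, 0, 0)
   accordingly returns the constant 11111100 (-4), which is what the
   widened field will actually compare equal to.  */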

/* For an expression that has the form
     (A && B) || ~B
   or
     (A || B) && ~B,
   we can drop one of the inner expressions and simplify to
     A || ~B
   or
     A && ~B

   LOC is the location of the resulting expression.  OP is the inner
   logical operation; the left-hand side in the examples above, while CMPOP
   is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
   removing a condition that guards another, as in
     (A != NULL && A->...) || A == NULL
   which we must not transform.  If RHS_ONLY is true, only eliminate the
   right-most operand of the inner logical operation.  */

static tree
merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
				 bool rhs_only)
{
  tree type = TREE_TYPE (cmpop);
  enum tree_code code = TREE_CODE (cmpop);
  enum tree_code truthop_code = TREE_CODE (op);
  tree lhs = TREE_OPERAND (op, 0);
  tree rhs = TREE_OPERAND (op, 1);
  tree orig_lhs = lhs, orig_rhs = rhs;
  enum tree_code rhs_code = TREE_CODE (rhs);
  enum tree_code lhs_code = TREE_CODE (lhs);
  enum tree_code inv_code;

  if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
    return NULL_TREE;

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return NULL_TREE;

  if (rhs_code == truthop_code)
    {
      tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
      if (newrhs != NULL_TREE)
	{
	  rhs = newrhs;
	  rhs_code = TREE_CODE (rhs);
	}
    }
  if (lhs_code == truthop_code && !rhs_only)
    {
      tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
      if (newlhs != NULL_TREE)
	{
	  lhs = newlhs;
	  lhs_code = TREE_CODE (lhs);
	}
    }

  inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
  if (inv_code == rhs_code
      && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return lhs;
  if (!rhs_only && inv_code == lhs_code
      && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return rhs;
  if (rhs != orig_rhs || lhs != orig_lhs)
    return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
			    lhs, rhs);
  return NULL_TREE;
}
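
/* Example: for OP = `a != 0 && b != 0' and CMPOP = `a == 0' (with
   RHS_ONLY false), the inverted CMPOP `a != 0' matches the left arm of
   OP, so OP is rewritten to just `b != 0' and the enclosing expression
   (OP || CMPOP) becomes `b != 0 || a == 0'.  The RHS_ONLY flag keeps us
   from doing this when the left arm guards the right one.  */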

/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with the a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
		    tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  HOST_WIDE_INT first_bit, end_bit;
  int volatilep;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
		    build_int_cst (TREE_TYPE (lhs), 0));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
		    build_int_cst (TREE_TYPE (rhs), 0));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      if (operand_equal_p (ll_arg, rl_arg, 0)
	  && operand_equal_p (lr_arg, rr_arg, 0))
	{
	  result = combine_comparisons (loc, code, lcode, rcode,
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
      else if (operand_equal_p (ll_arg, rr_arg, 0)
	       && operand_equal_p (lr_arg, rl_arg, 0))
	{
	  result = combine_comparisons (loc, code, lcode,
					swap_tree_comparison (rcode),
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  */

  if (BRANCH_COST (optimize_function_for_speed_p (cfun),
		   false) >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
	  && lcode == NE_EXPR && integer_zerop (lr_arg)
	  && rcode == NE_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2_loc (loc, NE_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2_loc (loc, EQ_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (loc, ll_arg,
				     &ll_bitsize, &ll_bitpos, &ll_mode,
				     &ll_unsignedp, &volatilep, &ll_mask,
				     &ll_and_mask);
  lr_inner = decode_field_reference (loc, lr_arg,
				     &lr_bitsize, &lr_bitpos, &lr_mode,
				     &lr_unsignedp, &volatilep, &lr_mask,
				     &lr_and_mask);
  rl_inner = decode_field_reference (loc, rl_arg,
				     &rl_bitsize, &rl_bitpos, &rl_mode,
				     &rl_unsignedp, &volatilep, &rl_mask,
				     &rl_and_mask);
  rr_inner = decode_field_reference (loc, rr_arg,
				     &rr_bitsize, &rr_bitpos, &rr_mode,
				     &rr_unsignedp, &volatilep, &rr_mask,
				     &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
	   || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
	{
	  /* Make the left operand unsigned, since we are only interested
	     in the value of one bit.  Otherwise we are doing the wrong
	     thing below.  */
	  ll_unsignedp = 1;
	  l_const = ll_mask;
	}
      else
	return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
	{
	  rl_unsignedp = 1;
	  r_const = rl_mask;
	}
      else
	return 0;
    }

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
			  volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
			 size_int (xll_bitpos));
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
			 size_int (xrl_bitpos));

  if (l_const)
    {
      l_const = fold_convert_loc (loc, lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,
							 lntype, ll_mask))))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }
  if (r_const)
    {
      r_const = fold_convert_loc (loc, lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,
							 lntype, rl_mask))))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
	  /* Make sure the two fields on the right
	     correspond to the left without being swapped.  */
	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
	return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
			      volatilep);
      if (rnmode == VOIDmode)
	return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
	{
	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
	}

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
							    rntype, lr_mask),
			     size_int (xlr_bitpos));
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
							    rntype, rr_mask),
			     size_int (xrr_bitpos));

      /* Make a mask that corresponds to both fields being compared.
	 Do this for both items being compared.  If the operands are the
	 same size and the bits being compared are in the same position
	 then we can do this by masking both and comparing the masked
	 results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
	{
	  lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
				    ll_unsignedp || rl_unsignedp);
	  if (! all_ones_mask_p (ll_mask, lnbitsize))
	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

	  rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
				    lr_unsignedp || rr_unsignedp);
	  if (! all_ones_mask_p (lr_mask, rnbitsize))
	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
	}

      /* There is still another way we can do something:  If both pairs of
	 fields being compared are adjacent, we may be able to make a wider
	 field containing them both.

	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
	 the mask must be shifted to account for the shift done by
	 make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
	   && lr_bitsize + lr_bitpos == rr_bitpos)
	  || (ll_bitpos == rl_bitpos + rl_bitsize
	      && lr_bitpos == rr_bitpos + rr_bitsize))
	{
	  tree type;

	  lhs = make_bit_field_ref (loc, ll_inner, lntype,
				    ll_bitsize + rl_bitsize,
				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
	  rhs = make_bit_field_ref (loc, lr_inner, rntype,
				    lr_bitsize + rr_bitsize,
				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
				 size_int (MIN (xll_bitpos, xrl_bitpos)));
	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
				 size_int (MIN (xlr_bitpos, xrr_bitpos)));

	  /* Convert to the smaller type before masking out unwanted bits.  */
	  type = lntype;
	  if (lntype != rntype)
	    {
	      if (lnbitsize > rnbitsize)
		{
		  lhs = fold_convert_loc (loc, rntype, lhs);
		  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
		  type = rntype;
		}
	      else if (lnbitsize < rnbitsize)
		{
		  rhs = fold_convert_loc (loc, lntype, rhs);
		  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
		  type = lntype;
		}
	    }

	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
	}

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
			   const_binop (BIT_AND_EXPR, result, r_const)) != 1)
    {
      if (wanted_code == NE_EXPR)
	{
	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
	  return constant_boolean_node (true, truth_type);
	}
      else
	{
	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
	  return constant_boolean_node (false, truth_type);
	}
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
			       ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);

  return build2_loc (loc, wanted_code, truth_type, result,
		     const_binop (BIT_IOR_EXPR, l_const, r_const));
}
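
/* For example, given struct { unsigned a : 4; unsigned b : 4; } *p, the
   test `p->a == 2 && p->b == 4' can be folded by the constant path above
   into one byte-sized load, mask and compare, conceptually

     (*(unsigned char *) p & mask) == merged_const

   where the mask and merged constant depend on the bit positions and on
   BYTES_BIG_ENDIAN.  */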

/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
			    tree op0, tree op1)
{
  tree arg0 = op0;
  enum tree_code op_code;
  tree comp_const;
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_OVERFLOW (minmax_const))
    return NULL_TREE;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (code)
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      {
	tree tem
	  = optimize_minmax_comparison (loc,
					invert_tree_comparison (code, false),
					type, op0, op1);
	if (tem)
	  return invert_truthvalue_loc (loc, tem);
	return NULL_TREE;
      }

    case GE_EXPR:
      return
	fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
			 optimize_minmax_comparison
			 (loc, EQ_EXPR, type, arg0, comp_const),
			 optimize_minmax_comparison
			 (loc, GT_EXPR, type, arg0, comp_const));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
	/* MAX (X, 0) == 0  ->  X <= 0  */
	return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR && consts_lt)
	/* MAX (X, 0) == 5  ->  X == 5   */
	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) == -1  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else if (consts_equal)
	/* MIN (X, 0) == 0  ->  X >= 0  */
	return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);

      else if (consts_lt)
	/* MIN (X, 0) == 5  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
	/* MIN (X, 0) == -1  ->  X == -1  */
	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
	/* MAX (X, 0) > 0  ->  X > 0
	   MAX (X, 0) > 5  ->  X > 5  */
	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) > -1  ->  true  */
	return omit_one_operand_loc (loc, type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
	/* MIN (X, 0) > 0  ->  false
	   MIN (X, 0) > 5  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
	/* MIN (X, 0) > -1  ->  X > -1  */
	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

    default:
      return NULL_TREE;
    }
}
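
/* Quick instances of the logic above:

     MAX (x, 3) > 2   -->  true   (the maximum is already greater than 2)
     MIN (x, 3) == 5  -->  false  (the minimum can never reach 5)
     MAX (x, 3) == 7  -->  x == 7 (only X can supply the larger value)  */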

/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.

   We set *STRICT_OVERFLOW_P to true if the return values depends on
   signed overflow being undefined.  Otherwise we do not change
   *STRICT_OVERFLOW_P.  */

static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
		bool *strict_overflow_p)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  static int depth;
  tree ret;

  if (depth > 3)
    return NULL;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
  depth--;

  return ret;
}

static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
		  bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
				   > GET_MODE_SIZE (TYPE_MODE (type)))
		? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;
  bool sub_strict_overflow_p;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
	 or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
	return const_binop (code, fold_convert (ctype, t),
			    fold_convert (ctype, c));
      break;

    CASE_CONVERT: case NON_LVALUE_EXPR:
      /* If op0 is an expression ... */
      if ((COMPARISON_CLASS_P (op0)
	   || UNARY_CLASS_P (op0)
	   || BINARY_CLASS_P (op0)
	   || VL_EXP_CLASS_P (op0)
	   || EXPRESSION_CLASS_P (op0))
	  /* ... and has wrapping overflow, and its type is smaller
	     than ctype, then we cannot pass through as widening.  */
	  && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
	       && (TYPE_PRECISION (ctype)
		   > TYPE_PRECISION (TREE_TYPE (op0))))
	      /* ... or this is a truncation (t is narrower than op0),
		 then we cannot pass through this narrowing.  */
	      || (TYPE_PRECISION (type)
		  < TYPE_PRECISION (TREE_TYPE (op0)))
	      /* ... or signedness changes for division or modulus,
		 then we cannot pass through this conversion.  */
	      || (code != MULT_EXPR
		  && (TYPE_UNSIGNED (ctype)
		      != TYPE_UNSIGNED (TREE_TYPE (op0))))
	      /* ... or has undefined overflow while the converted to
		 type has not, we cannot do the operation in the inner type
		 as that would introduce undefined overflow.  */
	      || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
		  && !TYPE_OVERFLOW_UNDEFINED (type))))
	break;

      /* Pass the constant down and see if we can make a simplification.  If
	 we can, replace this expression with the inner simplification for
	 possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
	  && TREE_CODE (t2) == INTEGER_CST
	  && !TREE_OVERFLOW (t2)
	  && (0 != (t1 = extract_muldiv (op0, t2, code,
					 code == MULT_EXPR
					 ? ctype : NULL_TREE,
					 strict_overflow_p))))
	return t1;
      break;

    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
	 must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
	{
	  tree cstype = (*signed_type_for) (ctype);
	  if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
	      != 0)
	    {
	      t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
	      return fold_convert (ctype, t1);
	    }
	  break;
	}
      /* If the constant is negative, we cannot simplify this.  */
      if (tree_int_cst_sgn (c) == -1)
	break;
      /* FALLTHROUGH */
    case NEGATE_EXPR:
      /* For division and modulus, type can't be unsigned, as e.g.
	 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
	 For signed types, even with wrapping overflow, this is fine.  */
      if (code != MULT_EXPR && TYPE_UNSIGNED (type))
	break;
      if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
	  != 0)
	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
	 this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
	break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      sub_strict_overflow_p = false;
      if ((t1 = extract_muldiv (op0, c, code, wide_type,
				&sub_strict_overflow_p)) != 0
	  && (t2 = extract_muldiv (op1, c, code, wide_type,
				   &sub_strict_overflow_p)) != 0)
	{
	  if (tree_int_cst_sgn (c) < 0)
	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
	 or floor division, by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  Signed
	 left-shift overflow is implementation-defined rather than
	 undefined in C90, so do not convert signed left shift into
	 multiplication.  */
      if (TREE_CODE (op1) == INTEGER_CST
	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
	  && TREE_INT_CST_HIGH (op1) == 0
	  && 0 != (t1 = fold_convert (ctype,
				      const_binop (LSHIFT_EXPR,
						   size_one_node, op1)))
	  && !TREE_OVERFLOW (t1))
	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
				       ? MULT_EXPR : FLOOR_DIV_EXPR,
				       ctype,
				       fold_convert (ctype, op0), t1),
			       c, code, wide_type, strict_overflow_p);
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
      sub_strict_overflow_p = false;
      t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
      t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
      if (t1 != 0 && t2 != 0
	  && (code == MULT_EXPR
	      /* If not multiplication, we can only do this if both operands
		 are divisible by c.  */
	      || (multiple_of_p (ctype, op0, c)
		  && multiple_of_p (ctype, op1, c))))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}

      /* If this was a subtraction, negate OP1 and set it to be an addition.
	 This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
	{
	  tcode = PLUS_EXPR, op1 = negate_expr (op1);
	  /* If OP1 was not easily negatable, the constant may be OP0.  */
	  if (TREE_CODE (op0) == INTEGER_CST)
	    {
	      tree tem = op0;
	      op0 = op1;
	      op1 = tem;
	      tem = t1;
	      t1 = t2;
	      t2 = tem;
	    }
	}

      if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If either OP1 or C are negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
	    break;
	}

      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
	{
	  op1 = const_binop (code, fold_convert (ctype, op1),
			     fold_convert (ctype, c));
	  /* We allow the constant to overflow with wrapping semantics.  */
	  if (op1 == 0
	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
	    break;
	}
      else
	break;

      /* If we have an unsigned type, we cannot widen the operation since it
	 will change the result if the original computation overflowed.  */
      if (TYPE_UNSIGNED (ctype) && ctype != type)
	break;

      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow
	 and overflow is defined.  With undefined overflow
	 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.  */
      if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
	return fold_build2 (tcode, ctype,
			    fold_build2 (code, ctype,
					 fold_convert (ctype, op0),
					 fold_convert (ctype, c)),
			    op1);

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  /* If the multiplication can overflow we cannot optimize this.  */
	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
	{
	  *strict_overflow_p = true;
	  return omit_one_operand (type, integer_zero_node, op0);
	}

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && (t1 = extract_muldiv (op0, c, code, wide_type,
				   strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && (t1 = extract_muldiv (op1, c, code, wide_type,
					strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
			    fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
	return 0;

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code)
	{
	  double_int mul;
	  bool overflow_p;
	  unsigned prec = TYPE_PRECISION (ctype);
	  bool uns = TYPE_UNSIGNED (ctype);
	  double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
	  double_int dic = tree_to_double_int (c).ext (prec, uns);
	  mul = diop1.mul_with_sign (dic, false, &overflow_p);
	  overflow_p = ((!uns && overflow_p)
			| TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
	  if (!double_int_fits_to_tree_p (ctype, mul)
	      && ((uns && tcode != MULT_EXPR) || !uns))
	    overflow_p = 1;
	  if (!overflow_p)
	    return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				double_int_to_tree (ctype, mul));
	}

      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with either an
	 operation of CODE or TCODE.

	 If we have an unsigned type, we cannot do this since it will change
	 the result if the original computation overflowed.  */
      if (TYPE_OVERFLOW_UNDEFINED (ctype)
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
		  && code != MULT_EXPR)))
	{
	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     op1, c)));
	    }
	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (code, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     c, op1)));
	    }
	}
      break;

    default:
      break;
    }

  return 0;
}
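
/* A small worked case: for T = x * 4 + 8, C = 4 and CODE =
   TRUNC_DIV_EXPR, the PLUS_EXPR arm recurses into both operands, each
   of which is exactly divisible by 4, so the division is eliminated and
   the result is x + 2 -- valid because overflow in the original
   expression is either impossible or undefined, which is what
   *STRICT_OVERFLOW_P records.  */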

/* Return a node which has the indicated constant VALUE (either 0 or
   1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
   and is of the indicated TYPE.  */

tree
constant_boolean_node (bool value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else if (TREE_CODE (type) == VECTOR_TYPE)
    return build_vector_from_val (type,
				  build_int_cst (TREE_TYPE (type),
						 value ? -1 : 0));
  else
    return fold_convert (type, value ? integer_one_node : integer_zero_node);
}
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (location_t loc,
				     enum tree_code code,
				     tree type, tree op0, tree op1,
				     tree cond, tree arg, int cond_first_p)
{
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;
  enum tree_code cond_code = COND_EXPR;

  if (TREE_CODE (cond) == COND_EXPR
      || TREE_CODE (cond) == VEC_COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an exception, then it does not make
	 sense to try to perform a logical or arithmetic operation
	 involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	rhs = false_value;
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
    cond_code = VEC_COND_EXPR;

  /* This transformation is only worthwhile if we don't have to wrap ARG
     in a SAVE_EXPR and the operation can be simplified without recursing
     on at least one of the branches once it's pushed inside the
     COND_EXPR.  */
  if (!TREE_CONSTANT (arg)
      && (TREE_SIDE_EFFECTS (arg)
	  || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
	  || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
    return NULL_TREE;

  arg = fold_convert_loc (loc, arg_type, arg);
  if (lhs == 0)
    {
      true_value = fold_convert_loc (loc, cond_type, true_value);
      if (cond_first_p)
	lhs = fold_build2_loc (loc, code, type, true_value, arg);
      else
	lhs = fold_build2_loc (loc, code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert_loc (loc, cond_type, false_value);
      if (cond_first_p)
	rhs = fold_build2_loc (loc, code, type, false_value, arg);
      else
	rhs = fold_build2_loc (loc, code, type, arg, false_value);
    }

  /* Check that we have simplified at least one of the branches.  */
  if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
    return NULL_TREE;

  return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
}
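/* A concrete instance (added illustration): folding 1 + (b ? 0 : x)
   yields b ? 1 : 1 + x, where the first arm has become constant, so
   the worthwhileness check above is satisfied because ARG is
   constant.  */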
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

bool
fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (TYPE_MODE (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (TREE_CODE (addend) == REAL_CST
      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}
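/* Under the default flags this lets X - 0.0 fold to X (NEGATE set, no
   sign-dependent rounding), while X + 0.0 is left alone when signed
   zeros are honored, because (-0.0) + 0.0 is +0.0, not -0.0.  */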
/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_mathfn_compare (location_t loc,
		     enum built_in_function fcode, enum tree_code code,
		     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      tree arg = CALL_EXPR_ARG (arg0, 0);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
	{
	  /* sqrt(x) < y is always false, if y is negative.  */
	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
	    return omit_one_operand_loc (loc, type, integer_zero_node, arg);

	  /* sqrt(x) > y is always true, if y is negative and we
	     don't care about NaNs, i.e. negative values of x.  */
	  if (code == NE_EXPR || !HONOR_NANS (mode))
	    return omit_one_operand_loc (loc, type, integer_one_node, arg);

	  /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
	  return fold_build2_loc (loc, GE_EXPR, type, arg,
				  build_real (TREE_TYPE (arg), dconst0));
	}
      else if (code == GT_EXPR || code == GE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) > y is x == +Inf, when y is very large.  */
	      if (HONOR_INFINITIES (mode))
		return fold_build2_loc (loc, EQ_EXPR, type, arg,
					build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) > y is always false, when y is very large
		 and we don't care about infinities.  */
	      return omit_one_operand_loc (loc, type, integer_zero_node, arg);
	    }

	  /* sqrt(x) > c is the same as x > c*c.  */
	  return fold_build2_loc (loc, code, type, arg,
				  build_real (TREE_TYPE (arg), c2));
	}
      else if (code == LT_EXPR || code == LE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) < y is always true, when y is a very large
		 value and we don't care about NaNs or Infinities.  */
	      if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
		return omit_one_operand_loc (loc, type, integer_one_node, arg);

	      /* sqrt(x) < y is x != +Inf when y is very large and we
		 don't care about NaNs.  */
	      if (! HONOR_NANS (mode))
		return fold_build2_loc (loc, NE_EXPR, type, arg,
					build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) < y is x >= 0 when y is very large and we
		 don't care about Infinities.  */
	      if (! HONOR_INFINITIES (mode))
		return fold_build2_loc (loc, GE_EXPR, type, arg,
					build_real (TREE_TYPE (arg), dconst0));

	      /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
	      arg = save_expr (arg);
	      return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				      fold_build2_loc (loc, GE_EXPR, type, arg,
						       build_real (TREE_TYPE (arg),
								   dconst0)),
				      fold_build2_loc (loc, NE_EXPR, type, arg,
						       build_real (TREE_TYPE (arg),
								   c2)));
	    }

	  /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
	  if (! HONOR_NANS (mode))
	    return fold_build2_loc (loc, code, type, arg,
				    build_real (TREE_TYPE (arg), c2));

	  /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
	  arg = save_expr (arg);
	  return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				  fold_build2_loc (loc, GE_EXPR, type, arg,
						   build_real (TREE_TYPE (arg),
							       dconst0)),
				  fold_build2_loc (loc, code, type, arg,
						   build_real (TREE_TYPE (arg),
							       c2)));
	}
    }

  return NULL_TREE;
}
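/* A concrete instance, assuming IEEE double and default flags:
   sqrt (x) < 2.0 becomes x >= 0.0 && x < 4.0, and with NaNs ignored
   (e.g. -ffinite-math-only) it becomes just x < 4.0, since c2 == 4.0
   is finite.  */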
/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_inf_compare (location_t loc, enum tree_code code, tree type,
		  tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
	return NULL_TREE;
      return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
      arg0 = save_expr (arg0);
      return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
	return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
				arg0, build_real (TREE_TYPE (arg0), max));

      temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));
      return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);

    default:
      break;
    }

  return NULL_TREE;
}
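/* For instance, for IEEE double this folds x >= __builtin_inf () into
   x > DBL_MAX, and x != __builtin_inf () into !(x > DBL_MAX) when NaNs
   are honored.  */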
/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X / C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_div_compare (location_t loc,
		  enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  double_int val;
  bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
  bool neg_overflow;
  bool overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1);  */
  val = TREE_INT_CST (arg01)
	.mul_with_sign (TREE_INT_CST (arg1), unsigned_p, &overflow);
  prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
  neg_overflow = false;

  if (unsigned_p)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp).  */
      val = TREE_INT_CST (prod)
	    .add_with_sign (TREE_INT_CST (tmp), unsigned_p, &overflow);
      hi = force_fit_type_double (TREE_TYPE (arg00), val,
				  -1, overflow | TREE_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  neg_overflow = true;
	  lo = int_const_binop (MINUS_EXPR, prod, tmp);
	  hi = prod;
	  break;

	case 0:
	  lo = fold_negate_const (tmp, TREE_TYPE (arg0));
	  hi = tmp;
	  break;

	case 1:
	  hi = int_const_binop (PLUS_EXPR, prod, tmp);
	  lo = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  hi = int_const_binop (MINUS_EXPR, prod, tmp);
	  lo = prod;
	  break;

	case 0:
	  hi = fold_negate_const (tmp, TREE_TYPE (arg0));
	  lo = tmp;
	  break;

	case 1:
	  neg_overflow = true;
	  lo = int_const_binop (PLUS_EXPR, prod, tmp);
	  hi = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand_loc (loc, type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}
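/* Worked example, assuming 32-bit signed int: for X / 4 == 2 we get
   prod = 8, tmp = 3, lo = 8, hi = 11, so the comparison is rewritten
   as the range check 8 <= X && X <= 11, since exactly the values
   8..11 truncate to 2 under TRUNC_DIV_EXPR.  */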
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of the test
   using a sign test.  Otherwise return NULL.  TYPE is the desired
   result type.  */

static tree
fold_single_bit_test_into_sign_test (location_t loc,
				     enum tree_code code, tree arg0, tree arg1,
				     tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      /* If we have (A & C) != 0 where C is the sign bit of A, convert
	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      if (arg00 != NULL_TREE
	  /* This is only a win if casting to a signed type is cheap,
	     i.e. when arg00's type is not a partial mode.  */
	  && TYPE_PRECISION (TREE_TYPE (arg00))
	     == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
	{
	  tree stype = signed_type_for (TREE_TYPE (arg00));
	  return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
				  result_type,
				  fold_convert_loc (loc, stype, arg00),
				  build_int_cst (stype, 0));
	}
    }

  return NULL_TREE;
}
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */

tree
fold_single_bit_test (location_t loc, enum tree_code code,
		      tree arg0, tree arg1, tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem, one;

      /* First, see if we can fold the single bit test into a sign-bit
	 test.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
						 result_type);
      if (tem)
	return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
	 convert that into ((A >> C2) & 1).  Where C2 = log2(C).
	 Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
		      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert_loc (loc, intermediate_type, inner);

      if (bitnum != 0)
	inner = build2 (RSHIFT_EXPR, intermediate_type,
			inner, size_int (bitnum));

      one = build_int_cst (intermediate_type, 1);

      if (code == EQ_EXPR)
	inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type,
				 inner, one);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);

      /* Make sure to return the proper type.  */
      inner = fold_convert_loc (loc, result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
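/* For example, (x & 8) != 0 becomes ((unsigned) x >> 3) & 1, and
   (x & 8) == 0 becomes (((unsigned) x >> 3) ^ 1) & 1, while a test of
   the sign bit such as (x & INT_MIN) != 0 is handled by the sign-test
   variant above and becomes x < 0 for signed x.  */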
/* Check whether we are allowed to reorder operands arg0 and arg1,
   such that the evaluation of arg1 occurs before arg0.  */

static bool
reorder_operands_p (const_tree arg0, const_tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
	 && ! TREE_SIDE_EFFECTS (arg1);
}
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */

bool
tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
{
  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return 0;
  if (TREE_CODE (arg0) == INTEGER_CST)
    return 1;

  if (TREE_CODE (arg1) == REAL_CST)
    return 0;
  if (TREE_CODE (arg0) == REAL_CST)
    return 1;

  if (TREE_CODE (arg1) == FIXED_CST)
    return 0;
  if (TREE_CODE (arg0) == FIXED_CST)
    return 1;

  if (TREE_CODE (arg1) == COMPLEX_CST)
    return 0;
  if (TREE_CODE (arg0) == COMPLEX_CST)
    return 1;

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (optimize_function_for_size_p (cfun))
    return 0;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return 1;

  /* Put SSA_NAMEs last.  */
  if (TREE_CODE (arg1) == SSA_NAME)
    return 0;
  if (TREE_CODE (arg0) == SSA_NAME)
    return 1;

  /* Put variables last.  */
  if (DECL_P (arg1))
    return 0;
  if (DECL_P (arg0))
    return 1;

  return 0;
}
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
   ARG0 is extended to a wider type.  */

static tree
fold_widened_comparison (location_t loc, enum tree_code code,
			 tree type, tree arg0, tree arg1)
{
  tree arg0_unw = get_unwidened (arg0, NULL_TREE);
  tree arg1_unw;
  tree shorter_type, outer_type;
  tree min, max;
  bool above, below;

  if (arg0_unw == arg0)
    return NULL_TREE;
  shorter_type = TREE_TYPE (arg0_unw);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (shorter_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
    return NULL_TREE;

  arg1_unw = get_unwidened (arg1, NULL_TREE);

  /* If possible, express the comparison in the shorter mode.  */
  if ((code == EQ_EXPR || code == NE_EXPR
       || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
      && (TREE_TYPE (arg1_unw) == shorter_type
	  || ((TYPE_PRECISION (shorter_type)
	       >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
	      && (TYPE_UNSIGNED (shorter_type)
		  == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
	  || (TREE_CODE (arg1_unw) == INTEGER_CST
	      && (TREE_CODE (shorter_type) == INTEGER_TYPE
		  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
	      && int_fits_type_p (arg1_unw, shorter_type))))
    return fold_build2_loc (loc, code, type, arg0_unw,
			    fold_convert_loc (loc, shorter_type, arg1_unw));

  if (TREE_CODE (arg1_unw) != INTEGER_CST
      || TREE_CODE (shorter_type) != INTEGER_TYPE
      || !int_fits_type_p (arg1_unw, shorter_type))
    return NULL_TREE;

  /* If we are comparing with the integer that does not fit into the range
     of the shorter type, the result is known.  */
  outer_type = TREE_TYPE (arg1_unw);
  min = lower_bound_in_type (outer_type, shorter_type);
  max = upper_bound_in_type (outer_type, shorter_type);

  above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   max, arg1_unw));
  below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   arg1_unw, min));

  switch (code)
    {
    case EQ_EXPR:
      if (above || below)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;

    case NE_EXPR:
      if (above || below)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;

    case LT_EXPR:
    case LE_EXPR:
      if (above)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      else if (below)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;

    case GT_EXPR:
    case GE_EXPR:
      if (above)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      else if (below)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
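/* For example, if C has type unsigned char, (int) C == 300 cannot be
   expressed in the shorter type and 300 is above its maximum, so the
   comparison folds to constant false, while (int) C == 17 folds to
   the narrower comparison C == 17.  */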
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
   ARG0 just the signedness is changed.  */

static tree
fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
			      tree arg0, tree arg1)
{
  tree arg0_inner;
  tree inner_type, outer_type;

  if (!CONVERT_EXPR_P (arg0))
    return NULL_TREE;

  outer_type = TREE_TYPE (arg0);
  arg0_inner = TREE_OPERAND (arg0, 0);
  inner_type = TREE_TYPE (arg0_inner);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (inner_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) != INTEGER_CST
      && !(CONVERT_EXPR_P (arg1)
	   && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
    return NULL_TREE;

  if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
      && code != NE_EXPR
      && code != EQ_EXPR)
    return NULL_TREE;

  if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) == INTEGER_CST)
    arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
				  0, TREE_OVERFLOW (arg1));
  else
    arg1 = fold_convert_loc (loc, inner_type, arg1);

  return fold_build2_loc (loc, code, type, arg0_inner, arg1);
}
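/* For example, with int i, (unsigned int) i == 5U is folded back to
   i == 5: the cast changes only the signedness, which cannot affect
   an equality test between same-precision values.  */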
/* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
   step of the array.  Reconstructs s and delta in the case of s *
   delta being an integer constant (and thus already folded).  ADDR is
   the address.  MULT is the multiplicative expression.  If the
   function succeeds, the new address expression is returned.
   Otherwise NULL_TREE is returned.  LOC is the location of the
   resulting expression.  */

static tree
try_move_mult_to_index (location_t loc, tree addr, tree op1)
{
  tree s, delta, step;
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;
  bool mdim = false;

  /* Strip the nops that might be added when converting op1 to sizetype.  */
  STRIP_NOPS (op1);

  /* Canonicalize op1 into a possibly non-constant delta
     and an INTEGER_CST s.  */
  if (TREE_CODE (op1) == MULT_EXPR)
    {
      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  s = arg0;
	  delta = arg1;
	}
      else if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  s = arg1;
	  delta = arg0;
	}
      else
	return NULL_TREE;
    }
  else if (TREE_CODE (op1) == INTEGER_CST)
    {
      delta = op1;
      s = NULL_TREE;
    }
  else
    {
      /* Simulate we are delta * 1.  */
      delta = op1;
      s = integer_one_node;
    }

  /* Handle &x.array the same as we would handle &x.array[0].  */
  if (TREE_CODE (ref) == COMPONENT_REF
      && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
    {
      tree domain;

      /* Remember if this was a multi-dimensional array.  */
      if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
	mdim = true;

      domain = TYPE_DOMAIN (TREE_TYPE (ref));
      if (! domain)
	goto cont;
      itype = TREE_TYPE (domain);

      step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
      if (TREE_CODE (step) != INTEGER_CST)
	goto cont;

      if (s)
	{
	  if (! tree_int_cst_equal (step, s))
	    goto cont;
	}
      else
	{
	  /* Try if delta is a multiple of step.  */
	  tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
	  if (! tmp)
	    goto cont;
	  delta = tmp;
	}

      /* Only fold here if we can verify we do not overflow one
	 dimension of a multi-dimensional array.  */
      if (mdim)
	{
	  tree tmp;

	  if (!TYPE_MIN_VALUE (domain)
	      || !TYPE_MAX_VALUE (domain)
	      || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
	    goto cont;

	  tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
				 fold_convert_loc (loc, itype,
						   TYPE_MIN_VALUE (domain)),
				 fold_convert_loc (loc, itype, delta));
	  if (TREE_CODE (tmp) != INTEGER_CST
	      || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
	    goto cont;
	}

      /* We found a suitable component reference.  */

      pref = TREE_OPERAND (addr, 0);
      ret = copy_node (pref);
      SET_EXPR_LOCATION (ret, loc);

      ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
			fold_build2_loc
			  (loc, PLUS_EXPR, itype,
			   fold_convert_loc (loc, itype,
					     TYPE_MIN_VALUE
					       (TYPE_DOMAIN (TREE_TYPE (ref)))),
			   fold_convert_loc (loc, itype, delta)),
			NULL_TREE, NULL_TREE);
      return build_fold_addr_expr_loc (loc, ret);
    }

cont:

  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
	{
	  tree domain;

	  /* Remember if this was a multi-dimensional array.  */
	  if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
	    mdim = true;

	  domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
	  if (! domain)
	    continue;
	  itype = TREE_TYPE (domain);

	  step = array_ref_element_size (ref);
	  if (TREE_CODE (step) != INTEGER_CST)
	    continue;

	  if (s)
	    {
	      if (! tree_int_cst_equal (step, s))
		continue;
	    }
	  else
	    {
	      /* Try if delta is a multiple of step.  */
	      tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
	      if (! tmp)
		continue;
	      delta = tmp;
	    }

	  /* Only fold here if we can verify we do not overflow one
	     dimension of a multi-dimensional array.  */
	  if (mdim)
	    {
	      tree tmp;

	      if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
		  || !TYPE_MAX_VALUE (domain)
		  || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
		continue;

	      tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
				     fold_convert_loc (loc, itype,
						       TREE_OPERAND (ref, 1)),
				     fold_convert_loc (loc, itype, delta));
	      if (!tmp
		  || TREE_CODE (tmp) != INTEGER_CST
		  || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
		continue;
	    }

	  break;
	}
      else
	mdim = false;

      if (!handled_component_p (ref))
	return NULL_TREE;
    }

  /* We found the suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  SET_EXPR_LOCATION (ret, loc);
  pos = ret;

  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  TREE_OPERAND (pos, 1)
    = fold_build2_loc (loc, PLUS_EXPR, itype,
		       fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
		       fold_convert_loc (loc, itype, delta));
  return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
}
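/* For example, given int a[10], this turns &a[2] p+ 4 * i (with
   4 == sizeof (int) matching the array step) into &a[2 + i]; for
   multi-dimensional accesses the bound checks above first verify that
   the new index cannot leave its dimension.  */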
/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */

static tree
fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
{
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;

  if (TREE_CODE (bound) == LT_EXPR)
    a = TREE_OPERAND (bound, 0);
  else if (TREE_CODE (bound) == GT_EXPR)
    a = TREE_OPERAND (bound, 1);
  else
    return NULL_TREE;

  typea = TREE_TYPE (a);
  if (!INTEGRAL_TYPE_P (typea)
      && !POINTER_TYPE_P (typea))
    return NULL_TREE;

  if (TREE_CODE (ineq) == LT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 1);
      y = TREE_OPERAND (ineq, 0);
    }
  else if (TREE_CODE (ineq) == GT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 0);
      y = TREE_OPERAND (ineq, 1);
    }
  else
    return NULL_TREE;

  if (TREE_TYPE (a1) != typea)
    return NULL_TREE;

  if (POINTER_TYPE_P (typea))
    {
      /* Convert the pointer types into integer before taking the
	 difference.  */
      tree ta = fold_convert_loc (loc, ssizetype, a);
      tree ta1 = fold_convert_loc (loc, ssizetype, a1);
      diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
    }
  else
    diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);

  if (!diff || !integer_onep (diff))
    return NULL_TREE;

  return fold_build2_loc (loc, GE_EXPR, type, a, y);
}
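/* Note that DIFF must fold to exactly 1, i.e. INEQ really compares
   A + 1 against Y; only then is the strict A + 1 > Y safely replaced
   by the non-sharp A >= Y.  */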
/* Fold a sum or difference of at least one multiplication.
   Returns the folded tree or NULL if no simplification could be made.  */

static tree
fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
			  tree arg0, tree arg1)
{
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else if (TREE_CODE (arg0) == INTEGER_CST)
    {
      arg00 = build_one_cst (type);
      arg01 = arg0;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      arg00 = arg0;
      arg01 = build_one_cst (type);
    }
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else if (TREE_CODE (arg1) == INTEGER_CST)
    {
      arg10 = build_one_cst (type);
      /* As we canonicalize A - 2 to A + -2 get rid of that sign for
	 the purpose of this canonicalization.  */
      if (TREE_INT_CST_HIGH (arg1) == -1
	  && negate_expr_p (arg1)
	  && code == PLUS_EXPR)
	{
	  arg11 = negate_expr (arg1);
	  code = MINUS_EXPR;
	}
      else
	arg11 = arg1;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      arg10 = arg1;
      arg11 = build_one_cst (type);
    }
  same = NULL_TREE;

  if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
  else if (host_integerp (arg01, 0)
	   && host_integerp (arg11, 0))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = TREE_INT_CST_LOW (arg01);
      int11 = TREE_INT_CST_LOW (arg11);

      /* Move min of absolute values to int11.  */
      if (absu_hwi (int01) < absu_hwi (int11))
	{
	  tmp = int01, int01 = int11, int11 = tmp;
	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
	  maybe_same = alt0;
	  swap = true;
	}
      else
	maybe_same = arg11;

      if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
	  /* The remainder should not be a constant, otherwise we
	     end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
	     increased the number of multiplications necessary.  */
	  && TREE_CODE (arg10) != INTEGER_CST)
	{
	  alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
				  build_int_cst (TREE_TYPE (arg00),
						 int01 / int11));
	  alt1 = arg10;
	  same = maybe_same;
	  if (swap)
	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
	}
    }

  if (same)
    return fold_build2_loc (loc, MULT_EXPR, type,
			    fold_build2_loc (loc, code, type,
					     fold_convert_loc (loc, type, alt0),
					     fold_convert_loc (loc, type, alt1)),
			    fold_convert_loc (loc, type, same));

  return NULL_TREE;
}
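/* Examples: a * c + b * c becomes (a + b) * c via the identical-
   multiplicand cases, and x * 8 + y * 2 becomes (x * 4 + y) * 2 via
   the common power-of-two factor case (int01 = 8, int11 = 2).  */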
/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_int (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  if (total_bytes > len)
    return 0;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (bitpos < HOST_BITS_PER_WIDE_INT)
	value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
      else
	value = (unsigned char) (TREE_INT_CST_HIGH (expr)
				 >> (bitpos - HOST_BITS_PER_WIDE_INT));

      if (total_bytes > UNITS_PER_WORD)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      ptr[offset] = value;
    }
  return total_bytes;
}
/* Subroutine of native_encode_expr.  Encode the FIXED_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_fixed (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  enum machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  FIXED_VALUE_TYPE value;
  tree i_value, i_type;

  if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return 0;

  i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);

  if (NULL_TREE == i_type
      || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
    return 0;

  value = TREE_FIXED_CST (expr);
  i_value = double_int_to_tree (i_type, value.data);

  return native_encode_int (i_value, ptr, len);
}
/* Subroutine of native_encode_expr.  Encode the REAL_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_real (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words, bitpos;
  unsigned char value;

  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  long tmp[6];

  if (total_bytes > len)
    return 0;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));

  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));

      if (UNITS_PER_WORD < 4)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
    }
  return total_bytes;
}
/* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_complex (const_tree expr, unsigned char *ptr, int len)
{
  int rsize, isize;
  tree part;

  part = TREE_REALPART (expr);
  rsize = native_encode_expr (part, ptr, len);
  if (rsize == 0)
    return 0;
  part = TREE_IMAGPART (expr);
  isize = native_encode_expr (part, ptr+rsize, len-rsize);
  if (isize != rsize)
    return 0;
  return rsize + isize;
}
/* Subroutine of native_encode_expr.  Encode the VECTOR_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_vector (const_tree expr, unsigned char *ptr, int len)
{
  int i, size, offset, count;
  tree itype, elem;

  offset = 0;
  count = VECTOR_CST_NELTS (expr);
  itype = TREE_TYPE (TREE_TYPE (expr));
  size = GET_MODE_SIZE (TYPE_MODE (itype));
  for (i = 0; i < count; i++)
    {
      elem = VECTOR_CST_ELT (expr, i);
      if (native_encode_expr (elem, ptr+offset, len-offset) != size)
	return 0;
      offset += size;
    }
  return offset;
}
/* Subroutine of native_encode_expr.  Encode the STRING_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_string (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  HOST_WIDE_INT total_bytes;

  if (TREE_CODE (type) != ARRAY_TYPE
      || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
      || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
      || !host_integerp (TYPE_SIZE_UNIT (type), 0))
    return 0;
  total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
  if (total_bytes > len)
    return 0;
  if (TREE_STRING_LENGTH (expr) < total_bytes)
    {
      memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
      memset (ptr + TREE_STRING_LENGTH (expr), 0,
	      total_bytes - TREE_STRING_LENGTH (expr));
    }
  else
    memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
  return total_bytes;
}
/* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
   REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
   buffer PTR of length LEN bytes.  Return the number of bytes
   placed in the buffer, or zero upon failure.  */

int
native_encode_expr (const_tree expr, unsigned char *ptr, int len)
{
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return native_encode_int (expr, ptr, len);

    case REAL_CST:
      return native_encode_real (expr, ptr, len);

    case FIXED_CST:
      return native_encode_fixed (expr, ptr, len);

    case COMPLEX_CST:
      return native_encode_complex (expr, ptr, len);

    case VECTOR_CST:
      return native_encode_vector (expr, ptr, len);

    case STRING_CST:
      return native_encode_string (expr, ptr, len);

    default:
      return 0;
    }
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_int (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  double_int result;

  if (total_bytes > len
      || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return NULL_TREE;

  result = double_int::from_buffer (ptr, total_bytes);

  return double_int_to_tree (type, result);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a FIXED_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_fixed (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  double_int result;
  FIXED_VALUE_TYPE fixed_value;

  if (total_bytes > len
      || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return NULL_TREE;

  result = double_int::from_buffer (ptr, total_bytes);
  fixed_value = fixed_from_double_int (result, TYPE_MODE (type));

  return build_fixed (type, fixed_value);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a REAL_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_real (tree type, const unsigned char *ptr, int len)
{
  enum machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  int byte, offset, word, words, bitpos;
  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  REAL_VALUE_TYPE r;
  long tmp[6];

  total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  if (total_bytes > len || total_bytes > 24)
    return NULL_TREE;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  memset (tmp, 0, sizeof (tmp));
  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      if (UNITS_PER_WORD < 4)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];

      tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
    }

  real_from_target (&r, tmp, mode);
  return build_real (type, r);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_complex (tree type, const unsigned char *ptr, int len)
{
  tree etype, rpart, ipart;
  int size;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  if (size * 2 > len)
    return NULL_TREE;
  rpart = native_interpret_expr (etype, ptr, size);
  if (!rpart)
    return NULL_TREE;
  ipart = native_interpret_expr (etype, ptr+size, size);
  if (!ipart)
    return NULL_TREE;
  return build_complex (type, rpart, ipart);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_vector (tree type, const unsigned char *ptr, int len)
{
  tree etype, elem;
  int i, size, count;
  tree *elements;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  count = TYPE_VECTOR_SUBPARTS (type);
  if (size * count > len)
    return NULL_TREE;

  elements = XALLOCAVEC (tree, count);
  for (i = count - 1; i >= 0; i--)
    {
      elem = native_interpret_expr (etype, ptr+(i*size), size);
      if (!elem)
	return NULL_TREE;
      elements[i] = elem;
    }
  return build_vector (type, elements);
}
/* Subroutine of fold_view_convert_expr.  Interpret the contents of
   the buffer PTR of length LEN as a constant of type TYPE.  For
   INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
   we return a REAL_CST, etc...  If the buffer cannot be interpreted,
   return NULL_TREE.  */

tree
native_interpret_expr (tree type, const unsigned char *ptr, int len)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      return native_interpret_int (type, ptr, len);

    case REAL_TYPE:
      return native_interpret_real (type, ptr, len);

    case FIXED_POINT_TYPE:
      return native_interpret_fixed (type, ptr, len);

    case COMPLEX_TYPE:
      return native_interpret_complex (type, ptr, len);

    case VECTOR_TYPE:
      return native_interpret_vector (type, ptr, len);

    default:
      return NULL_TREE;
    }
}
/* Returns true if we can interpret the contents of a native encoding
   as TYPE.  */

static bool
can_native_interpret_type_p (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case FIXED_POINT_TYPE:
    case REAL_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
      return true;
    default:
      return false;
    }
}
/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
   TYPE at compile-time.  If we're unable to perform the conversion
   return NULL_TREE.  */

static tree
fold_view_convert_expr (tree type, tree expr)
{
  /* We support up to 512-bit values (for V8DFmode).  */
  unsigned char buffer[64];
  int len;

  /* Check that the host and target are sane.  */
  if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    return NULL_TREE;

  len = native_encode_expr (expr, buffer, sizeof (buffer));
  if (len == 0)
    return NULL_TREE;

  return native_interpret_expr (type, buffer, len);
}
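/* For example, on an IEEE target VIEW_CONVERT_EXPR<int> (1.0f) is
   folded at compile time to 0x3f800000: the float is first encoded
   into BUFFER in target byte order and then re-interpreted as an
   INTEGER_CST.  */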
/* Build an expression for the address of T.  Folds away INDIRECT_REF
   to avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its
     address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  if (TREE_CODE (t) == INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);

      if (TREE_TYPE (t) != ptrtype)
	t = build1_loc (loc, NOP_EXPR, ptrtype, t);
    }
  else if (TREE_CODE (t) == MEM_REF
	   && integer_zerop (TREE_OPERAND (t, 1)))
    return TREE_OPERAND (t, 0);
  else if (TREE_CODE (t) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
    return fold_binary (POINTER_PLUS_EXPR, ptrtype,
			TREE_OPERAND (t, 0),
			convert_to_ptrofftype (TREE_OPERAND (t, 1)));
  else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
    {
      t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));

      if (TREE_TYPE (t) != ptrtype)
	t = fold_convert_loc (loc, ptrtype, t);
    }
  else
    t = build1_loc (loc, ADDR_EXPR, ptrtype, t);

  return t;
}
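/* For example, &*p simplifies to p (the INDIRECT_REF case above), and
   the address of a zero-offset MEM_REF likewise folds back to its base
   pointer, which keeps gimplification from seeing artificial
   dereferences.  */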
/* Build an expression for the address of T.  */

tree
build_fold_addr_expr_loc (location_t loc, tree t)
{
  tree ptrtype = build_pointer_type (TREE_TYPE (t));

  return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
}
static bool vec_cst_ctor_to_array (tree, tree *);
7756 /* Fold a unary expression of code CODE and type TYPE with operand
7757 OP0. Return the folded expression if folding is successful.
7758 Otherwise, return NULL_TREE. */
7761 fold_unary_loc (location_t loc
, enum tree_code code
, tree type
, tree op0
)
7765 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
7767 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
7768 && TREE_CODE_LENGTH (code
) == 1);
7773 if (CONVERT_EXPR_CODE_P (code
)
7774 || code
== FLOAT_EXPR
|| code
== ABS_EXPR
|| code
== NEGATE_EXPR
)
7776 /* Don't use STRIP_NOPS, because signedness of argument type
7778 STRIP_SIGN_NOPS (arg0
);
7782 /* Strip any conversions that don't change the mode. This
7783 is safe for every expression, except for a comparison
7784 expression because its signedness is derived from its
7787 Note that this is done as an internal manipulation within
7788 the constant folder, in order to find the simplest
7789 representation of the arguments so that their form can be
7790 studied. In any cases, the appropriate type conversions
7791 should be put back in the tree that will get out of the
7797 if (TREE_CODE_CLASS (code
) == tcc_unary
)
7799 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
7800 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7801 fold_build1_loc (loc
, code
, type
,
7802 fold_convert_loc (loc
, TREE_TYPE (op0
),
7803 TREE_OPERAND (arg0
, 1))));
7804 else if (TREE_CODE (arg0
) == COND_EXPR
)
7806 tree arg01
= TREE_OPERAND (arg0
, 1);
7807 tree arg02
= TREE_OPERAND (arg0
, 2);
7808 if (! VOID_TYPE_P (TREE_TYPE (arg01
)))
7809 arg01
= fold_build1_loc (loc
, code
, type
,
7810 fold_convert_loc (loc
,
7811 TREE_TYPE (op0
), arg01
));
7812 if (! VOID_TYPE_P (TREE_TYPE (arg02
)))
7813 arg02
= fold_build1_loc (loc
, code
, type
,
7814 fold_convert_loc (loc
,
7815 TREE_TYPE (op0
), arg02
));
7816 tem
= fold_build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7819 /* If this was a conversion, and all we did was to move into
7820 inside the COND_EXPR, bring it back out. But leave it if
7821 it is a conversion from integer to integer and the
7822 result precision is no wider than a word since such a
7823 conversion is cheap and may be optimized away by combine,
7824 while it couldn't if it were outside the COND_EXPR. Then return
7825 so we don't get into an infinite recursion loop taking the
7826 conversion out and then back in. */
7828 if ((CONVERT_EXPR_CODE_P (code
)
7829 || code
== NON_LVALUE_EXPR
)
7830 && TREE_CODE (tem
) == COND_EXPR
7831 && TREE_CODE (TREE_OPERAND (tem
, 1)) == code
7832 && TREE_CODE (TREE_OPERAND (tem
, 2)) == code
7833 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 1))
7834 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 2))
7835 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))
7836 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)))
7837 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7839 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))))
7840 && TYPE_PRECISION (TREE_TYPE (tem
)) <= BITS_PER_WORD
)
7841 || flag_syntax_only
))
7842 tem
= build1_loc (loc
, code
, type
,
7844 TREE_TYPE (TREE_OPERAND
7845 (TREE_OPERAND (tem
, 1), 0)),
7846 TREE_OPERAND (tem
, 0),
7847 TREE_OPERAND (TREE_OPERAND (tem
, 1), 0),
7848 TREE_OPERAND (TREE_OPERAND (tem
, 2),
7857 /* Re-association barriers around constants and other re-association
7858 barriers can be removed. */
7859 if (CONSTANT_CLASS_P (op0
)
7860 || TREE_CODE (op0
) == PAREN_EXPR
)
7861 return fold_convert_loc (loc
, type
, op0
);
7866 case FIX_TRUNC_EXPR
:
7867 if (TREE_TYPE (op0
) == type
)
7870 if (COMPARISON_CLASS_P (op0
))
7872 /* If we have (type) (a CMP b) and type is an integral type, return
7873 new expression involving the new type. Canonicalize
7874 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7876 Do not fold the result as that would not simplify further, also
7877 folding again results in recursions. */
7878 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
7879 return build2_loc (loc
, TREE_CODE (op0
), type
,
7880 TREE_OPERAND (op0
, 0),
7881 TREE_OPERAND (op0
, 1));
7882 else if (!INTEGRAL_TYPE_P (type
) && !VOID_TYPE_P (type
)
7883 && TREE_CODE (type
) != VECTOR_TYPE
)
7884 return build3_loc (loc
, COND_EXPR
, type
, op0
,
7885 constant_boolean_node (true, type
),
7886 constant_boolean_node (false, type
));
7889 /* Handle cases of two conversions in a row. */
7890 if (CONVERT_EXPR_P (op0
))
7892 tree inside_type
= TREE_TYPE (TREE_OPERAND (op0
, 0));
7893 tree inter_type
= TREE_TYPE (op0
);
7894 int inside_int
= INTEGRAL_TYPE_P (inside_type
);
7895 int inside_ptr
= POINTER_TYPE_P (inside_type
);
7896 int inside_float
= FLOAT_TYPE_P (inside_type
);
7897 int inside_vec
= TREE_CODE (inside_type
) == VECTOR_TYPE
;
7898 unsigned int inside_prec
= TYPE_PRECISION (inside_type
);
7899 int inside_unsignedp
= TYPE_UNSIGNED (inside_type
);
7900 int inter_int
= INTEGRAL_TYPE_P (inter_type
);
7901 int inter_ptr
= POINTER_TYPE_P (inter_type
);
7902 int inter_float
= FLOAT_TYPE_P (inter_type
);
7903 int inter_vec
= TREE_CODE (inter_type
) == VECTOR_TYPE
;
7904 unsigned int inter_prec
= TYPE_PRECISION (inter_type
);
7905 int inter_unsignedp
= TYPE_UNSIGNED (inter_type
);
7906 int final_int
= INTEGRAL_TYPE_P (type
);
7907 int final_ptr
= POINTER_TYPE_P (type
);
7908 int final_float
= FLOAT_TYPE_P (type
);
7909 int final_vec
= TREE_CODE (type
) == VECTOR_TYPE
;
7910 unsigned int final_prec
= TYPE_PRECISION (type
);
7911 int final_unsignedp
= TYPE_UNSIGNED (type
);
7913 /* In addition to the cases of two conversions in a row
7914 handled below, if we are converting something to its own
7915 type via an object of identical or wider precision, neither
7916 conversion is needed. */
7917 if (TYPE_MAIN_VARIANT (inside_type
) == TYPE_MAIN_VARIANT (type
)
7918 && (((inter_int
|| inter_ptr
) && final_int
)
7919 || (inter_float
&& final_float
))
7920 && inter_prec
>= final_prec
)
7921 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7923 /* Likewise, if the intermediate and initial types are either both
7924 float or both integer, we don't need the middle conversion if the
7925 former is wider than the latter and doesn't change the signedness
7926 (for integers). Avoid this if the final type is a pointer since
7927 then we sometimes need the middle conversion. Likewise if the
7928 final type has a precision not equal to the size of its mode. */
7929 if (((inter_int
&& inside_int
)
7930 || (inter_float
&& inside_float
)
7931 || (inter_vec
&& inside_vec
))
7932 && inter_prec
>= inside_prec
7933 && (inter_float
|| inter_vec
7934 || inter_unsignedp
== inside_unsignedp
)
7935 && ! (final_prec
!= GET_MODE_PRECISION (TYPE_MODE (type
))
7936 && TYPE_MODE (type
) == TYPE_MODE (inter_type
))
7938 && (! final_vec
|| inter_prec
== inside_prec
))
7939 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7941 /* If we have a sign-extension of a zero-extended value, we can
7942 replace that by a single zero-extension. Likewise if the
7943 final conversion does not change precision we can drop the
7944 intermediate conversion. */
7945 if (inside_int
&& inter_int
&& final_int
7946 && ((inside_prec
< inter_prec
&& inter_prec
< final_prec
7947 && inside_unsignedp
&& !inter_unsignedp
)
7948 || final_prec
== inter_prec
))
7949 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7951 /* Two conversions in a row are not needed unless:
7952 - some conversion is floating-point (overstrict for now), or
7953 - some conversion is a vector (overstrict for now), or
7954 - the intermediate type is narrower than both initial and
7956 - the intermediate type and innermost type differ in signedness,
7957 and the outermost type is wider than the intermediate, or
7958 - the initial type is a pointer type and the precisions of the
7959 intermediate and final types differ, or
7960 - the final type is a pointer type and the precisions of the
7961 initial and intermediate types differ. */
7962 if (! inside_float
&& ! inter_float
&& ! final_float
7963 && ! inside_vec
&& ! inter_vec
&& ! final_vec
7964 && (inter_prec
>= inside_prec
|| inter_prec
>= final_prec
)
7965 && ! (inside_int
&& inter_int
7966 && inter_unsignedp
!= inside_unsignedp
7967 && inter_prec
< final_prec
)
7968 && ((inter_unsignedp
&& inter_prec
> inside_prec
)
7969 == (final_unsignedp
&& final_prec
> inter_prec
))
7970 && ! (inside_ptr
&& inter_prec
!= final_prec
)
7971 && ! (final_ptr
&& inside_prec
!= inter_prec
)
7972 && ! (final_prec
!= GET_MODE_PRECISION (TYPE_MODE (type
))
7973 && TYPE_MODE (type
) == TYPE_MODE (inter_type
)))
7974 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7977 /* Handle (T *)&A.B.C for A being of type T and B and C
7978 living at offset zero. This occurs frequently in
7979 C++ upcasting and then accessing the base. */
7980 if (TREE_CODE (op0
) == ADDR_EXPR
7981 && POINTER_TYPE_P (type
)
7982 && handled_component_p (TREE_OPERAND (op0
, 0)))
7984 HOST_WIDE_INT bitsize
, bitpos
;
7986 enum machine_mode mode
;
7987 int unsignedp
, volatilep
;
7988 tree base
= TREE_OPERAND (op0
, 0);
7989 base
= get_inner_reference (base
, &bitsize
, &bitpos
, &offset
,
7990 &mode
, &unsignedp
, &volatilep
, false);
7991 /* If the reference was to a (constant) zero offset, we can use
7992 the address of the base if it has the same base type
7993 as the result type and the pointer type is unqualified. */
7994 if (! offset
&& bitpos
== 0
7995 && (TYPE_MAIN_VARIANT (TREE_TYPE (type
))
7996 == TYPE_MAIN_VARIANT (TREE_TYPE (base
)))
7997 && TYPE_QUALS (type
) == TYPE_UNQUALIFIED
)
7998 return fold_convert_loc (loc
, type
,
7999 build_fold_addr_expr_loc (loc
, base
));
8002 if (TREE_CODE (op0
) == MODIFY_EXPR
8003 && TREE_CONSTANT (TREE_OPERAND (op0
, 1))
8004 /* Detect assigning a bitfield. */
8005 && !(TREE_CODE (TREE_OPERAND (op0
, 0)) == COMPONENT_REF
8007 (TREE_OPERAND (TREE_OPERAND (op0
, 0), 1))))
8009 /* Don't leave an assignment inside a conversion
8010 unless assigning a bitfield. */
8011 tem
= fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 1));
8012 /* First do the assignment, then return converted constant. */
8013 tem
= build2_loc (loc
, COMPOUND_EXPR
, TREE_TYPE (tem
), op0
, tem
);
8014 TREE_NO_WARNING (tem
) = 1;
8015 TREE_USED (tem
) = 1;
      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
	 constant (if x has signed type, the sign bit cannot be set
	 in c).  This folds extension into the BIT_AND_EXPR.
	 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
	 very likely don't have maximal range for their precision and this
	 transformation effectively doesn't preserve non-maximal ranges.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TREE_CODE (op0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
	{
	  tree and_expr = op0;
	  tree and0 = TREE_OPERAND (and_expr, 0);
	  tree and1 = TREE_OPERAND (and_expr, 1);
	  int change = 0;

	  if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
	      || (TYPE_PRECISION (type)
		  <= TYPE_PRECISION (TREE_TYPE (and_expr))))
	    change = 1;
	  else if (TYPE_PRECISION (TREE_TYPE (and1))
		   <= HOST_BITS_PER_WIDE_INT
		   && host_integerp (and1, 1))
	    {
	      unsigned HOST_WIDE_INT cst;

	      cst = tree_low_cst (and1, 1);
	      cst &= (HOST_WIDE_INT) -1
		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
	      change = (cst == 0);
#ifdef LOAD_EXTEND_OP
	      if (change
		  && !flag_syntax_only
		  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
		      == ZERO_EXTEND))
		{
		  tree uns = unsigned_type_for (TREE_TYPE (and0));
		  and0 = fold_convert_loc (loc, uns, and0);
		  and1 = fold_convert_loc (loc, uns, and1);
		}
#endif
	    }
	  if (change)
	    {
	      tem = force_fit_type_double (type, tree_to_double_int (and1),
					   0, TREE_OVERFLOW (and1));
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_convert_loc (loc, type, and0), tem);
	    }
	}
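      /* For instance, with x of type unsigned short, the zero-extension
	 (unsigned int)(x & 0x7f) meets the conditions above and is
	 rewritten as (unsigned int)x & 0x7f, folding the extension into
	 the masking operation.  */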
      /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
	 when one of the new casts will fold away.  Conservatively we assume
	 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST.  */
      if (POINTER_TYPE_P (type)
	  && TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
	  && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
	      || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);

	  return fold_build_pointer_plus_loc
		   (loc, fold_convert_loc (loc, type, arg00), arg01);
	}
      /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
	 of the same precision, and X is an integer type not narrower than
	 types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (op0) == BIT_NOT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
	  && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
	{
	  tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
	      && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
	    return fold_build1_loc (loc, BIT_NOT_EXPR, type,
				    fold_convert_loc (loc, type, tem));
	}
      /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
	 type of X and Y (integer types only).  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (op0) == MULT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
	{
	  /* Be careful not to introduce new overflows.  */
	  tree mult_type;
	  if (TYPE_OVERFLOW_WRAPS (type))
	    mult_type = type;
	  else
	    mult_type = unsigned_type_for (type);

	  if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
	    {
	      tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
				     fold_convert_loc (loc, mult_type,
						       TREE_OPERAND (op0, 0)),
				     fold_convert_loc (loc, mult_type,
						       TREE_OPERAND (op0, 1)));
	      return fold_convert_loc (loc, type, tem);
	    }
	}

      tem = fold_convert_const (code, type, op0);
      return tem ? tem : NULL_TREE;
    case ADDR_SPACE_CONVERT_EXPR:
      if (integer_zerop (arg0))
	return fold_convert_const (code, type, arg0);
      return NULL_TREE;

    case FIXED_CONVERT_EXPR:
      tem = fold_convert_const (code, type, arg0);
      return tem ? tem : NULL_TREE;
    case VIEW_CONVERT_EXPR:
      if (TREE_TYPE (op0) == type)
	return op0;
      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
	return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
				type, TREE_OPERAND (op0, 0));
      if (TREE_CODE (op0) == MEM_REF)
	return fold_build2_loc (loc, MEM_REF, type,
				TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));

      /* For integral conversions with the same precision or pointer
	 conversions use a NOP_EXPR instead.  */
      if ((INTEGRAL_TYPE_P (type)
	   || POINTER_TYPE_P (type))
	  && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
	      || POINTER_TYPE_P (TREE_TYPE (op0)))
	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
	return fold_convert_loc (loc, type, op0);

      /* Strip inner integral conversions that do not change the precision.  */
      if (CONVERT_EXPR_P (op0)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
	      || POINTER_TYPE_P (TREE_TYPE (op0)))
	  && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
	      || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
	  && (TYPE_PRECISION (TREE_TYPE (op0))
	      == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
	return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
				type, TREE_OPERAND (op0, 0));

      return fold_view_convert_expr (type, op0);
    case NEGATE_EXPR:
      tem = fold_negate_expr (loc, arg0);
      if (tem)
	return fold_convert_loc (loc, type, tem);
      return NULL_TREE;
    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
	       && TREE_CODE (type) == REAL_TYPE)
	{
	  tree targ0 = strip_float_extensions (arg0);
	  if (targ0 != arg0)
	    return fold_convert_loc (loc, type,
				     fold_build1_loc (loc, ABS_EXPR,
						      TREE_TYPE (targ0),
						      targ0));
	}
      /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
      else if (TREE_CODE (arg0) == ABS_EXPR)
	return arg0;
      else if (tree_expr_nonnegative_p (arg0))
	return arg0;

      /* Strip sign ops from argument.  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = fold_strip_sign_ops (arg0);
	  if (tem)
	    return fold_build1_loc (loc, ABS_EXPR, type,
				    fold_convert_loc (loc, type, tem));
	}
      return NULL_TREE;
    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	{
	  tree itype = TREE_TYPE (type);
	  tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
	  tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
				  negate_expr (ipart));
	}
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree itype = TREE_TYPE (type);
	  tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
	  tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
	  return build_complex (type, rpart, negate_expr (ipart));
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      return NULL_TREE;
    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      /* Convert ~ (-A) to A - 1.  */
      else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build2_loc (loc, MINUS_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)),
				build_int_cst (type, 1));
      /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
      else if (INTEGRAL_TYPE_P (type)
	       && ((TREE_CODE (arg0) == MINUS_EXPR
		    && integer_onep (TREE_OPERAND (arg0, 1)))
		   || (TREE_CODE (arg0) == PLUS_EXPR
		       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
	return fold_build1_loc (loc, NEGATE_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));
      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
					 fold_convert_loc (loc, type,
							   TREE_OPERAND (arg0, 0)))))
	return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
					 fold_convert_loc (loc, type,
							   TREE_OPERAND (arg0, 1)))))
	return fold_build2_loc (loc, BIT_XOR_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)), tem);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree *elements;
	  tree elem;
	  unsigned count = VECTOR_CST_NELTS (arg0), i;

	  elements = XALLOCAVEC (tree, count);
	  for (i = 0; i < count; i++)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
		break;
	      elements[i] = elem;
	    }
	  if (i == count)
	    return build_vector (type, elements);
	}

      return NULL_TREE;
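      /* The NEGATE/MINUS/PLUS folds above are the usual two's-complement
	 identity ~y == -y - 1 read in both directions: for signed x,
	 ~(-x) folds to x - 1, and ~(x - 1) folds to -x.  */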
    case TRUTH_NOT_EXPR:
      /* The argument to invert_truthvalue must have Boolean type.  */
      if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
	arg0 = fold_convert_loc (loc, boolean_type_node, arg0);

      /* Note that the operand of this must be an int
	 and its values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language,
	 but we don't handle values other than 1 correctly yet.)  */
      tem = fold_truth_not_expr (loc, arg0);
      if (!tem)
	return NULL_TREE;
      return fold_convert_loc (loc, type, tem);
    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
				     TREE_OPERAND (arg0, 1));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert_loc (loc, type, TREE_REALPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
				 fold_build1_loc (loc, REALPART_EXPR, itype,
						  TREE_OPERAND (arg0, 0)),
				 fold_build1_loc (loc, REALPART_EXPR, itype,
						  TREE_OPERAND (arg0, 1)));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1_loc (loc, REALPART_EXPR, itype,
				 TREE_OPERAND (arg0, 0));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
		fn = mathfn_built_in (type, BUILT_IN_COS);
		if (fn)
		  return build_call_expr_loc (loc, fn, 1,
					      CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      return NULL_TREE;
    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return build_zero_cst (type);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
				     TREE_OPERAND (arg0, 0));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
				 fold_build1_loc (loc, IMAGPART_EXPR, itype,
						  TREE_OPERAND (arg0, 0)),
				 fold_build1_loc (loc, IMAGPART_EXPR, itype,
						  TREE_OPERAND (arg0, 1)));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1_loc (loc, IMAGPART_EXPR, itype,
				 TREE_OPERAND (arg0, 0));
	  return fold_convert_loc (loc, type, negate_expr (tem));
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
		fn = mathfn_built_in (type, BUILT_IN_SIN);
		if (fn)
		  return build_call_expr_loc (loc, fn, 1,
					      CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      return NULL_TREE;
    case INDIRECT_REF:
      /* Fold *&X to X if X is an lvalue.  */
      if (TREE_CODE (op0) == ADDR_EXPR)
	{
	  tree op00 = TREE_OPERAND (op0, 0);
	  if ((TREE_CODE (op00) == VAR_DECL
	       || TREE_CODE (op00) == PARM_DECL
	       || TREE_CODE (op00) == RESULT_DECL)
	      && !TREE_READONLY (op00))
	    return op00;
	}
      return NULL_TREE;
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;
	enum tree_code subcode;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 2);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR))
	  elts += nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else
	  subcode = FLOAT_EXPR;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }
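      /* For example, VEC_UNPACK_HI_EXPR of the constant vector
	 { 1, 2, 3, 4 } of four shorts converts one half of the input,
	 chosen according to BYTES_BIG_ENDIAN, into a two-element int
	 vector such as { 3, 4 }.  */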
    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;
	enum tree_code subcode;

	if (TREE_CODE (op0) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (op0, elts))
	  return NULL_TREE;

	switch (code)
	  {
	  case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
	  case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
	  case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
	  default: gcc_unreachable ();
	  }

	for (i = 1; i < nelts; i++)
	  {
	    elts[0] = const_binop (subcode, elts[0], elts[i]);
	    if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
	      return NULL_TREE;
	    elts[i] = build_zero_cst (TREE_TYPE (type));
	  }

	return build_vector (type, elts);
      }

    default:
      return NULL_TREE;
    } /* switch (code) */
}
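/* For example, REDUC_PLUS_EXPR of the constant vector { 1, 2, 3, 4 }
   accumulates into element 0 and zeroes the rest, yielding
   { 10, 0, 0, 0 }.  */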
/* If the operation was a conversion do _not_ mark a resulting constant
   with TREE_OVERFLOW if the original constant was not.  These conversions
   have implementation defined behavior and retaining the TREE_OVERFLOW
   flag here would confuse later passes such as VRP.  */

tree
fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
				tree type, tree op0)
{
  tree res = fold_unary_loc (loc, code, type, op0);
  if (res
      && TREE_CODE (res) == INTEGER_CST
      && TREE_CODE (op0) == INTEGER_CST
      && CONVERT_EXPR_CODE_P (code))
    TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);

  return res;
}
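/* For example, narrowing the integer constant 0x12345 to a signed char
   produces an INTEGER_CST that would normally carry TREE_OVERFLOW; since
   the conversion has implementation-defined rather than undefined
   behavior, the flag is copied from the operand instead.  */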
/* Fold a binary bitwise/truth expression of code CODE and type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting expression.
   ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_truth_andor (location_t loc, enum tree_code code, tree type,
		  tree arg0, tree arg1, tree op0, tree op1)
{
  tree tem;

  /* We only do these simplifications if we are optimizing.  */
  if (!optimize)
    return NULL_TREE;

  /* Check for things like (A || B) && (A || C).  We can convert this
     to A || (B && C).  Note that either operator can be any of the four
     truth and/or operations and the transformation will still be
     valid.  Also note that we only care about order for the
     ANDIF and ORIF operators.  If B contains side effects, this
     might change the truth-value of A.  */
  if (TREE_CODE (arg0) == TREE_CODE (arg1)
      && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
	  || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
	  || TREE_CODE (arg0) == TRUTH_AND_EXPR
	  || TREE_CODE (arg0) == TRUTH_OR_EXPR)
      && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
    {
      tree a00 = TREE_OPERAND (arg0, 0);
      tree a01 = TREE_OPERAND (arg0, 1);
      tree a10 = TREE_OPERAND (arg1, 0);
      tree a11 = TREE_OPERAND (arg1, 1);
      int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
			  || TREE_CODE (arg0) == TRUTH_AND_EXPR)
			 && (code == TRUTH_AND_EXPR
			     || code == TRUTH_OR_EXPR));

      if (operand_equal_p (a00, a10, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
				fold_build2_loc (loc, code, type, a01, a11));
      else if (commutative && operand_equal_p (a00, a11, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
				fold_build2_loc (loc, code, type, a01, a10));
      else if (commutative && operand_equal_p (a01, a10, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
				fold_build2_loc (loc, code, type, a00, a11));

      /* This case is tricky because we must either have commutative
	 operators or else A10 must not have side-effects.  */

      else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
	       && operand_equal_p (a01, a11, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type,
				fold_build2_loc (loc, code, type, a00, a10),
				a01);
    }

  /* See if we can build a range comparison.  */
  if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
    return tem;

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
      if (tem)
	return fold_build2_loc (loc, code, type, tem, arg1);
    }

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
      if (tem)
	return fold_build2_loc (loc, code, type, arg0, tem);
    }

  /* Check for the possibility of merging component references.  If our
     lhs is another similar operation, try to merge its rhs with our
     rhs.  Then try to merge our lhs and rhs.  */
  if (TREE_CODE (arg0) == code
      && 0 != (tem = fold_truth_andor_1 (loc, code, type,
					 TREE_OPERAND (arg0, 1), arg1)))
    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

  if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
    return tem;

  if (LOGICAL_OP_NON_SHORT_CIRCUIT
      && (code == TRUTH_AND_EXPR
	  || code == TRUTH_ANDIF_EXPR
	  || code == TRUTH_OR_EXPR
	  || code == TRUTH_ORIF_EXPR))
    {
      enum tree_code ncode, icode;

      ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
	      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
      icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;

      /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
	 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
	 We don't want to pack more than two leaves to a non-IF AND/OR
	 expression.
	 If tree-code of left-hand operand isn't an AND/OR-IF code and not
	 equal to IF-CODE, then we don't want to add right-hand operand.
	 If the inner right-hand side of left-hand operand has
	 side-effects, or isn't simple, then we can't add to it,
	 as otherwise we might destroy if-sequence.  */
      if (TREE_CODE (arg0) == icode
	  && simple_operand_p_2 (arg1)
	  /* Needed for sequence points to handle trappings, and
	     side-effects.  */
	  && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
	{
	  tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
				 arg1);
	  return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
				  tem);
	}
      /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
	 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
      else if (TREE_CODE (arg1) == icode
	       && simple_operand_p_2 (arg0)
	       /* Needed for sequence points to handle trappings, and
		  side-effects.  */
	       && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_build2_loc (loc, ncode, type,
				 arg0, TREE_OPERAND (arg1, 0));
	  return fold_build2_loc (loc, icode, type, tem,
				  TREE_OPERAND (arg1, 1));
	}
      /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
	 into (A OR B).
	 For sequence point consistency, we need to check for trapping,
	 and side-effects.  */
      else if (code == icode && simple_operand_p_2 (arg0)
	       && simple_operand_p_2 (arg1))
	return fold_build2_loc (loc, ncode, type, arg0, arg1);
    }

  return NULL_TREE;
}
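/* A typical input is (a || b) && (a || c), which the first transformation
   above rewrites as a || (b && c), provided b has no side effects that
   could change the truth-value of a.  */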
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code compl_code;

  if (code == MIN_EXPR)
    compl_code = MAX_EXPR;
  else if (code == MAX_EXPR)
    compl_code = MIN_EXPR;
  else
    gcc_unreachable ();

  /* MIN (MAX (a, b), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));

  /* MIN (MAX (b, a), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
      && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));

  /* MIN (a, MAX (a, b)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));

  /* MIN (a, MAX (b, a)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));

  return NULL_TREE;
}
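/* For example, MIN_EXPR <MAX_EXPR <a, b>, b> folds to b: whenever the MAX
   picks a we have a >= b so the outer MIN yields b, and whenever the MAX
   picks b the MIN trivially yields b.  */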
/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
   valid if signed overflow is undefined.  */

static tree
maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
				 tree arg0, tree arg1,
				 bool *strict_overflow_p)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
  int sgn0;
  bool swap = false;

  /* Match A +- CST code arg1 and CST code arg1.  We can change the
     first form only if overflow is undefined.  */
  if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
	 /* In principle pointers also have undefined overflow behavior,
	    but that causes problems elsewhere.  */
	 && !POINTER_TYPE_P (TREE_TYPE (arg0))
	 && (code0 == MINUS_EXPR
	     || code0 == PLUS_EXPR)
	 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	|| code0 == INTEGER_CST))
    return NULL_TREE;

  /* Identify the constant in arg0 and its sign.  */
  if (code0 == INTEGER_CST)
    cst0 = arg0;
  else
    cst0 = TREE_OPERAND (arg0, 1);
  sgn0 = tree_int_cst_sgn (cst0);

  /* Overflowed constants and zero will cause problems.  */
  if (integer_zerop (cst0)
      || TREE_OVERFLOW (cst0))
    return NULL_TREE;

  /* See if we can reduce the magnitude of the constant in
     arg0 by changing the comparison code.  */
  if (code0 == INTEGER_CST)
    {
      /* CST <= arg1  ->  CST-1 < arg1.  */
      if (code == LE_EXPR && sgn0 == 1)
	code = LT_EXPR;
      /* -CST < arg1  ->  -CST-1 <= arg1.  */
      else if (code == LT_EXPR && sgn0 == -1)
	code = LE_EXPR;
      /* CST > arg1  ->  CST-1 >= arg1.  */
      else if (code == GT_EXPR && sgn0 == 1)
	code = GE_EXPR;
      /* -CST >= arg1  ->  -CST-1 > arg1.  */
      else if (code == GE_EXPR && sgn0 == -1)
	code = GT_EXPR;
      else
	return NULL_TREE;
      /* arg1 code' CST' might be more canonical.  */
      swap = true;
    }
  else
    {
      /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
      if (code == LT_EXPR
	  && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = LE_EXPR;
      /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
      else if (code == GT_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = GE_EXPR;
      /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
      else if (code == LE_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = LT_EXPR;
      /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
      else if (code == GE_EXPR
	       && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = GT_EXPR;
      else
	return NULL_TREE;
      *strict_overflow_p = true;
    }

  /* Now build the constant reduced in magnitude.  But not if that
     would produce one outside of its type's range.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
      && ((sgn0 == 1
	   && TYPE_MIN_VALUE (TREE_TYPE (cst0))
	   && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
	  || (sgn0 == -1
	      && TYPE_MAX_VALUE (TREE_TYPE (cst0))
	      && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
    /* We cannot swap the comparison here as that would cause us to
       endlessly recurse.  */
    return NULL_TREE;

  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
		       cst0, build_int_cst (TREE_TYPE (cst0), 1));
  if (code0 != INTEGER_CST)
    t = fold_build2_loc (loc, code0, TREE_TYPE (arg0),
			 TREE_OPERAND (arg0, 0), t);
  t = fold_convert (TREE_TYPE (arg1), t);

  /* If swapping might yield a more canonical form, do so.  */
  if (swap)
    return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
  else
    return fold_build2_loc (loc, code, type, t, arg1);
}
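/* For example, with signed overflow undefined, x + 2 > y is canonicalized
   to x + 1 >= y by the PLUS_EXPR/GT_EXPR arm above, reducing the
   magnitude of the constant from 2 to 1.  */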
/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
   overflow further.  Try to decrease the magnitude of constants involved
   by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
   and put sole constants at the second argument position.
   Returns the canonicalized tree if changed, otherwise NULL_TREE.  */

static tree
maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
			       tree arg0, tree arg1)
{
  tree t;
  bool strict_overflow_p;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when reducing constant in comparison");

  /* Try canonicalization by simplifying arg0.  */
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
				       &strict_overflow_p);
  if (t)
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
      return t;
    }

  /* Try canonicalization by simplifying arg1 using the swapped
     comparison.  */
  code = swap_tree_comparison (code);
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
				       &strict_overflow_p);
  if (t && strict_overflow_p)
    fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
  return t;
}
/* Return whether BASE + OFFSET + BITPOS may wrap around the address
   space.  This is used to avoid issuing overflow warnings for
   expressions like &p->x which can not wrap.  */

static bool
pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
{
  double_int di_offset, total;

  if (!POINTER_TYPE_P (TREE_TYPE (base)))
    return true;

  if (bitpos < 0)
    return true;

  if (offset == NULL_TREE)
    di_offset = double_int_zero;
  else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
    return true;
  else
    di_offset = TREE_INT_CST (offset);

  bool overflow;
  double_int units = double_int::from_uhwi (bitpos / BITS_PER_UNIT);
  total = di_offset.add_with_sign (units, true, &overflow);
  if (overflow)
    return true;

  if (total.high != 0)
    return true;

  HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
  if (size <= 0)
    return true;

  /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
     array.  */
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      HOST_WIDE_INT base_size;

      base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
      if (base_size > 0 && size < base_size)
	size = base_size;
    }

  return total.low > (unsigned HOST_WIDE_INT) size;
}
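/* For example, for &p->x the offset is a small constant that stays within
   the size of *p, so the address arithmetic cannot wrap and callers need
   not issue a strict-overflow warning for comparisons involving it.  */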
/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary should call fold_binary.  Fold a comparison with
   tree code CODE and type TYPE with operands OP0 and OP1.  Return
   the folded comparison or NULL_TREE.  */

static tree
fold_comparison (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  tree arg0, arg1, tem;

  arg0 = op0;
  arg1 = op1;

  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  tem = fold_relational_const (code, type, arg0, arg1);
  if (tem != NULL_TREE)
    return tem;

  /* If one arg is a real or integer constant, put it last.  */
  if (tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);

  /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1.  */
  if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
      && (TREE_CODE (arg1) == INTEGER_CST
	  && !TREE_OVERFLOW (arg1)))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;
      tree variable = TREE_OPERAND (arg0, 0);
      tree lhs;
      int lhs_add;
      lhs_add = TREE_CODE (arg0) != PLUS_EXPR;

      lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
			     TREE_TYPE (arg1), const2, const1);

      /* If the constant operation overflowed this can be
	 simplified as a comparison against INT_MAX/INT_MIN.  */
      if (TREE_CODE (lhs) == INTEGER_CST
	  && TREE_OVERFLOW (lhs))
	{
	  int const1_sgn = tree_int_cst_sgn (const1);
	  enum tree_code code2 = code;

	  /* Get the sign of the constant on the lhs if the
	     operation were VARIABLE + CONST1.  */
	  if (TREE_CODE (arg0) == MINUS_EXPR)
	    const1_sgn = -const1_sgn;

	  /* The sign of the constant determines if we overflowed
	     INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
	     Canonicalize to the INT_MIN overflow by swapping the comparison
	     if necessary.  */
	  if (const1_sgn == -1)
	    code2 = swap_tree_comparison (code);

	  /* We now can look at the canonicalized case
	       VARIABLE + 1  CODE2  INT_MIN
	     and decide on the result.  */
	  if (code2 == LT_EXPR
	      || code2 == LE_EXPR
	      || code2 == EQ_EXPR)
	    return omit_one_operand_loc (loc, type, boolean_false_node,
					 variable);
	  else if (code2 == NE_EXPR
		   || code2 == GE_EXPR
		   || code2 == GT_EXPR)
	    return omit_one_operand_loc (loc, type, boolean_true_node,
					 variable);
	}

      if (TREE_CODE (lhs) == TREE_CODE (arg1)
	  && (TREE_CODE (lhs) != INTEGER_CST
	      || !TREE_OVERFLOW (lhs)))
	{
	  if (code != EQ_EXPR && code != NE_EXPR)
	    fold_overflow_warning ("assuming signed overflow does not occur "
				   "when changing X +- C1 cmp C2 to "
				   "X cmp C2 -+ C1",
				   WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type, variable, lhs);
	}
    }

  /* For comparisons of pointers we can decompose it to a compile time
     comparison of the base objects and the offsets into the object.
     This requires at least one operand being an ADDR_EXPR or a
     POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == ADDR_EXPR
	  || TREE_CODE (arg1) == ADDR_EXPR
	  || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
    {
      tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
      HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
      enum machine_mode mode;
      int volatilep, unsignedp;
      bool indirect_base0 = false, indirect_base1 = false;

      /* Get base and offset for the access.  Strip ADDR_EXPR for
	 get_inner_reference, but put it back by stripping INDIRECT_REF
	 off the base object if possible.  indirect_baseN will be true
	 if baseN is not an address but refers to the object itself.  */
      base0 = arg0;
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
				       &bitsize, &bitpos0, &offset0, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base0) == INDIRECT_REF)
	    base0 = TREE_OPERAND (base0, 0);
	  else
	    indirect_base0 = true;
	}
      else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	{
	  base0 = TREE_OPERAND (arg0, 0);
	  STRIP_SIGN_NOPS (base0);
	  if (TREE_CODE (base0) == ADDR_EXPR)
	    {
	      base0 = TREE_OPERAND (base0, 0);
	      indirect_base0 = true;
	    }
	  offset0 = TREE_OPERAND (arg0, 1);
	  if (host_integerp (offset0, 0))
	    {
	      HOST_WIDE_INT off = size_low_cst (offset0);
	      if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
				   * BITS_PER_UNIT)
		  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
		{
		  bitpos0 = off * BITS_PER_UNIT;
		  offset0 = NULL_TREE;
		}
	    }
	}

      base1 = arg1;
      if (TREE_CODE (arg1) == ADDR_EXPR)
	{
	  base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
				       &bitsize, &bitpos1, &offset1, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base1) == INDIRECT_REF)
	    base1 = TREE_OPERAND (base1, 0);
	  else
	    indirect_base1 = true;
	}
      else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	{
	  base1 = TREE_OPERAND (arg1, 0);
	  STRIP_SIGN_NOPS (base1);
	  if (TREE_CODE (base1) == ADDR_EXPR)
	    {
	      base1 = TREE_OPERAND (base1, 0);
	      indirect_base1 = true;
	    }
	  offset1 = TREE_OPERAND (arg1, 1);
	  if (host_integerp (offset1, 0))
	    {
	      HOST_WIDE_INT off = size_low_cst (offset1);
	      if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
				   * BITS_PER_UNIT)
		  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
		{
		  bitpos1 = off * BITS_PER_UNIT;
		  offset1 = NULL_TREE;
		}
	    }
	}

      /* A local variable can never be pointed to by
	 the default SSA name of an incoming parameter.  */
      if ((TREE_CODE (arg0) == ADDR_EXPR
	   && indirect_base0
	   && TREE_CODE (base0) == VAR_DECL
	   && auto_var_in_fn_p (base0, current_function_decl)
	   && !indirect_base1
	   && TREE_CODE (base1) == SSA_NAME
	   && SSA_NAME_IS_DEFAULT_DEF (base1)
	   && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
	  || (TREE_CODE (arg1) == ADDR_EXPR
	      && indirect_base1
	      && TREE_CODE (base1) == VAR_DECL
	      && auto_var_in_fn_p (base1, current_function_decl)
	      && !indirect_base0
	      && TREE_CODE (base0) == SSA_NAME
	      && SSA_NAME_IS_DEFAULT_DEF (base0)
	      && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
	{
	  if (code == NE_EXPR)
	    return constant_boolean_node (1, type);
	  else if (code == EQ_EXPR)
	    return constant_boolean_node (0, type);
	}
      /* If we have equivalent bases we might be able to simplify.  */
      else if (indirect_base0 == indirect_base1
	       && operand_equal_p (base0, base1, 0))
	{
	  /* We can fold this expression to a constant if the non-constant
	     offset parts are equal.  */
	  if ((offset0 == offset1
	       || (offset0 && offset1
		   && operand_equal_p (offset0, offset1, 0)))
	      && (code == EQ_EXPR
		  || code == NE_EXPR
		  || (indirect_base0 && DECL_P (base0))
		  || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      if (code != EQ_EXPR
		  && code != NE_EXPR
		  && bitpos0 != bitpos1
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_CONDITIONAL);

	      switch (code)
		{
		case EQ_EXPR:
		  return constant_boolean_node (bitpos0 == bitpos1, type);
		case NE_EXPR:
		  return constant_boolean_node (bitpos0 != bitpos1, type);
		case LT_EXPR:
		  return constant_boolean_node (bitpos0 < bitpos1, type);
		case LE_EXPR:
		  return constant_boolean_node (bitpos0 <= bitpos1, type);
		case GE_EXPR:
		  return constant_boolean_node (bitpos0 >= bitpos1, type);
		case GT_EXPR:
		  return constant_boolean_node (bitpos0 > bitpos1, type);
		default:;
		}
	    }
	  /* We can simplify the comparison to a comparison of the variable
	     offset parts if the constant offset parts are equal.
	     Be careful to use signed sizetype here because otherwise we
	     mess with array offsets in the wrong way.  This is possible
	     because pointer arithmetic is restricted to remain within an
	     object and overflow on pointer differences is undefined as of
	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
	  else if (bitpos0 == bitpos1
		   && ((code == EQ_EXPR || code == NE_EXPR)
		       || (indirect_base0 && DECL_P (base0))
		       || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      /* By converting to signed sizetype we cover middle-end pointer
		 arithmetic which operates on unsigned pointer types of size
		 type size and ARRAY_REF offsets which are properly sign or
		 zero extended from their type in case it is narrower than
		 sizetype.  */
	      if (offset0 == NULL_TREE)
		offset0 = build_int_cst (ssizetype, 0);
	      else
		offset0 = fold_convert_loc (loc, ssizetype, offset0);
	      if (offset1 == NULL_TREE)
		offset1 = build_int_cst (ssizetype, 0);
	      else
		offset1 = fold_convert_loc (loc, ssizetype, offset1);

	      if (code != EQ_EXPR
		  && code != NE_EXPR
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_COMPARISON);

	      return fold_build2_loc (loc, code, type, offset0, offset1);
	    }
	}
      /* For non-equal bases we can simplify if they are addresses
	 of local binding decls or constants.  */
      else if (indirect_base0 && indirect_base1
	       /* We know that !operand_equal_p (base0, base1, 0)
		  because the if condition was false.  But make
		  sure two decls are not the same.  */
	       && base0 != base1
	       && TREE_CODE (arg0) == ADDR_EXPR
	       && TREE_CODE (arg1) == ADDR_EXPR
	       && (((TREE_CODE (base0) == VAR_DECL
		     || TREE_CODE (base0) == PARM_DECL)
		    && (targetm.binds_local_p (base0)
			|| CONSTANT_CLASS_P (base1)))
		   || CONSTANT_CLASS_P (base0))
	       && (((TREE_CODE (base1) == VAR_DECL
		     || TREE_CODE (base1) == PARM_DECL)
		    && (targetm.binds_local_p (base1)
			|| CONSTANT_CLASS_P (base0)))
		   || CONSTANT_CLASS_P (base1)))
	{
	  if (code == EQ_EXPR)
	    return omit_two_operands_loc (loc, type, boolean_false_node,
					  arg0, arg1);
	  else if (code == NE_EXPR)
	    return omit_two_operands_loc (loc, type, boolean_true_node,
					  arg0, arg1);
	}
      /* For equal offsets we can simplify to a comparison of the
	 base addresses.  */
      else if (bitpos0 == bitpos1
	       && (indirect_base0
		   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
	       && (indirect_base1
		   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
	       && ((offset0 == offset1)
		   || (offset0 && offset1
		       && operand_equal_p (offset0, offset1, 0))))
	{
	  if (indirect_base0)
	    base0 = build_fold_addr_expr_loc (loc, base0);
	  if (indirect_base1)
	    base1 = build_fold_addr_expr_loc (loc, base1);
	  return fold_build2_loc (loc, code, type, base0, base1);
	}
    }

  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = TREE_OPERAND (arg1, 1);
      tree variable1 = TREE_OPERAND (arg0, 0);
      tree variable2 = TREE_OPERAND (arg1, 0);
      tree cst;
      const char * const warnmsg = G_("assuming signed overflow does not "
				      "occur when combining constants around "
				      "a comparison");

      /* Put the constant on the side where it doesn't overflow and is
	 of lower absolute value than before.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const2, const1);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  variable1,
				  fold_build2_loc (loc,
						   TREE_CODE (arg1),
						   TREE_TYPE (arg1),
						   variable2, cst));
	}

      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const1, const2);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  fold_build2_loc (loc, TREE_CODE (arg0),
						   TREE_TYPE (arg0),
						   variable1, cst),
				  variable2);
	}
    }

  /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
     signed arithmetic case.  That form is created by the compiler
     often enough for folding it to be of value.  One example is in
     computing loop trip counts after Operator Strength Reduction.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && TREE_CODE (arg0) == MULT_EXPR
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && integer_zerop (arg1))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;			/* zero */
      tree variable1 = TREE_OPERAND (arg0, 0);
      enum tree_code cmp_code = code;

      /* Handle unfolded multiplication by zero.  */
      if (integer_zerop (const1))
	return fold_build2_loc (loc, cmp_code, type, const1, const2);

      fold_overflow_warning (("assuming signed overflow does not occur when "
			      "eliminating multiplication in comparison "
			      "with zero"),
			     WARN_STRICT_OVERFLOW_COMPARISON);

      /* If const1 is negative we swap the sense of the comparison.  */
      if (tree_int_cst_sgn (const1) < 0)
	cmp_code = swap_tree_comparison (cmp_code);

      return fold_build2_loc (loc, cmp_code, type, variable1, const2);
    }

  tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
  if (tem)
    return tem;

  if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
    {
      tree targ0 = strip_float_extensions (arg0);
      tree targ1 = strip_float_extensions (arg1);
      tree newtype = TREE_TYPE (targ0);

      if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	newtype = TREE_TYPE (targ1);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, newtype, targ0),
				fold_convert_loc (loc, newtype, targ1));

      /* (-a) CMP (-b) -> b CMP a  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
				TREE_OPERAND (arg0, 0));

      if (TREE_CODE (arg1) == REAL_CST)
	{
	  REAL_VALUE_TYPE cst;
	  cst = TREE_REAL_CST (arg1);

	  /* (-a) CMP CST -> a swap(CMP) (-CST)  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR)
	    return fold_build2_loc (loc, swap_tree_comparison (code), type,
				    TREE_OPERAND (arg0, 0),
				    build_real (TREE_TYPE (arg1),
						real_value_negate (&cst)));

	  /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
	  /* a CMP (-0) -> a CMP 0  */
	  if (REAL_VALUE_MINUS_ZERO (cst))
	    return fold_build2_loc (loc, code, type, arg0,
				    build_real (TREE_TYPE (arg1), dconst0));

	  /* x != NaN is always true, other ops are always false.  */
	  if (REAL_VALUE_ISNAN (cst)
	      && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
	      return omit_one_operand_loc (loc, type, tem, arg0);
	    }

	  /* Fold comparisons against infinity.  */
	  if (REAL_VALUE_ISINF (cst)
	      && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = fold_inf_compare (loc, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}

      /* If this is a comparison of a real constant with a PLUS_EXPR
	 or a MINUS_EXPR of a real constant, we can convert it into a
	 comparison with a revised real constant as long as no overflow
	 occurs when unsafe_math_optimizations are enabled.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == REAL_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      arg1, TREE_OPERAND (arg0, 1)))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Likewise, we can simplify a comparison of a real constant with
	 a MINUS_EXPR whose first operand is also a real constant, i.e.
	 (c1 - x) < c2 becomes x > c1-c2.  Reordering is allowed on
	 floating-point types only if -fassociative-math is set.  */
      if (flag_associative_math
	  && TREE_CODE (arg1) == REAL_CST
	  && TREE_CODE (arg0) == MINUS_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
	  && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
				      arg1))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, swap_tree_comparison (code), type,
				TREE_OPERAND (arg0, 1), tem);

      /* Fold comparisons against built-in math functions.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && flag_unsafe_math_optimizations
	  && ! flag_errno_math)
	{
	  enum built_in_function fcode = builtin_mathfn_code (arg0);

	  if (fcode != END_BUILTINS)
	    {
	      tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}
    }

  if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
      && CONVERT_EXPR_P (arg0))
    {
      /* If we are widening one operand of an integer comparison,
	 see if the other operand is similarly being widened.  Perhaps we
	 can do the comparison in the narrower type.  */
      tem = fold_widened_comparison (loc, code, type, arg0, arg1);
      if (tem)
	return tem;

      /* Or if we are changing signedness.  */
      tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
      if (tem)
	return tem;
    }

  /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
     constant, we can simplify it.  */
  if (TREE_CODE (arg1) == INTEGER_CST
      && (TREE_CODE (arg0) == MIN_EXPR
	  || TREE_CODE (arg0) == MAX_EXPR)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
    {
      tem = optimize_minmax_comparison (loc, code, type, op0, op1);
      if (tem)
	return tem;
    }

  /* Simplify comparison of something with itself.  (For IEEE
     floating-point, we can only do some of these simplifications.)  */
  if (operand_equal_p (arg0, arg1, 0))
    {
      switch (code)
	{
	case EQ_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  break;

	case GE_EXPR:
	case LE_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);

	case NE_EXPR:
	  /* For NE, we can only do this simplification if integer
	     or we don't honor IEEE floating point NaNs.  */
	  if (FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    break;
	  /* ... fall through ...  */
	case GT_EXPR:
	case LT_EXPR:
	  return constant_boolean_node (0, type);
	default:
	  gcc_unreachable ();
	}
    }

  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like  ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;
      int save_p = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
	  /* Don't handle degenerate cases here; they should already
	     have been handled anyway.  */
	  && cval1 != 0 && cval2 != 0
	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
	{
	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

	  /* We can't just pass T to eval_subst in case cval1 or cval2
	     was the same as ARG1.  */

	  tree high_result
		= fold_build2_loc (loc, code, type,
				   eval_subst (loc, arg0, cval1, maxval,
					       cval2, minval),
				   arg1);
	  tree equal_result
		= fold_build2_loc (loc, code, type,
				   eval_subst (loc, arg0, cval1, maxval,
					       cval2, maxval),
				   arg1);
	  tree low_result
		= fold_build2_loc (loc, code, type,
				   eval_subst (loc, arg0, cval1, minval,
					       cval2, maxval),
				   arg1);

	  /* All three of these results should be 0 or 1.  Confirm they are.
	     Then use those values to select the proper code to use.  */

	  if (TREE_CODE (high_result) == INTEGER_CST
	      && TREE_CODE (equal_result) == INTEGER_CST
	      && TREE_CODE (low_result) == INTEGER_CST)
	    {
	      /* Make a 3-bit mask with the high-order bit being the
		 value for `>', the next for '=', and the low for '<'.  */
	      switch ((integer_onep (high_result) * 4)
		      + (integer_onep (equal_result) * 2)
		      + integer_onep (low_result))
		{
		case 0:
		  /* Always false.  */
		  return omit_one_operand_loc (loc, type,
					       integer_zero_node, arg0);
		case 1:
		  code = LT_EXPR;
		  break;
		case 2:
		  code = EQ_EXPR;
		  break;
		case 3:
		  code = LE_EXPR;
		  break;
		case 4:
		  code = GT_EXPR;
		  break;
		case 5:
		  code = NE_EXPR;
		  break;
		case 6:
		  code = GE_EXPR;
		  break;
		case 7:
		  /* Always true.  */
		  return omit_one_operand_loc (loc, type,
					       integer_one_node, arg0);
		}

	      if (save_p)
		{
		  tem = save_expr (build2 (code, type, cval1, cval2));
		  SET_EXPR_LOCATION (tem, loc);
		  return tem;
		}
	      return fold_build2_loc (loc, code, type, cval1, cval2);
	    }
	}
    }

  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
     into a single range test.  */
  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
      && TREE_CODE (arg1) == INTEGER_CST
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !integer_zerop (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (arg1))
    {
      tem = fold_div_compare (loc, code, type, arg0, arg1);
      if (tem != NULL_TREE)
	return tem;
    }

  /* Fold ~X op ~Y as Y op X.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == BIT_NOT_EXPR)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, code, type,
			      fold_convert_loc (loc, cmp_type,
						TREE_OPERAND (arg1, 0)),
			      TREE_OPERAND (arg0, 0));
    }

  /* Fold ~X op C as X op' ~C, where op' is the swapped comparison.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == INTEGER_CST)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, swap_tree_comparison (code), type,
			      TREE_OPERAND (arg0, 0),
			      fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
					       fold_convert_loc (loc, cmp_type,
								 arg1)));
    }

  return NULL_TREE;
}
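/* Two illustrative instances of the folds above: with signed overflow
   undefined, x + 1 < 5 becomes x < 4 by moving the constant across the
   comparison, and ~x == 3 becomes x == ~3 by the final BIT_NOT_EXPR
   transformation (EQ_EXPR is its own swapped comparison).  */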
/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */

static tree
fold_mult_zconjz (location_t loc, tree type, tree expr)
{
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      expr = save_expr (expr);
      rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
      ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
    }

  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2_loc (loc, PLUS_EXPR, itype,
			 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
			 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
  return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
			  build_zero_cst (itype));
}
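/* For z = a + b*i this builds (a*a + b*b) + 0*i; the save_exprs above
   ensure a and b are evaluated only once even when EXPR is neither a
   COMPLEX_EXPR nor a COMPLEX_CST.  */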
/* Subroutine of fold_binary.  If P is the value of EXPR, computes
   power-of-two M and (arbitrary) N such that M divides (P-N).  This condition
   guarantees that P and N have the same least significant log2(M) bits.
   N is not otherwise constrained.  In particular, N is not normalized to
   0 <= N < M as is common.  In general, the precise value of P is unknown.
   M is chosen as large as possible such that constant N can be determined.

   Returns M and sets *RESIDUE to N.

   If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
   account.  This is not always possible due to PR 35705.  */

static unsigned HOST_WIDE_INT
get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
				 bool allow_func_align)
{
  enum tree_code code;

  *residue = 0;

  code = TREE_CODE (expr);
  if (code == ADDR_EXPR)
    {
      unsigned int bitalign;
      get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
      *residue /= BITS_PER_UNIT;
      return bitalign / BITS_PER_UNIT;
    }
  else if (code == POINTER_PLUS_EXPR)
    {
      tree op0, op1;
      unsigned HOST_WIDE_INT modulus;
      enum tree_code inner_code;

      op0 = TREE_OPERAND (expr, 0);
      STRIP_NOPS (op0);
      modulus = get_pointer_modulus_and_residue (op0, residue,
						 allow_func_align);

      op1 = TREE_OPERAND (expr, 1);
      STRIP_NOPS (op1);
      inner_code = TREE_CODE (op1);
      if (inner_code == INTEGER_CST)
	{
	  *residue += TREE_INT_CST_LOW (op1);
	  return modulus;
	}
      else if (inner_code == MULT_EXPR)
	{
	  op1 = TREE_OPERAND (op1, 1);
	  if (TREE_CODE (op1) == INTEGER_CST)
	    {
	      unsigned HOST_WIDE_INT align;

	      /* Compute the greatest power-of-2 divisor of op1.  */
	      align = TREE_INT_CST_LOW (op1);
	      align &= -align;

	      /* If align is non-zero and less than *modulus, replace
		 *modulus with align.  If align is 0, then either op1 is 0
		 or the greatest power-of-2 divisor of op1 doesn't fit in an
		 unsigned HOST_WIDE_INT.  In either case, no additional
		 constraint is imposed.  */
	      if (align)
		modulus = MIN (modulus, align);

	      return modulus;
	    }
	}
    }

  /* If we get here, we were unable to determine anything useful about the
     expression.  */
  return 1;
}
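/* For example (assuming the object a is known to be 16-byte aligned),
   for the address &a p+ i * 8 the recursion yields modulus 16 and
   residue 0 for &a, and the MULT_EXPR arm reduces the modulus to
   MIN (16, 8) = 8: the pointer value is then known to be 0 modulo 8.  */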
/* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
   CONSTRUCTOR ARG into array ELTS and return true if successful.  */

static bool
vec_cst_ctor_to_array (tree arg, tree *elts)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;

  if (TREE_CODE (arg) == VECTOR_CST)
    {
      for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
	elts[i] = VECTOR_CST_ELT (arg, i);
    }
  else if (TREE_CODE (arg) == CONSTRUCTOR)
    {
      constructor_elt *elt;

      FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
	if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
	  return false;
	else
	  elts[i] = elt->value;
    }
  else
    return false;
  for (; i < nelts; i++)
    elts[i]
      = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
  return true;
}
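/* Note the zero-padding loop above: a CONSTRUCTOR that lists only the
   first two elements of a four-element vector is expanded to
   { e0, e1, 0, 0 }, since trailing constructor elements are implicitly
   zero.  */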
/* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
   selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
   NULL_TREE otherwise.  */

static tree
fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
  tree *elts;
  bool need_ctor = false;

  gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
	      && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
  if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
      || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
    return NULL_TREE;

  elts = XALLOCAVEC (tree, nelts * 3);
  if (!vec_cst_ctor_to_array (arg0, elts)
      || !vec_cst_ctor_to_array (arg1, elts + nelts))
    return NULL_TREE;

  for (i = 0; i < nelts; i++)
    {
      if (!CONSTANT_CLASS_P (elts[sel[i]]))
	need_ctor = true;
      elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
    }

  if (need_ctor)
    {
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, nelts);
      for (i = 0; i < nelts; i++)
	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
      return build_constructor (type, v);
    }
  else
    return build_vector (type, &elts[2 * nelts]);
}
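/* For example, with arg0 = { 1, 2, 3, 4 }, arg1 = { 5, 6, 7, 8 } and
   sel = { 0, 4, 1, 5 }, the selector indexes the concatenation of the
   two inputs and the result is { 1, 5, 2, 6 }.  */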
/* Try to fold a pointer difference of type TYPE between two address
   expressions of array references AREF0 and AREF1 using location LOC.
   Return a simplified expression for the difference or NULL_TREE.  */

static tree
fold_addr_of_array_ref_difference (location_t loc, tree type,
				   tree aref0, tree aref1)
{
  tree base0 = TREE_OPERAND (aref0, 0);
  tree base1 = TREE_OPERAND (aref1, 0);
  tree base_offset = build_int_cst (type, 0);

  /* If the bases are array references as well, recurse.  If the bases
     are pointer indirections compute the difference of the pointers.
     If the bases are equal, we are set.  */
  if ((TREE_CODE (base0) == ARRAY_REF
       && TREE_CODE (base1) == ARRAY_REF
       && (base_offset
	   = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
      || (INDIRECT_REF_P (base0)
	  && INDIRECT_REF_P (base1)
	  && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
					     TREE_OPERAND (base0, 0),
					     TREE_OPERAND (base1, 0))))
      || operand_equal_p (base0, base1, 0))
    {
      tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
      tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
      tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
      tree diff = build2 (MINUS_EXPR, type, op0, op1);
      return fold_build2_loc (loc, PLUS_EXPR, type,
			      base_offset,
			      fold_build2_loc (loc, MULT_EXPR, type,
					       diff, esz));
    }
  return NULL_TREE;
}
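/* For example, the address difference &a[i] - &a[j] over a common base
   folds to (i - j) times the element size, plus whatever offset the
   recursion computed for differing bases.  */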
/* If the real or vector real constant CST of type TYPE has an exact
   inverse, return it, else return NULL.  */

static tree
exact_inverse (tree type, tree cst)
{
  REAL_VALUE_TYPE r;
  tree unit_type, *elts;
  enum machine_mode mode;
  unsigned vec_nelts, i;

  switch (TREE_CODE (cst))
    {
    case REAL_CST:
      r = TREE_REAL_CST (cst);

      if (exact_real_inverse (TYPE_MODE (type), &r))
        return build_real (type, r);

      return NULL_TREE;

    case VECTOR_CST:
      vec_nelts = VECTOR_CST_NELTS (cst);
      elts = XALLOCAVEC (tree, vec_nelts);
      unit_type = TREE_TYPE (type);
      mode = TYPE_MODE (unit_type);

      for (i = 0; i < vec_nelts; i++)
        {
          r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
          if (!exact_real_inverse (mode, &r))
            return NULL_TREE;
          elts[i] = build_real (unit_type, r);
        }

      return build_vector (type, elts);

    default:
      return NULL_TREE;
    }
}
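/* Worked example (editorial illustration): 0.25 has the exact binary
   inverse 4.0, so a division by 0.25 can become a multiplication by
   4.0; 0.1 has no exact binary inverse, so exact_real_inverse fails
   and NULL_TREE is returned.  */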
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  LOC is the location of the resulting expression.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

tree
fold_binary_loc (location_t loc,
                 enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree arg0, arg1, tem;
  tree t1 = NULL_TREE;
  bool strict_overflow_p;

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 2
              && op0 != NULL_TREE
              && op1 != NULL_TREE);

  arg0 = op0;
  arg1 = op1;

  /* Strip any conversions that don't change the mode.  This is
     safe for every expression, except for a comparison expression
     because its signedness is derived from its operands.  So, in
     the latter case, only strip conversions that don't change the
     signedness.  MIN_EXPR/MAX_EXPR also need signedness of arguments
     preserved.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation
     of the arguments so that their form can be studied.  In any
     case, the appropriate type conversions should be put back in
     the tree that will get out of the constant folder.  */

  if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
    {
      STRIP_SIGN_NOPS (arg0);
      STRIP_SIGN_NOPS (arg1);
    }
  else
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
    }

  /* Note that TREE_CONSTANT isn't enough: static var addresses are
     constant but we can't do arithmetic on them.  */
  if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
    {
      if (kind == tcc_binary)
        {
          /* Make sure type and arg0 have the same saturating flag.  */
          gcc_assert (TYPE_SATURATING (type)
                      == TYPE_SATURATING (TREE_TYPE (arg0)));
          tem = const_binop (code, arg0, arg1);
        }
      else if (kind == tcc_comparison)
        tem = fold_relational_const (code, type, arg0, arg1);
      else
        tem = NULL_TREE;

      if (tem != NULL_TREE)
        {
          if (TREE_TYPE (tem) != type)
            tem = fold_convert_loc (loc, type, tem);
          return tem;
        }
    }

  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, code, type, op1, op0);
  /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expr.c.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     one of the operands is a comparison and the other is a comparison, a
     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
     code below would make the expression more complex.  Change it to a
     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && TREE_CODE (type) != VECTOR_TYPE
      && ((truth_value_p (TREE_CODE (arg0))
           && (truth_value_p (TREE_CODE (arg1))
               || (TREE_CODE (arg1) == BIT_AND_EXPR
                   && integer_onep (TREE_OPERAND (arg1, 1)))))
          || (truth_value_p (TREE_CODE (arg1))
              && (truth_value_p (TREE_CODE (arg0))
                  || (TREE_CODE (arg0) == BIT_AND_EXPR
                      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
                             : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
                             : TRUTH_XOR_EXPR,
                             boolean_type_node,
                             fold_convert_loc (loc, boolean_type_node, arg0),
                             fold_convert_loc (loc, boolean_type_node, arg1));

      if (code == EQ_EXPR)
        tem = invert_truthvalue_loc (loc, tem);

      return fold_convert_loc (loc, type, tem);
    }
  if (TREE_CODE_CLASS (code) == tcc_binary
      || TREE_CODE_CLASS (code) == tcc_comparison)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
        {
          tem = fold_build2_loc (loc, code, type,
                                 fold_convert_loc (loc, TREE_TYPE (op0),
                                                   TREE_OPERAND (arg0, 1)),
                                 op1);
          return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
                             tem);
        }

      if (TREE_CODE (arg1) == COMPOUND_EXPR
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_build2_loc (loc, code, type, op0,
                                 fold_convert_loc (loc, TREE_TYPE (op1),
                                                   TREE_OPERAND (arg1, 1)));
          return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
                             tem);
        }

      if (TREE_CODE (arg0) == COND_EXPR
          || TREE_CODE (arg0) == VEC_COND_EXPR
          || COMPARISON_CLASS_P (arg0))
        {
          tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
                                                     arg0, arg1,
                                                     /*cond_first_p=*/1);
          if (tem != NULL_TREE)
            return tem;
        }

      if (TREE_CODE (arg1) == COND_EXPR
          || TREE_CODE (arg1) == VEC_COND_EXPR
          || COMPARISON_CLASS_P (arg1))
        {
          tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
                                                     arg1, arg0,
                                                     /*cond_first_p=*/0);
          if (tem != NULL_TREE)
            return tem;
        }
    }

  switch (code)
    {
    case MEM_REF:
      /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2].  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
        {
          tree iref = TREE_OPERAND (arg0, 0);
          return fold_build2 (MEM_REF, type,
                              TREE_OPERAND (iref, 0),
                              int_const_binop (PLUS_EXPR, arg1,
                                               TREE_OPERAND (iref, 1)));
        }

      /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2].  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && handled_component_p (TREE_OPERAND (arg0, 0)))
        {
          tree base;
          HOST_WIDE_INT coffset;
          base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
                                                &coffset);
          if (!base)
            return NULL_TREE;
          return fold_build2 (MEM_REF, type,
                              build_fold_addr_expr (base),
                              int_const_binop (PLUS_EXPR, arg1,
                                               size_int (coffset)));
        }

      return NULL_TREE;
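/* Worked example (editorial illustration): MEM[&MEM[p, 4], 8] folds to
   MEM[p, 12], and MEM[&a.b, 8] folds to MEM[&a, offsetof (a, b) + 8]
   once the component reference yields a constant unit offset.  */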
    case POINTER_PLUS_EXPR:
      /* 0 +p index -> (type)index */
      if (integer_zerop (arg0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

      /* PTR +p 0 -> PTR */
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
          && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
        return fold_convert_loc (loc, type,
                                 fold_build2_loc (loc, PLUS_EXPR, sizetype,
                                                  fold_convert_loc (loc, sizetype,
                                                                    arg1),
                                                  fold_convert_loc (loc, sizetype,
                                                                    arg0)));

      /* (PTR +p B) +p A -> PTR +p (B + A) */
      if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
        {
          tree inner;
          tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
          tree arg00 = TREE_OPERAND (arg0, 0);
          inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
                                   arg01, fold_convert_loc (loc, sizetype, arg1));
          return fold_convert_loc (loc, type,
                                   fold_build_pointer_plus_loc (loc,
                                                                arg00, inner));
        }

      /* PTR_CST +p CST -> CST1 */
      if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
        return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
                                fold_convert_loc (loc, type, arg1));

      /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
         of the array.  Loop optimizer sometimes produce this type of
         expressions.  */
      if (TREE_CODE (arg0) == ADDR_EXPR)
        {
          tem = try_move_mult_to_index (loc, arg0,
                                        fold_convert_loc (loc,
                                                          ssizetype, arg1));
          if (tem)
            return fold_convert_loc (loc, type, tem);
        }

      return NULL_TREE;
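/* Worked example (editorial illustration): (p +p 4) +p 8 is
   reassociated by the (PTR +p B) +p A rule into p +p 12, with both
   offsets converted to sizetype before the inner addition.  */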
    case PLUS_EXPR:
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                fold_convert_loc (loc, type, arg0),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                fold_convert_loc (loc, type, arg1),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)));

      if (INTEGRAL_TYPE_P (type))
        {
          /* Convert ~A + 1 to -A.  */
          if (TREE_CODE (arg0) == BIT_NOT_EXPR
              && integer_onep (arg1))
            return fold_build1_loc (loc, NEGATE_EXPR, type,
                                    fold_convert_loc (loc, type,
                                                      TREE_OPERAND (arg0, 0)));

          /* ~X + X is -1.  */
          if (TREE_CODE (arg0) == BIT_NOT_EXPR
              && !TYPE_OVERFLOW_TRAPS (type))
            {
              tree tem = TREE_OPERAND (arg0, 0);

              STRIP_NOPS (tem);
              if (operand_equal_p (tem, arg1, 0))
                {
                  t1 = build_minus_one_cst (type);
                  return omit_one_operand_loc (loc, type, t1, arg1);
                }
            }

          /* X + ~X is -1.  */
          if (TREE_CODE (arg1) == BIT_NOT_EXPR
              && !TYPE_OVERFLOW_TRAPS (type))
            {
              tree tem = TREE_OPERAND (arg1, 0);

              STRIP_NOPS (tem);
              if (operand_equal_p (arg0, tem, 0))
                {
                  t1 = build_minus_one_cst (type);
                  return omit_one_operand_loc (loc, type, t1, arg0);
                }
            }

          /* X + (X / CST) * -CST is X % CST.  */
          if (TREE_CODE (arg1) == MULT_EXPR
              && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
              && operand_equal_p (arg0,
                                  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
            {
              tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
              tree cst1 = TREE_OPERAND (arg1, 1);
              tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
                                          cst1, cst0);
              if (sum && integer_zerop (sum))
                return fold_convert_loc (loc, type,
                                         fold_build2_loc (loc, TRUNC_MOD_EXPR,
                                                          TREE_TYPE (arg0), arg0,
                                                          cst0));
            }
        }
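/* Worked example (editorial illustration): in X + (X / 8) * -8 the two
   constants sum to zero, so the expression folds to X % 8; truncating
   division and modulus pair up exactly here.  */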
      /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
         one.  Make sure the type is not saturating and has the signedness of
         the stripped operands, as fold_plusminus_mult_expr will re-associate.
         ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
           || TREE_CODE (arg1) == MULT_EXPR)
          && !TYPE_SATURATING (type)
          && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
          && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
          && (!FLOAT_TYPE_P (type) || flag_associative_math))
        {
          tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
          if (tem)
            return tem;
        }

      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
             with a constant, and the two constants have no bits in common,
             we should treat this as a BIT_IOR_EXPR since this may produce more
             simplifications.  */
          if (TREE_CODE (arg0) == BIT_AND_EXPR
              && TREE_CODE (arg1) == BIT_AND_EXPR
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
              && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
              && integer_zerop (const_binop (BIT_AND_EXPR,
                                             TREE_OPERAND (arg0, 1),
                                             TREE_OPERAND (arg1, 1))))
            {
              code = BIT_IOR_EXPR;
              goto bit_ior;
            }

          /* Reassociate (plus (plus (mult) (foo)) (mult)) as
             (plus (plus (mult) (mult)) (foo)) so that we can
             take advantage of the factoring cases below.  */
          if (TYPE_OVERFLOW_WRAPS (type)
              && (((TREE_CODE (arg0) == PLUS_EXPR
                    || TREE_CODE (arg0) == MINUS_EXPR)
                   && TREE_CODE (arg1) == MULT_EXPR)
                  || ((TREE_CODE (arg1) == PLUS_EXPR
                       || TREE_CODE (arg1) == MINUS_EXPR)
                      && TREE_CODE (arg0) == MULT_EXPR)))
            {
              tree parg0, parg1, parg, marg;
              enum tree_code pcode;

              if (TREE_CODE (arg1) == MULT_EXPR)
                parg = arg0, marg = arg1;
              else
                parg = arg1, marg = arg0;
              pcode = TREE_CODE (parg);
              parg0 = TREE_OPERAND (parg, 0);
              parg1 = TREE_OPERAND (parg, 1);
              STRIP_NOPS (parg0);
              STRIP_NOPS (parg1);

              if (TREE_CODE (parg0) == MULT_EXPR
                  && TREE_CODE (parg1) != MULT_EXPR)
                return fold_build2_loc (loc, pcode, type,
                                        fold_build2_loc (loc, PLUS_EXPR, type,
                                                         fold_convert_loc (loc, type,
                                                                           parg0),
                                                         fold_convert_loc (loc, type,
                                                                           marg)),
                                        fold_convert_loc (loc, type, parg1));
              if (TREE_CODE (parg0) != MULT_EXPR
                  && TREE_CODE (parg1) == MULT_EXPR)
                return
                  fold_build2_loc (loc, PLUS_EXPR, type,
                                   fold_convert_loc (loc, type, parg0),
                                   fold_build2_loc (loc, pcode, type,
                                                    fold_convert_loc (loc, type, marg),
                                                    fold_convert_loc (loc, type,
                                                                      parg1)));
            }
        }
      else
        {
          /* See if ARG1 is zero and X + ARG1 reduces to X.  */
          if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* Likewise if the operands are reversed.  */
          if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

          /* Convert X + -C into X - C.  */
          if (TREE_CODE (arg1) == REAL_CST
              && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
            {
              tem = fold_negate_const (arg1, type);
              if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
                return fold_build2_loc (loc, MINUS_EXPR, type,
                                        fold_convert_loc (loc, type, arg0),
                                        fold_convert_loc (loc, type, tem));
            }

          /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
             to __complex__ ( x, y ).  This is not the same for SNaNs or
             if signed zeros are involved.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
            {
              tree rtype = TREE_TYPE (TREE_TYPE (arg0));
              tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
              tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
              bool arg0rz = false, arg0iz = false;
              if ((arg0r && (arg0rz = real_zerop (arg0r)))
                  || (arg0i && (arg0iz = real_zerop (arg0i))))
                {
                  tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
                  tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
                  if (arg0rz && arg1i && real_zerop (arg1i))
                    {
                      tree rp = arg1r ? arg1r
                        : build1 (REALPART_EXPR, rtype, arg1);
                      tree ip = arg0i ? arg0i
                        : build1 (IMAGPART_EXPR, rtype, arg0);
                      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                    }
                  else if (arg0iz && arg1r && real_zerop (arg1r))
                    {
                      tree rp = arg0r ? arg0r
                        : build1 (REALPART_EXPR, rtype, arg0);
                      tree ip = arg1i ? arg1i
                        : build1 (IMAGPART_EXPR, rtype, arg1);
                      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                    }
                }
            }
          if (flag_unsafe_math_optimizations
              && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
              && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
              && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
            return tem;

          /* Convert x+x into x*2.0.  */
          if (operand_equal_p (arg0, arg1, 0)
              && SCALAR_FLOAT_TYPE_P (type))
            return fold_build2_loc (loc, MULT_EXPR, type, arg0,
                                    build_real (type, dconst2));

          /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
             We associate floats only if the user has specified
             -fassociative-math.  */
          if (flag_associative_math
              && TREE_CODE (arg1) == PLUS_EXPR
              && TREE_CODE (arg0) != MULT_EXPR)
            {
              tree tree10 = TREE_OPERAND (arg1, 0);
              tree tree11 = TREE_OPERAND (arg1, 1);
              if (TREE_CODE (tree11) == MULT_EXPR
                  && TREE_CODE (tree10) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
                  return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
                }
            }
          /* Convert (b*c + d*e) + a into b*c + (d*e + a).
             We associate floats only if the user has specified
             -fassociative-math.  */
          if (flag_associative_math
              && TREE_CODE (arg0) == PLUS_EXPR
              && TREE_CODE (arg1) != MULT_EXPR)
            {
              tree tree00 = TREE_OPERAND (arg0, 0);
              tree tree01 = TREE_OPERAND (arg0, 1);
              if (TREE_CODE (tree01) == MULT_EXPR
                  && TREE_CODE (tree00) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
                  return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
                }
            }
        }

    bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
         is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
         is a rotate of A by B bits.  */
      {
        enum tree_code code0, code1;
        tree rtype;
        code0 = TREE_CODE (arg0);
        code1 = TREE_CODE (arg1);
        if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
             || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
            && operand_equal_p (TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg1, 0), 0)
            && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
                TYPE_UNSIGNED (rtype))
            /* Only create rotates in complete modes.  Other cases are not
               expanded properly.  */
            && (element_precision (rtype)
                == element_precision (TYPE_MODE (rtype))))
          {
            tree tree01, tree11;
            enum tree_code code01, code11;

            tree01 = TREE_OPERAND (arg0, 1);
            tree11 = TREE_OPERAND (arg1, 1);
            STRIP_NOPS (tree01);
            STRIP_NOPS (tree11);
            code01 = TREE_CODE (tree01);
            code11 = TREE_CODE (tree11);
            if (code01 == INTEGER_CST
                && code11 == INTEGER_CST
                && TREE_INT_CST_HIGH (tree01) == 0
                && TREE_INT_CST_HIGH (tree11) == 0
                && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
                    == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
              {
                tem = build2_loc (loc, LROTATE_EXPR,
                                  TREE_TYPE (TREE_OPERAND (arg0, 0)),
                                  TREE_OPERAND (arg0, 0),
                                  code0 == LSHIFT_EXPR ? tree01 : tree11);
                return fold_convert_loc (loc, type, tem);
              }
            else if (code11 == MINUS_EXPR)
              {
                tree tree110, tree111;
                tree110 = TREE_OPERAND (tree11, 0);
                tree111 = TREE_OPERAND (tree11, 1);
                STRIP_NOPS (tree110);
                STRIP_NOPS (tree111);
                if (TREE_CODE (tree110) == INTEGER_CST
                    && 0 == compare_tree_int (tree110,
                                              element_precision
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree01, tree111, 0))
                  return
                    fold_convert_loc (loc, type,
                                      build2 ((code0 == LSHIFT_EXPR
                                               ? LROTATE_EXPR
                                               : RROTATE_EXPR),
                                              TREE_TYPE (TREE_OPERAND (arg0, 0)),
                                              TREE_OPERAND (arg0, 0), tree01));
              }
            else if (code01 == MINUS_EXPR)
              {
                tree tree010, tree011;
                tree010 = TREE_OPERAND (tree01, 0);
                tree011 = TREE_OPERAND (tree01, 1);
                STRIP_NOPS (tree010);
                STRIP_NOPS (tree011);
                if (TREE_CODE (tree010) == INTEGER_CST
                    && 0 == compare_tree_int (tree010,
                                              element_precision
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree11, tree011, 0))
                  return fold_convert_loc
                    (loc, type,
                     build2 ((code0 != LSHIFT_EXPR
                              ? LROTATE_EXPR
                              : RROTATE_EXPR),
                             TREE_TYPE (TREE_OPERAND (arg0, 0)),
                             TREE_OPERAND (arg0, 0), tree11));
              }
          }
      }
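/* Worked example (editorial illustration): for a 32-bit unsigned A,
   (A << 3) + (A >> 29) satisfies C1 + C2 == 32 and becomes a left
   rotate of A by 3; (A << B) + (A >> (32 - B)) likewise becomes a
   rotate by B.  */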
    associate:
      /* In most languages, can't associate operations on floats through
         parentheses.  Rather than remember where the parentheses were, we
         don't associate floats at all, unless the user has specified
         -fassociative-math.
         And, we need to make sure type is not saturating.  */

      if ((! FLOAT_TYPE_P (type) || flag_associative_math)
          && !TYPE_SATURATING (type))
        {
          tree var0, con0, lit0, minus_lit0;
          tree var1, con1, lit1, minus_lit1;
          tree atype = type;
          bool ok = true;

          /* Split both trees into variables, constants, and literals.  Then
             associate each group together, the constants with literals,
             then the result with variables.  This increases the chances of
             literals being recombined later and of generating relocatable
             expressions for the sum of a constant and literal.  */
          var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
          var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
                             code == MINUS_EXPR);

          /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
          if (code == MINUS_EXPR)
            code = PLUS_EXPR;

          /* With undefined overflow prefer doing association in a type
             which wraps on overflow, if that is one of the operand types.  */
          if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
              || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
            {
              if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
                  && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
                atype = TREE_TYPE (arg0);
              else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
                       && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
                atype = TREE_TYPE (arg1);
              gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
            }

          /* With undefined overflow we can only associate constants with one
             variable, and constants whose association doesn't overflow.  */
          if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
              || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
            {
              if (var0 && var1)
                {
                  tree tmp0 = var0;
                  tree tmp1 = var1;

                  if (TREE_CODE (tmp0) == NEGATE_EXPR)
                    tmp0 = TREE_OPERAND (tmp0, 0);
                  if (CONVERT_EXPR_P (tmp0)
                      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
                      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
                          <= TYPE_PRECISION (atype)))
                    tmp0 = TREE_OPERAND (tmp0, 0);
                  if (TREE_CODE (tmp1) == NEGATE_EXPR)
                    tmp1 = TREE_OPERAND (tmp1, 0);
                  if (CONVERT_EXPR_P (tmp1)
                      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
                      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
                          <= TYPE_PRECISION (atype)))
                    tmp1 = TREE_OPERAND (tmp1, 0);
                  /* The only case we can still associate with two variables
                     is if they are the same, modulo negation and bit-pattern
                     preserving conversions.  */
                  if (!operand_equal_p (tmp0, tmp1, 0))
                    ok = false;
                }
            }

          /* Only do something if we found more than two objects.  Otherwise,
             nothing has changed and we risk infinite recursion.  */
          if (ok
              && (2 < ((var0 != 0) + (var1 != 0)
                       + (con0 != 0) + (con1 != 0)
                       + (lit0 != 0) + (lit1 != 0)
                       + (minus_lit0 != 0) + (minus_lit1 != 0))))
            {
              bool any_overflows = false;
              if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
              if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
              if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
              if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
              var0 = associate_trees (loc, var0, var1, code, atype);
              con0 = associate_trees (loc, con0, con1, code, atype);
              lit0 = associate_trees (loc, lit0, lit1, code, atype);
              minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
                                            code, atype);

              /* Preserve the MINUS_EXPR if the negative part of the literal is
                 greater than the positive part.  Otherwise, the multiplicative
                 folding code (i.e. extract_muldiv) may be fooled in case
                 unsigned constants are subtracted, like in the following
                 example: ((X*2 + 4) - 8U)/2.  */
              if (minus_lit0 && lit0)
                {
                  if (TREE_CODE (lit0) == INTEGER_CST
                      && TREE_CODE (minus_lit0) == INTEGER_CST
                      && tree_int_cst_lt (lit0, minus_lit0))
                    {
                      minus_lit0 = associate_trees (loc, minus_lit0, lit0,
                                                    MINUS_EXPR, atype);
                      lit0 = 0;
                    }
                  else
                    {
                      lit0 = associate_trees (loc, lit0, minus_lit0,
                                              MINUS_EXPR, atype);
                      minus_lit0 = 0;
                    }
                }

              /* Don't introduce overflows through reassociation.  */
              if (!any_overflows
                  && ((lit0 && TREE_OVERFLOW (lit0))
                      || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
                return NULL_TREE;

              if (minus_lit0)
                {
                  if (con0 == 0)
                    return
                      fold_convert_loc (loc, type,
                                        associate_trees (loc, var0, minus_lit0,
                                                         MINUS_EXPR, atype));
                  else
                    {
                      con0 = associate_trees (loc, con0, minus_lit0,
                                              MINUS_EXPR, atype);
                      return
                        fold_convert_loc (loc, type,
                                          associate_trees (loc, var0, con0,
                                                           PLUS_EXPR, atype));
                    }
                }

              con0 = associate_trees (loc, con0, lit0, code, atype);
              return
                fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
                                                              code, atype));
            }
        }

      return NULL_TREE;

    case MINUS_EXPR:
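/* Worked example (editorial illustration): (x + 4) + 5 splits into the
   variable x and the literals 4 and 5, which associate to 9 and give
   x + 9; when signed overflow is undefined, the association is done in
   a wrapping type of equal precision where possible.  */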
      /* Pointer simplifications for subtraction, simple reassociations.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
        {
          /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
          if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
              && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
            {
              tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
              tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
              tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
              tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
              return fold_build2_loc (loc, PLUS_EXPR, type,
                                      fold_build2_loc (loc, MINUS_EXPR, type,
                                                       arg00, arg10),
                                      fold_build2_loc (loc, MINUS_EXPR, type,
                                                       arg01, arg11));
            }
          /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1
             simplifies.  */
          else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
            {
              tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
              tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
              tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
                                          fold_convert_loc (loc, type, arg1));
              if (tmp)
                return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
            }
        }
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2_loc (loc, PLUS_EXPR, type, op0,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && (FLOAT_TYPE_P (type)
              || INTEGRAL_TYPE_P (type))
          && negate_expr_p (arg1)
          && reorder_operands_p (arg0, arg1))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg1)),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)));
      /* Convert -A - 1 to ~A.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (arg0) == NEGATE_EXPR
          && integer_onep (arg1)
          && !TYPE_OVERFLOW_TRAPS (type))
        return fold_build1_loc (loc, BIT_NOT_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)));

      /* Convert -1 - A to ~A.  */
      if (INTEGRAL_TYPE_P (type)
          && integer_all_onesp (arg0))
        return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);

      /* X - (X / CST) * CST is X % CST.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (arg1) == MULT_EXPR
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
          && operand_equal_p (arg0,
                              TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
                              TREE_OPERAND (arg1, 1), 0))
        return
          fold_convert_loc (loc, type,
                            fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
                                             arg0, TREE_OPERAND (arg1, 1)));
      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg0))
            return negate_expr (fold_convert_loc (loc, type, arg1));
          if (integer_zerop (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* Fold A - (A & B) into ~B & A.  */
          if (!TREE_SIDE_EFFECTS (arg0)
              && TREE_CODE (arg1) == BIT_AND_EXPR)
            {
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
                {
                  tree arg10 = fold_convert_loc (loc, type,
                                                 TREE_OPERAND (arg1, 0));
                  return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                          fold_build1_loc (loc, BIT_NOT_EXPR,
                                                           type, arg10),
                                          fold_convert_loc (loc, type, arg0));
                }
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
                {
                  tree arg11 = fold_convert_loc (loc,
                                                 type, TREE_OPERAND (arg1, 1));
                  return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                          fold_build1_loc (loc, BIT_NOT_EXPR,
                                                           type, arg11),
                                          fold_convert_loc (loc, type, arg0));
                }
            }

          /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
             any power of 2 minus 1.  */
          if (TREE_CODE (arg0) == BIT_AND_EXPR
              && TREE_CODE (arg1) == BIT_AND_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 0), 0))
            {
              tree mask0 = TREE_OPERAND (arg0, 1);
              tree mask1 = TREE_OPERAND (arg1, 1);
              tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);

              if (operand_equal_p (tem, mask1, 0))
                {
                  tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
                                         TREE_OPERAND (arg0, 0), mask1);
                  return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
                }
            }
        }
10736 else if (fold_real_zero_addition_p (TREE_TYPE (arg0
), arg1
, 1))
10737 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10739 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10740 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10741 (-ARG1 + ARG0) reduces to -ARG1. */
10742 else if (fold_real_zero_addition_p (TREE_TYPE (arg1
), arg0
, 0))
10743 return negate_expr (fold_convert_loc (loc
, type
, arg1
));
10745 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10746 __complex__ ( x, -y ). This is not the same for SNaNs or if
10747 signed zeros are involved. */
10748 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
10749 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
10750 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
10752 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
10753 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
10754 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
10755 bool arg0rz
= false, arg0iz
= false;
10756 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
10757 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
10759 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
10760 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
10761 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
10763 tree rp
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
10765 : build1 (REALPART_EXPR
, rtype
, arg1
));
10766 tree ip
= arg0i
? arg0i
10767 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
10768 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10770 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
10772 tree rp
= arg0r
? arg0r
10773 : build1 (REALPART_EXPR
, rtype
, arg0
);
10774 tree ip
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
10776 : build1 (IMAGPART_EXPR
, rtype
, arg1
));
10777 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
      /* Fold &x - &x.  This can happen from &x.foo - &x.
         This is unsafe for certain floats even in non-IEEE formats.
         In IEEE, it is unsafe because it does wrong for NaNs.
         Also note that operand_equal_p is always false if an operand
         is volatile.  */

      if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
          && operand_equal_p (arg0, arg1, 0))
        return build_zero_cst (type);

      /* A - B -> A + (-B) if B is easily negatable.  */
      if (negate_expr_p (arg1)
          && ((FLOAT_TYPE_P (type)
               /* Avoid this transformation if B is a positive REAL_CST.  */
               && (TREE_CODE (arg1) != REAL_CST
                   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
              || INTEGRAL_TYPE_P (type)))
        return fold_build2_loc (loc, PLUS_EXPR, type,
                                fold_convert_loc (loc, type, arg0),
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg1)));

      /* Try folding difference of addresses.  */
      {
        HOST_WIDE_INT diff;

        if ((TREE_CODE (arg0) == ADDR_EXPR
             || TREE_CODE (arg1) == ADDR_EXPR)
            && ptr_difference_const (arg0, arg1, &diff))
          return build_int_cst_type (type, diff);
      }

      /* Fold &a[i] - &a[j] to i-j.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
          && TREE_CODE (arg1) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
        {
          tree tem = fold_addr_of_array_ref_difference (loc, type,
                                                        TREE_OPERAND (arg0, 0),
                                                        TREE_OPERAND (arg1, 0));
          if (tem)
            return tem;
        }
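/* Worked example (editorial illustration): &a[5] - &a[2] goes through
   fold_addr_of_array_ref_difference and yields 3 * sizeof (a[0]);
   simpler address differences are already caught above by
   ptr_difference_const.  */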
      if (FLOAT_TYPE_P (type)
          && flag_unsafe_math_optimizations
          && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
          && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
          && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
        return tem;

      /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
         one.  Make sure the type is not saturating and has the signedness of
         the stripped operands, as fold_plusminus_mult_expr will re-associate.
         ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
           || TREE_CODE (arg1) == MULT_EXPR)
          && !TYPE_SATURATING (type)
          && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
          && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
          && (!FLOAT_TYPE_P (type) || flag_associative_math))
        {
          tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
          if (tem)
            return tem;
        }

      goto associate;

    case MULT_EXPR:
      /* (-A) * (-B) -> A * B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold_build2_loc (loc, MULT_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)),
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold_build2_loc (loc, MULT_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg0)),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));

      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg1))
            return omit_one_operand_loc (loc, type, arg1, arg0);
          if (integer_onep (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
          /* Transform x * -1 into -x.  Make sure to do the negation
             on the original operand with conversions not stripped
             because we can only strip non-sign-changing conversions.  */
          if (integer_minus_onep (arg1))
            return fold_convert_loc (loc, type, negate_expr (op0));
          /* Transform x * -C into -x * C if x is easily negatable.  */
          if (TREE_CODE (arg1) == INTEGER_CST
              && tree_int_cst_sgn (arg1) == -1
              && negate_expr_p (arg0)
              && (tem = negate_expr (arg1)) != arg1
              && !TREE_OVERFLOW (tem))
            return fold_build2_loc (loc, MULT_EXPR, type,
                                    fold_convert_loc (loc, type,
                                                      negate_expr (arg0)),
                                    tem);
          /* (a * (1 << b)) is (a << b)  */
          if (TREE_CODE (arg1) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg1, 0)))
            return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
                                    TREE_OPERAND (arg1, 1));
          if (TREE_CODE (arg0) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg0, 0)))
            return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
                                    TREE_OPERAND (arg0, 1));

          /* (A + A) * C -> A * 2 * C  */
          if (TREE_CODE (arg0) == PLUS_EXPR
              && TREE_CODE (arg1) == INTEGER_CST
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg0, 1), 0))
            return fold_build2_loc (loc, MULT_EXPR, type,
                                    omit_one_operand_loc (loc, type,
                                                          TREE_OPERAND (arg0, 0),
                                                          TREE_OPERAND (arg0, 1)),
                                    fold_build2_loc (loc, MULT_EXPR, type,
                                                     build_int_cst (type, 2),
                                                     arg1));

          strict_overflow_p = false;
          if (TREE_CODE (arg1) == INTEGER_CST
              && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                             &strict_overflow_p)))
            {
              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying "
                                        "multiplication"),
                                       WARN_STRICT_OVERFLOW_MISC);
              return fold_convert_loc (loc, type, tem);
            }

          /* Optimize z * conj(z) for integer complex numbers.  */
          if (TREE_CODE (arg0) == CONJ_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
            return fold_mult_zconjz (loc, type, arg1);
          if (TREE_CODE (arg1) == CONJ_EXPR
              && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
            return fold_mult_zconjz (loc, type, arg0);
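/* Worked example (editorial illustration): (A + A) * 3 is rewritten as
   A * (2 * 3) = A * 6, and extract_muldiv can rewrite forms such as
   (X * 4) * 2 into X * 8, warning under -Wstrict-overflow when the
   simplification assumes signed overflow is undefined.  */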
        }
      else
        {
          /* Maybe fold x * 0 to 0.  The expressions aren't the same
             when x is NaN, since x * 0 is also NaN.  Nor are they the
             same in modes with signed zeros, since multiplying a
             negative value by 0 gives -0, not +0.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && real_zerop (arg1))
            return omit_one_operand_loc (loc, type, arg1, arg0);
          /* In IEEE floating point, x*1 is not equivalent to x for snans.
             Likewise for complex arithmetic with signed zeros.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
                  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
              && real_onep (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* Transform x * -1.0 into -x.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
                  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
              && real_minus_onep (arg1))
            return fold_convert_loc (loc, type, negate_expr (arg0));

          /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may change
             the result for floating point types due to rounding so it is
             applied only if -fassociative-math was specified.  */
          if (flag_associative_math
              && TREE_CODE (arg0) == RDIV_EXPR
              && TREE_CODE (arg1) == REAL_CST
              && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
            {
              tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
                                      arg1);
              if (tem)
                return fold_build2_loc (loc, RDIV_EXPR, type, tem,
                                        TREE_OPERAND (arg0, 1));
            }

          /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
          if (operand_equal_p (arg0, arg1, 0))
            {
              tree tem = fold_strip_sign_ops (arg0);
              if (tem != NULL_TREE)
                {
                  tem = fold_convert_loc (loc, type, tem);
                  return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
                }
            }
          /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
             This is not the same for NaNs or if signed zeros are
             involved.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
              && TREE_CODE (arg1) == COMPLEX_CST
              && real_zerop (TREE_REALPART (arg1)))
            {
              tree rtype = TREE_TYPE (TREE_TYPE (arg0));
              if (real_onep (TREE_IMAGPART (arg1)))
                return
                  fold_build2_loc (loc, COMPLEX_EXPR, type,
                                   negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
                                                                 rtype, arg0)),
                                   fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
              else if (real_minus_onep (TREE_IMAGPART (arg1)))
                return
                  fold_build2_loc (loc, COMPLEX_EXPR, type,
                                   fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
                                   negate_expr (fold_build1_loc (loc, REALPART_EXPR,
                                                                 rtype, arg0)));
            }

          /* Optimize z * conj(z) for floating point complex numbers.
             Guarded by flag_unsafe_math_optimizations as non-finite
             imaginary components don't produce scalar results.  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg0) == CONJ_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
            return fold_mult_zconjz (loc, type, arg1);
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg1) == CONJ_EXPR
              && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
            return fold_mult_zconjz (loc, type, arg0);
          if (flag_unsafe_math_optimizations)
            {
              enum built_in_function fcode0 = builtin_mathfn_code (arg0);
              enum built_in_function fcode1 = builtin_mathfn_code (arg1);

              /* Optimizations of root(...)*root(...).  */
              if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
                {
                  tree rootfn, arg;
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);

                  /* Optimize sqrt(x)*sqrt(x) as x.  */
                  if (BUILTIN_SQRT_P (fcode0)
                      && operand_equal_p (arg00, arg10, 0)
                      && ! HONOR_SNANS (TYPE_MODE (type)))
                    return arg00;

                  /* Optimize root(x)*root(y) as root(x*y).  */
                  rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
                  return build_call_expr_loc (loc, rootfn, 1, arg);
                }

              /* Optimize expN(x)*expN(y) as expN(x+y).  */
              if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
                {
                  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
                                              CALL_EXPR_ARG (arg0, 0),
                                              CALL_EXPR_ARG (arg1, 0));
                  return build_call_expr_loc (loc, expfn, 1, arg);
                }

              /* Optimizations of pow(...)*pow(...).  */
              if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
                  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
                  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
                {
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg01 = CALL_EXPR_ARG (arg0, 1);
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);
                  tree arg11 = CALL_EXPR_ARG (arg1, 1);

                  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
                  if (operand_equal_p (arg01, arg11, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      tree arg = fold_build2_loc (loc, MULT_EXPR, type,
                                                  arg00, arg10);
                      return build_call_expr_loc (loc, powfn, 2, arg, arg01);
                    }

                  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
                  if (operand_equal_p (arg00, arg10, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
                                                  arg01, arg11);
                      return build_call_expr_loc (loc, powfn, 2, arg00, arg);
                    }
                }

              /* Optimize tan(x)*cos(x) as sin(x).  */
              if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
                   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
                   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
                   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
                   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
                   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
                  && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                      CALL_EXPR_ARG (arg1, 0), 0))
                {
                  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);

                  if (sinfn != NULL_TREE)
                    return build_call_expr_loc (loc, sinfn, 1,
                                                CALL_EXPR_ARG (arg0, 0));
                }

              /* Optimize x*pow(x,c) as pow(x,c+1).  */
              if (fcode1 == BUILT_IN_POW
                  || fcode1 == BUILT_IN_POWF
                  || fcode1 == BUILT_IN_POWL)
                {
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);
                  tree arg11 = CALL_EXPR_ARG (arg1, 1);
                  if (TREE_CODE (arg11) == REAL_CST
                      && !TREE_OVERFLOW (arg11)
                      && operand_equal_p (arg0, arg10, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
                      REAL_VALUE_TYPE c;
                      tree arg;

                      c = TREE_REAL_CST (arg11);
                      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                      arg = build_real (type, c);
                      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
                    }
                }

              /* Optimize pow(x,c)*x as pow(x,c+1).  */
              if (fcode0 == BUILT_IN_POW
                  || fcode0 == BUILT_IN_POWF
                  || fcode0 == BUILT_IN_POWL)
                {
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg01 = CALL_EXPR_ARG (arg0, 1);
                  if (TREE_CODE (arg01) == REAL_CST
                      && !TREE_OVERFLOW (arg01)
                      && operand_equal_p (arg1, arg00, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      REAL_VALUE_TYPE c;
                      tree arg;

                      c = TREE_REAL_CST (arg01);
                      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                      arg = build_real (type, c);
                      return build_call_expr_loc (loc, powfn, 2, arg1, arg);
                    }
                }

              /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x.  */
              if (!in_gimple_form
                  && optimize
                  && operand_equal_p (arg0, arg1, 0))
                {
                  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

                  if (powfn)
                    {
                      tree arg = build_real (type, dconst2);
                      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
                    }
                }
            }
        }
      goto associate;
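/* Worked example (editorial illustration): under
   -funsafe-math-optimizations, sqrt (x) * sqrt (y) becomes
   sqrt (x * y), exp (x) * exp (y) becomes exp (x + y), and
   pow (x, 2.5) * x becomes pow (x, 3.5).  */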
    case BIT_IOR_EXPR:
    bit_ior:
      if (integer_all_onesp (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* ~X | X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          t1 = build_zero_cst (type);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg1);
        }

      /* X | ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          t1 = build_zero_cst (type);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg0);
        }
      /* Canonicalize (X & C1) | C2.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          double_int c1, c2, c3, msk;
          int width = TYPE_PRECISION (type), w;
          c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
          c2 = tree_to_double_int (arg1);

          /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
          if ((c1 & c2) == c1)
            return omit_one_operand_loc (loc, type, arg1,
                                         TREE_OPERAND (arg0, 0));

          msk = double_int::mask (width);

          /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
          if (msk.and_not (c1 | c2).is_zero ())
            return fold_build2_loc (loc, BIT_IOR_EXPR, type,
                                    TREE_OPERAND (arg0, 0), arg1);

          /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
             unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
             mode which allows further optimizations.  */
          c1 &= msk;
          c2 &= msk;
          c3 = c1.and_not (c2);
          for (w = BITS_PER_UNIT;
               w <= width && w <= HOST_BITS_PER_WIDE_INT;
               w <<= 1)
            {
              unsigned HOST_WIDE_INT mask
                = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
              if (((c1.low | c2.low) & mask) == mask
                  && (c1.low & ~mask) == 0 && c1.high == 0)
                {
                  c3 = double_int::from_uhwi (mask);
                  break;
                }
            }
          if (c3 != c1)
            return fold_build2_loc (loc, BIT_IOR_EXPR, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, type,
                                                     TREE_OPERAND (arg0, 0),
                                                     double_int_to_tree (type,
                                                                         c3)),
                                    arg1);
        }
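/* Worked example (editorial illustration): with 16-bit operands,
   (X & 0xf0) | 0x0f first minimizes C1 to C3 = C1 & ~C2 = 0xf0; the
   width scan then notices that C1 | C2 fills the low byte, widens C3
   to the byte mask 0xff, and the result becomes (X & 0xff) | 0x0f.  */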
      /* (X & Y) | Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
      /* (X & Y) | X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
      /* X | (X & Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
      /* X | (Y & X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));

      /* (X & ~Y) | (~X & Y) is X ^ Y */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == BIT_AND_EXPR)
        {
          tree a0, a1, l0, l1, n0, n1;

          a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
          a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));

          l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));

          n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
          n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);

          if ((operand_equal_p (n0, a0, 0)
               && operand_equal_p (n1, a1, 0))
              || (operand_equal_p (n0, a1, 0)
                  && operand_equal_p (n1, a0, 0)))
            return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
        }
      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
        return t1;

      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

         This results in more efficient code for machines without a NAND
         instruction.  Combine will canonicalize to the first form
         which will allow use of NAND instructions provided by the
         backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        {
          return
            fold_build1_loc (loc, BIT_NOT_EXPR, type,
                             build2 (BIT_AND_EXPR, type,
                                     fold_convert_loc (loc, type,
                                                       TREE_OPERAND (arg0, 0)),
                                     fold_convert_loc (loc, type,
                                                       TREE_OPERAND (arg1, 0))));
        }

      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_all_onesp (arg1))
        return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* ~X ^ X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          t1 = build_zero_cst (type);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg1);
        }

      /* X ^ ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          t1 = build_zero_cst (type);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg0);
        }

      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
         with a constant, and the two constants have no bits in common,
         we should treat this as a BIT_IOR_EXPR since this may produce more
         simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
          && integer_zerop (const_binop (BIT_AND_EXPR,
                                         TREE_OPERAND (arg0, 1),
                                         TREE_OPERAND (arg1, 1))))
        {
          code = BIT_IOR_EXPR;
          goto bit_ior;
        }
      /* (X | Y) ^ X -> Y & ~X.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          tree t2 = TREE_OPERAND (arg0, 1);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
                                arg1);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                                fold_convert_loc (loc, type, t2),
                                fold_convert_loc (loc, type, t1));
          return t1;
        }

      /* (Y | X) ^ X -> Y & ~X.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tree t2 = TREE_OPERAND (arg0, 0);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
                                arg1);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                                fold_convert_loc (loc, type, t2),
                                fold_convert_loc (loc, type, t1));
          return t1;
        }

      /* X ^ (X | Y) -> Y & ~X.  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
        {
          tree t2 = TREE_OPERAND (arg1, 1);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
                                arg0);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                                fold_convert_loc (loc, type, t2),
                                fold_convert_loc (loc, type, t1));
          return t1;
        }

      /* X ^ (Y | X) -> Y & ~X.  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
        {
          tree t2 = TREE_OPERAND (arg1, 0);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
                                arg0);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                                fold_convert_loc (loc, type, t2),
                                fold_convert_loc (loc, type, t1));
          return t1;
        }
      /* Convert ~X ^ ~Y to X ^ Y.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        return fold_build2_loc (loc, code, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));

      /* Convert ~X ^ C to X ^ ~C.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == INTEGER_CST)
        return fold_build2_loc (loc, code, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)),
                                fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));

      /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_onep (arg1))
        return fold_build2_loc (loc, EQ_EXPR, type, arg0,
                                build_zero_cst (TREE_TYPE (arg0)));

      /* Fold (X & Y) ^ Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg1));
        }
      /* Fold (X & Y) ^ X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg1));
        }
      /* Fold X ^ (X & Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_convert_loc (loc, type, arg0),
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
        }
      /* Fold X ^ (Y & X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg0));
        }
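/* Worked example (editorial illustration): (X & 1) ^ 1 tests the low
   bit, so it is canonicalized to (X & 1) == 0; similarly (X & Y) ^ Y
   drops the shared conjunct and becomes ~X & Y.  */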
      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_AND_EXPR:
      if (integer_all_onesp (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      if (operand_equal_p (arg0, arg1, 0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* ~X & X, (X == 0) & X, and !X & X are always zero.  */
      if ((TREE_CODE (arg0) == BIT_NOT_EXPR
           || TREE_CODE (arg0) == TRUTH_NOT_EXPR
           || (TREE_CODE (arg0) == EQ_EXPR
               && integer_zerop (TREE_OPERAND (arg0, 1))))
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

      /* X & ~X, X & (X == 0), and X & !X are always zero.  */
      if ((TREE_CODE (arg1) == BIT_NOT_EXPR
           || TREE_CODE (arg1) == TRUTH_NOT_EXPR
           || (TREE_CODE (arg1) == EQ_EXPR
               && integer_zerop (TREE_OPERAND (arg1, 1))))
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree tmp1 = fold_convert_loc (loc, type, arg1);
	  tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	  tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
	  tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
	  return
	    fold_convert_loc (loc, type,
			      fold_build2_loc (loc, BIT_IOR_EXPR,
					       type, tmp2, tmp3));
	}
      /* (X | Y) & Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
      /* (X | Y) & X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
      /* X & (X | Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
      /* X & (Y | X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
      /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_onep (arg1))
	{
	  tree tem2;
	  tem = TREE_OPERAND (arg0, 0);
	  tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
	  tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
				  tem, tem2);
	  return fold_build2_loc (loc, EQ_EXPR, type, tem2,
				  build_zero_cst (TREE_TYPE (tem)));
	}
      /* Fold ~X & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && integer_onep (arg1))
	{
	  tree tem2;
	  tem = TREE_OPERAND (arg0, 0);
	  tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
	  tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
				  tem, tem2);
	  return fold_build2_loc (loc, EQ_EXPR, type, tem2,
				  build_zero_cst (TREE_TYPE (tem)));
	}
      /* Fold !X & 1 as X == 0.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && integer_onep (arg1))
	{
	  tem = TREE_OPERAND (arg0, 0);
	  return fold_build2_loc (loc, EQ_EXPR, type, tem,
				  build_zero_cst (TREE_TYPE (tem)));
	}
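      /* For the three folds above, e.g. with X = 6: (6 ^ 1) & 1 is 1 and
	 (6 & 1) == 0 is also 1, while with X = 5 both forms yield 0; the
	 comparison form is easier for later passes to combine.  */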
      /* Fold (X ^ Y) & Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg1));
	}

      /* Fold (X ^ Y) & X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg1));
	}

      /* Fold X & (X ^ Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_convert_loc (loc, type, arg0),
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
	}

      /* Fold X & (Y ^ X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg0));
	}
      /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
	 multiple of 1 << CST.  */
      if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  double_int cst1 = tree_to_double_int (arg1);
	  double_int ncst1 = (-cst1).ext (TYPE_PRECISION (TREE_TYPE (arg1)),
					  TYPE_UNSIGNED (TREE_TYPE (arg1)));
	  if ((cst1 & ncst1) == ncst1
	      && multiple_of_p (type, arg0,
				double_int_to_tree (TREE_TYPE (arg1), ncst1)))
	    return fold_convert_loc (loc, type, arg0);
	}
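      /* For example, (X * 12) & -4 folds to X * 12 here: 12 is a multiple
	 of 4, so the two low bits of the product are already zero and the
	 mask -4 (all ones except the two low bits) changes nothing.  */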
      /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
	 bits from CST2.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == MULT_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  int arg1tz
	    = tree_to_double_int (TREE_OPERAND (arg0, 1)).trailing_zeros ();
	  if (arg1tz > 0)
	    {
	      double_int arg1mask, masked;
	      arg1mask = ~double_int::mask (arg1tz);
	      arg1mask = arg1mask.ext (TYPE_PRECISION (type),
				       TYPE_UNSIGNED (type));
	      masked = arg1mask & tree_to_double_int (arg1);
	      if (masked.is_zero ())
		return omit_two_operands_loc (loc, type, build_zero_cst (type),
					      arg0, arg1);
	      else if (masked != tree_to_double_int (arg1))
		return fold_build2_loc (loc, code, type, op0,
					double_int_to_tree (type, masked));
	    }
	}
      /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
	 ((A & N) + B) & M -> (A + B) & M
	 Similarly if (N & M) == 0,
	 ((A | N) + B) & M -> (A + B) & M
	 and for - instead of + (or unary - instead of +)
	 and/or ^ instead of |.
	 If B is constant and (B & M) == 0, fold into A & M.  */
      if (host_integerp (arg1, 1))
	{
	  unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
	  if (~cst1 && (cst1 & (cst1 + 1)) == 0
	      && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	      && (TREE_CODE (arg0) == PLUS_EXPR
		  || TREE_CODE (arg0) == MINUS_EXPR
		  || TREE_CODE (arg0) == NEGATE_EXPR)
	      && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
		  || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
	    {
	      tree pmop[2];
	      int which = 0;
	      unsigned HOST_WIDE_INT cst0;

	      /* Now we know that arg0 is (C + D) or (C - D) or
		 -C and arg1 (M) is == (1LL << cst) - 1.
		 Store C into PMOP[0] and D into PMOP[1].  */
	      pmop[0] = TREE_OPERAND (arg0, 0);
	      pmop[1] = NULL;
	      if (TREE_CODE (arg0) != NEGATE_EXPR)
		{
		  pmop[1] = TREE_OPERAND (arg0, 1);
		  which = 1;
		}

	      if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
		  || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
		      & cst1) != cst1)
		which = -1;

	      for (; which >= 0; which--)
		switch (TREE_CODE (pmop[which]))
		  {
		  case BIT_AND_EXPR:
		  case BIT_IOR_EXPR:
		  case BIT_XOR_EXPR:
		    if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
			!= INTEGER_CST)
		      break;
		    /* tree_low_cst not used, because we don't care about
		       the upper bits.  */
		    cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
		    cst0 &= cst1;
		    if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
		      {
			if (cst0 != cst1)
			  break;
		      }
		    else if (cst0 != 0)
		      break;
		    /* If C or D is of the form (A & N) where
		       (N & M) == M, or of the form (A | N) or
		       (A ^ N) where (N & M) == 0, replace it with A.  */
		    pmop[which] = TREE_OPERAND (pmop[which], 0);
		    break;
		  case INTEGER_CST:
		    /* If C or D is a N where (N & M) == 0, it can be
		       omitted (assumed 0).  */
		    if ((TREE_CODE (arg0) == PLUS_EXPR
			 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
			&& (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
		      pmop[which] = NULL;
		    break;
		  default:
		    break;
		  }

	      /* Only build anything new if we optimized one or both arguments
		 above.  */
	      if (pmop[0] != TREE_OPERAND (arg0, 0)
		  || (TREE_CODE (arg0) != NEGATE_EXPR
		      && pmop[1] != TREE_OPERAND (arg0, 1)))
		{
		  tree utype = TREE_TYPE (arg0);
		  if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
		    {
		      /* Perform the operations in a type that has defined
			 overflow behavior.  */
		      utype = unsigned_type_for (TREE_TYPE (arg0));
		      if (pmop[0] != NULL)
			pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
		      if (pmop[1] != NULL)
			pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
		    }

		  if (TREE_CODE (arg0) == NEGATE_EXPR)
		    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
		  else if (TREE_CODE (arg0) == PLUS_EXPR)
		    {
		      if (pmop[0] != NULL && pmop[1] != NULL)
			tem = fold_build2_loc (loc, PLUS_EXPR, utype,
					       pmop[0], pmop[1]);
		      else if (pmop[0] != NULL)
			tem = pmop[0];
		      else if (pmop[1] != NULL)
			tem = pmop[1];
		      else
			return build_int_cst (type, 0);
		    }
		  else if (pmop[0] == NULL)
		    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
		  else
		    tem = fold_build2_loc (loc, MINUS_EXPR, utype,
					   pmop[0], pmop[1]);
		  /* TEM is now the new binary +, - or unary - replacement.  */
		  tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
					 fold_convert_loc (loc, utype, arg1));
		  return fold_convert_loc (loc, type, tem);
		}
	    }
	}
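      /* For example, with unsigned operands ((A & 0x1f) + B) & 0x0f
	 becomes (A + B) & 0x0f: 0x1f & 0x0f == 0x0f, and since carries
	 only propagate towards more significant bits, clearing bits above
	 the mask can never change the four bits that are kept.  */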
      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	{
	  prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

	  if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
	      && (~TREE_INT_CST_LOW (arg1)
		  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
	    return
	      fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	}
      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

	 This results in more efficient code for machines without a NOR
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NOR instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold_build1_loc (loc, BIT_NOT_EXPR, type,
				  build2 (BIT_IOR_EXPR, type,
					  fold_convert_loc (loc, type,
							    TREE_OPERAND (arg0, 0)),
					  fold_convert_loc (loc, type,
							    TREE_OPERAND (arg1, 0))));
	}
      /* If arg0 is derived from the address of an object or function, we may
	 be able to fold this expression using the object or function's
	 alignment.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
	{
	  unsigned HOST_WIDE_INT modulus, residue;
	  unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);

	  modulus = get_pointer_modulus_and_residue (arg0, &residue,
						     integer_onep (arg1));

	  /* This works because modulus is a power of 2.  If this weren't the
	     case, we'd have to replace it by its greatest power-of-2
	     divisor: modulus & -modulus.  */
	  if (low < modulus)
	    return build_int_cst (type, residue & low);
	}
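      /* For example, if ARG0 is the address of a variable known to be
	 8-byte aligned, the modulus is 8 and the residue 0, so ANDing
	 that address with 7 folds to the constant 0.  */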
      /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
	      (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
	 if the new mask might be further optimized.  */
      if ((TREE_CODE (arg0) == LSHIFT_EXPR
	   || TREE_CODE (arg0) == RSHIFT_EXPR)
	  && host_integerp (TREE_OPERAND (arg0, 1), 1)
	  && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
	     < TYPE_PRECISION (TREE_TYPE (arg0))
	  && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
	  && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
	{
	  unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
	  unsigned HOST_WIDE_INT mask
	    = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
	  unsigned HOST_WIDE_INT newmask, zerobits = 0;
	  tree shift_type = TREE_TYPE (arg0);

	  if (TREE_CODE (arg0) == LSHIFT_EXPR)
	    zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
	  else if (TREE_CODE (arg0) == RSHIFT_EXPR
		   && TYPE_PRECISION (TREE_TYPE (arg0))
		      == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
	    {
	      prec = TYPE_PRECISION (TREE_TYPE (arg0));
	      tree arg00 = TREE_OPERAND (arg0, 0);
	      /* See if more bits can be proven as zero because of
		 zero extension.  */
	      if (TREE_CODE (arg00) == NOP_EXPR
		  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
		{
		  tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
		  if (TYPE_PRECISION (inner_type)
		      == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
		      && TYPE_PRECISION (inner_type) < prec)
		    {
		      prec = TYPE_PRECISION (inner_type);
		      /* See if we can shorten the right shift.  */
		      if (shiftc < prec)
			shift_type = inner_type;
		    }
		}
	      zerobits = ~(unsigned HOST_WIDE_INT) 0;
	      zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
	      zerobits <<= prec - shiftc;
	      /* For arithmetic shift if sign bit could be set, zerobits
		 can contain actually sign bits, so no transformation is
		 possible, unless MASK masks them all away.  In that
		 case the shift needs to be converted into logical shift.  */
	      if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
		  && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
		{
		  if ((mask & zerobits) == 0)
		    shift_type = unsigned_type_for (TREE_TYPE (arg0));
		  else
		    zerobits = 0;
		}
	    }

	  /* ((X << 16) & 0xff00) is (X, 0).  */
	  if ((mask & zerobits) == mask)
	    return omit_one_operand_loc (loc, type,
					 build_int_cst (type, 0), arg0);

	  newmask = mask | zerobits;
	  if (newmask != mask && (newmask & (newmask + 1)) == 0)
	    {
	      /* Only do the transformation if NEWMASK is some integer
		 mode's mask.  */
	      for (prec = BITS_PER_UNIT;
		   prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
		if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
		  break;
	      if (prec < HOST_BITS_PER_WIDE_INT
		  || newmask == ~(unsigned HOST_WIDE_INT) 0)
		{
		  tree newmaskt;

		  if (shift_type != TREE_TYPE (arg0))
		    {
		      tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
					     fold_convert_loc (loc, shift_type,
							       TREE_OPERAND (arg0, 0)),
					     TREE_OPERAND (arg0, 1));
		      tem = fold_convert_loc (loc, type, tem);
		    }
		  else
		    tem = op0;
		  newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
		  if (!tree_int_cst_equal (newmaskt, arg1))
		    return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
		}
	    }
	}
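      /* For example, with a 32-bit unsigned X, (X >> 28) & 0xf widens the
	 mask to all ones, since the shift already cleared the upper 28
	 bits; the now-redundant BIT_AND is then dropped by the all-ones
	 fold at the top of this case.  */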
      goto associate;

    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
	 of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
	  && real_zerop (arg1))
	return NULL_TREE;
      /* Optimize A / A to 1.0 if we don't care about
	 NaNs or Infinities.  Skip the transformation
	 for non-real operands.  */
      if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
	  && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
	  && operand_equal_p (arg0, arg1, 0))
	{
	  tree r = build_real (TREE_TYPE (arg0), dconst1);

	  return omit_two_operands_loc (loc, type, r, arg0, arg1);
	}

      /* The complex version of the above A / A optimization.  */
      if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
	  && operand_equal_p (arg0, arg1, 0))
	{
	  tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
	  if (! HONOR_NANS (TYPE_MODE (elem_type))
	      && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
	    {
	      tree r = build_real (elem_type, dconst1);
	      /* omit_two_operands will call fold_convert for us.  */
	      return omit_two_operands_loc (loc, type, r, arg0, arg1);
	    }
	}
      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				TREE_OPERAND (arg0, 0),
				negate_expr (arg1));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				negate_expr (arg0),
				TREE_OPERAND (arg1, 0));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_onep (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_minus_onep (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type,
						      negate_expr (arg0)));
      /* If ARG1 is a constant, we can convert this to a multiply by the
	 reciprocal.  This does not have the same rounding properties,
	 so only do this if -freciprocal-math.  We can actually
	 always safely do it if ARG1 is a power of two, but it's hard to
	 tell if it is or not in a portable manner.  */
      if (optimize
	  && (TREE_CODE (arg1) == REAL_CST
	      || (TREE_CODE (arg1) == COMPLEX_CST
		  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
	      || (TREE_CODE (arg1) == VECTOR_CST
		  && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
	{
	  if (flag_reciprocal_math
	      && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
	    return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
	  /* Find the reciprocal if optimizing and the result is exact.
	     TODO: Complex reciprocal not implemented.  */
	  if (TREE_CODE (arg1) != COMPLEX_CST)
	    {
	      tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);

	      if (inverse)
		return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
	    }
	}
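      /* For example, x / 4.0 becomes x * 0.25 whenever the reciprocal is
	 exact, and under -freciprocal-math x / 5.0 becomes x * 0.2 even
	 though the rounding may differ.  */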
      /* Convert A/B/C to A/(B*C).  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg0) == RDIV_EXPR)
	return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
				fold_build2_loc (loc, MULT_EXPR, type,
						 TREE_OPERAND (arg0, 1), arg1));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg1) == RDIV_EXPR)
	return fold_build2_loc (loc, MULT_EXPR, type,
				fold_build2_loc (loc, RDIV_EXPR, type, arg0,
						 TREE_OPERAND (arg1, 0)),
				TREE_OPERAND (arg1, 1));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg1) == MULT_EXPR
	  && TREE_CODE (arg0) == REAL_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
	{
	  tree tem = const_binop (RDIV_EXPR, arg0,
				  TREE_OPERAND (arg1, 1));
	  if (tem)
	    return fold_build2_loc (loc, RDIV_EXPR, type, tem,
				    TREE_OPERAND (arg1, 0));
	}
      if (flag_unsafe_math_optimizations)
	{
	  enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	  enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	  /* Optimize sin(x)/cos(x) as tan(x).  */
	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
	      && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				  CALL_EXPR_ARG (arg1, 0), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		return build_call_expr_loc (loc, tanfn, 1,
					    CALL_EXPR_ARG (arg0, 0));
	    }

	  /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
	  if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
	       || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
	       || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
	      && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				  CALL_EXPR_ARG (arg1, 0), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		{
		  tree tmp = build_call_expr_loc (loc, tanfn, 1,
						  CALL_EXPR_ARG (arg0, 0));
		  return fold_build2_loc (loc, RDIV_EXPR, type,
					  build_real (type, dconst1), tmp);
		}
	    }

	  /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
	     NaNs or Infinities.  */
	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg1, 0);

	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
		  && operand_equal_p (arg00, arg01, 0))
		{
		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

		  if (cosfn != NULL_TREE)
		    return build_call_expr_loc (loc, cosfn, 1, arg00);
		}
	    }

	  /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
	     NaNs or Infinities.  */
	  if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
	       || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
	       || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg1, 0);

	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
		  && operand_equal_p (arg00, arg01, 0))
		{
		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

		  if (cosfn != NULL_TREE)
		    {
		      tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
		      return fold_build2_loc (loc, RDIV_EXPR, type,
					      build_real (type, dconst1),
					      tmp);
		    }
		}
	    }

	  /* Optimize pow(x,c)/x as pow(x,c-1).  */
	  if (fcode0 == BUILT_IN_POW
	      || fcode0 == BUILT_IN_POWF
	      || fcode0 == BUILT_IN_POWL)
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
	      if (TREE_CODE (arg01) == REAL_CST
		  && !TREE_OVERFLOW (arg01)
		  && operand_equal_p (arg1, arg00, 0))
		{
		  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		  REAL_VALUE_TYPE c;
		  tree arg;

		  c = TREE_REAL_CST (arg01);
		  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
		  arg = build_real (type, c);
		  return build_call_expr_loc (loc, powfn, 2, arg1, arg);
		}
	    }

	  /* Optimize a/root(b/c) into a*root(c/b).  */
	  if (BUILTIN_ROOT_P (fcode1))
	    {
	      tree rootarg = CALL_EXPR_ARG (arg1, 0);

	      if (TREE_CODE (rootarg) == RDIV_EXPR)
		{
		  tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
		  tree b = TREE_OPERAND (rootarg, 0);
		  tree c = TREE_OPERAND (rootarg, 1);

		  tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);

		  tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
		  return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
		}
	    }

	  /* Optimize x/expN(y) into x*expN(-y).  */
	  if (BUILTIN_EXPONENT_P (fcode1))
	    {
	      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
	      tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
	      arg1 = build_call_expr_loc (loc,
					  expfn, 1,
					  fold_convert_loc (loc, type, arg));
	      return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
	    }

	  /* Optimize x/pow(y,z) into x*pow(y,-z).  */
	  if (fcode1 == BUILT_IN_POW
	      || fcode1 == BUILT_IN_POWF
	      || fcode1 == BUILT_IN_POWL)
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
	      tree arg10 = CALL_EXPR_ARG (arg1, 0);
	      tree arg11 = CALL_EXPR_ARG (arg1, 1);
	      tree neg11 = fold_convert_loc (loc, type,
					     negate_expr (arg11));
	      arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
	      return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
	    }
	}
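      /* Examples of the unsafe-math folds above: sin (x) / cos (x)
	 becomes tan (x), and pow (x, 3.0) / x becomes pow (x, 2.0);
	 both may change rounding, NaN and Inf behavior, which is why
	 they are guarded by flag_unsafe_math_optimizations.  */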
      return NULL_TREE;

    case TRUNC_DIV_EXPR:
      /* Optimize (X & (-A)) / A where A is a power of 2,
	 to X >> log2(A).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
	  && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
	{
	  tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
				      arg1, TREE_OPERAND (arg0, 1));
	  if (sum && integer_zerop (sum))
	    {
	      unsigned long pow2;

	      if (TREE_INT_CST_LOW (arg1))
		pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
	      else
		pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
		       + HOST_BITS_PER_WIDE_INT;

	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
				      TREE_OPERAND (arg0, 0),
				      build_int_cst (integer_type_node, pow2));
	    }
	}
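      /* For example, for signed X, (X & -8) / 8 folds to X >> 3: the mask
	 clears exactly the bits the division would discard, so the
	 quotient equals the arithmetic right shift.  */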
      /* Fall through */

    case FLOOR_DIV_EXPR:
      /* Simplify A / (B << N) where A and B are positive and B is
	 a power of 2, to A >> (N + log2(B)).  */
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == LSHIFT_EXPR
	  && (TYPE_UNSIGNED (type)
	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
	{
	  tree sval = TREE_OPERAND (arg1, 0);
	  if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
	    {
	      tree sh_cnt = TREE_OPERAND (arg1, 1);
	      unsigned long pow2;

	      if (TREE_INT_CST_LOW (sval))
		pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
	      else
		pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
		       + HOST_BITS_PER_WIDE_INT;

	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying A / (B << N)"),
				       WARN_STRICT_OVERFLOW_MISC);

	      sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
					sh_cnt,
					build_int_cst (TREE_TYPE (sh_cnt),
						       pow2));
	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
				      fold_convert_loc (loc, type, arg0),
				      sh_cnt);
	    }
	}

      /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
	 TRUNC_DIV_EXPR.  Rewrite into the latter in this case.  */
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_UNSIGNED (type)
	  && code == FLOOR_DIV_EXPR)
	return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
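      /* For example, for unsigned A, A / (4 << N) folds to
	 A >> (N + 2).  */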
      /* Fall through */

    case ROUND_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg1))
	return NULL_TREE;
      /* X / -1 is -X.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
	  && TREE_INT_CST_HIGH (arg1) == -1)
	return fold_convert_loc (loc, type, negate_expr (arg0));

      /* Convert -A / -B to A / B when the type is signed and overflow is
	 undefined.  */
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	  && TREE_CODE (arg0) == NEGATE_EXPR
	  && negate_expr_p (arg1))
	{
	  if (INTEGRAL_TYPE_P (type))
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when distributing negation across "
				    "division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, type,
						    TREE_OPERAND (arg0, 0)),
				  fold_convert_loc (loc, type,
						    negate_expr (arg1)));
	}
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && negate_expr_p (arg0))
	{
	  if (INTEGRAL_TYPE_P (type))
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when distributing negation across "
				    "division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, type,
						    negate_expr (arg0)),
				  fold_convert_loc (loc, type,
						    TREE_OPERAND (arg1, 0)));
	}
      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
	 operation, EXACT_DIV_EXPR.

	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
	 At one time others generated faster code, it's not clear if they do
	 after the last round to changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
	  && multiple_of_p (type, arg0, arg1))
	return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					 &strict_overflow_p)))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_convert_loc (loc, type, tem);
	}
      return NULL_TREE;

    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* X % 1 is always zero, but be sure to preserve any side
	 effects in X.  */
      if (integer_onep (arg1))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* X % 0, return X % 0 unchanged so that we can get the
	 proper warnings and errors.  */
      if (integer_zerop (arg1))
	return NULL_TREE;

      /* 0 % X is always zero, but be sure to preserve any side
	 effects in X.  Place this after checking for X == 0.  */
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

      /* X % -1 is zero.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
	  && TREE_INT_CST_HIGH (arg1) == -1)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* X % -C is the same as X % C.  */
      if (code == TRUNC_MOD_EXPR
	  && !TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && !TREE_OVERFLOW (arg1)
	  && TREE_INT_CST_HIGH (arg1) < 0
	  && !TYPE_OVERFLOW_TRAPS (type)
	  /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
	  && !sign_bit_p (arg1, arg1))
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type,
						  negate_expr (arg1)));

      /* X % -Y is the same as X % Y.  */
      if (code == TRUNC_MOD_EXPR
	  && !TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && !TYPE_OVERFLOW_TRAPS (type))
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					 &strict_overflow_p)))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying modulus"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_convert_loc (loc, type, tem);
	}
      /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
	 i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
      if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
	  && (TYPE_UNSIGNED (type)
	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
	{
	  tree c = arg1;
	  /* Also optimize A % (C << N) where C is a power of 2,
	     to A & ((C << N) - 1).  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR)
	    c = TREE_OPERAND (arg1, 0);

	  if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
	    {
	      tree mask
		= fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
				   build_int_cst (TREE_TYPE (arg1), 1));
	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying "
					"X % (power of two)"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_convert_loc (loc, type, arg0),
				      fold_convert_loc (loc, type, mask));
	    }
	}
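      /* For example, for unsigned X, X % 8 folds to X & 7 and
	 X % (2 << N) folds to X & ((2 << N) - 1).  */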
      return NULL_TREE;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_all_onesp (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
	  && tree_expr_nonnegative_p (arg1))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      /* ... fall through ...  */

    case LSHIFT_EXPR:
    shift:
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      /* Since negative shift count is not well-defined,
	 don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
	return NULL_TREE;

      prec = element_precision (type);

      /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
      if (TREE_CODE (op0) == code && host_integerp (arg1, false)
	  && TREE_INT_CST_LOW (arg1) < prec
	  && host_integerp (TREE_OPERAND (arg0, 1), false)
	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < prec)
	{
	  HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
			       + TREE_INT_CST_LOW (arg1));

	  /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
	     being well defined.  */
	  if (low >= prec)
	    {
	      if (code == LROTATE_EXPR || code == RROTATE_EXPR)
		low = low % prec;
	      else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
		return omit_one_operand_loc (loc, type, build_zero_cst (type),
					     TREE_OPERAND (arg0, 0));
	      else
		low = prec - 1;
	    }

	  return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				  build_int_cst (TREE_TYPE (arg1), low));
	}
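      /* For example, (x >> 2) >> 3 folds to x >> 5; for rotates the
	 counts are combined modulo the precision instead.  */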
      /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
	 into x & ((unsigned)-1 >> c) for unsigned types.  */
      if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
	   || (TYPE_UNSIGNED (type)
	       && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
	  && host_integerp (arg1, false)
	  && TREE_INT_CST_LOW (arg1) < prec
	  && host_integerp (TREE_OPERAND (arg0, 1), false)
	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < prec)
	{
	  HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
	  HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
	  tree lshift;
	  tree arg00;

	  if (low0 == low1)
	    {
	      arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));

	      lshift = build_minus_one_cst (type);
	      lshift = const_binop (code, lshift, arg1);

	      return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
	    }
	}
      /* Rewrite an LROTATE_EXPR by a constant into an
	 RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
	{
	  tree tem = build_int_cst (TREE_TYPE (arg1), prec);
	  tem = const_binop (MINUS_EXPR, tem, arg1);
	  return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
	}
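      /* For example, on a 32-bit type a rotate left by 8 becomes a rotate
	 right by 24; canonicalizing on RROTATE_EXPR lets the rotate folds
	 below match a single form.  */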
      /* If we have a rotate of a bit operation with the rotate count and
	 the second operand of the bit operation both constant,
	 permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == BIT_AND_EXPR
	      || TREE_CODE (arg0) == BIT_IOR_EXPR
	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, TREE_CODE (arg0), type,
				fold_build2_loc (loc, code, type,
						 TREE_OPERAND (arg0, 0), arg1),
				fold_build2_loc (loc, code, type,
						 TREE_OPERAND (arg0, 1), arg1));

      /* Two consecutive rotates adding up to the precision of the
	 type can be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == RROTATE_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (arg1) == 0
	  && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
	  && ((TREE_INT_CST_LOW (arg1)
	       + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
	      == prec))
	return TREE_OPERAND (arg0, 0);
      /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
	      (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
	 if the latter can be further optimized.  */
      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree mask = fold_build2_loc (loc, code, type,
				       fold_convert_loc (loc, type,
							 TREE_OPERAND (arg0, 1)),
				       arg1);
	  tree shift = fold_build2_loc (loc, code, type,
					fold_convert_loc (loc, type,
							  TREE_OPERAND (arg0, 0)),
					arg1);
	  tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
	  if (tem)
	    return tem;
	}

      return NULL_TREE;
    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
      if (tem)
	return tem;
      goto associate;
    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_MAX_VALUE (type)
	  && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
      if (tem)
	return tem;
      goto associate;
    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
	return fold_convert_loc (loc, type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
	 must be evaluated.  */
      if (integer_zerop (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
	 case will be handled here.  */
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
	 means A >= Y && A != MAX, but in this case we know that
	 A < X <= MAX.  */

      if (!TREE_SIDE_EFFECTS (arg0)
	  && !TREE_SIDE_EFFECTS (arg1))
	{
	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
	  if (tem && !operand_equal_p (tem, arg0, 0))
	    return fold_build2_loc (loc, code, type, tem, arg1);

	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
	  if (tem && !operand_equal_p (tem, arg1, 0))
	    return fold_build2_loc (loc, code, type, arg0, tem);
	}

      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
	  != NULL_TREE)
	return tem;

      return NULL_TREE;
    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or true.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return fold_convert_loc (loc, type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant true, result is true, but we must
	 evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
	 TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X || X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
      /* X || !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* (X && !Y) || (!X && Y) is X ^ Y */
      if (TREE_CODE (arg0) == TRUTH_AND_EXPR
	  && TREE_CODE (arg1) == TRUTH_AND_EXPR)
	{
	  tree a0, a1, l0, l1, n0, n1;

	  a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));

	  l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));

	  n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
	  n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);

	  if ((operand_equal_p (n0, a0, 0)
	       && operand_equal_p (n1, a1, 0))
	      || (operand_equal_p (n0, a1, 0)
		  && operand_equal_p (n1, a0, 0)))
	    return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
	}

      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
	  != NULL_TREE)
	return tem;

      return NULL_TREE;
    case TRUTH_XOR_EXPR:
      /* If the second arg is constant zero, drop it.  */
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If the second arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg1))
	{
	  /* Only call invert_truthvalue if operand is a truth value.  */
	  if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
	    tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
	  else
	    tem = invert_truthvalue_loc (loc, arg0);
	  return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	}
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* !X ^ X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg1);

      /* X ^ !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      return NULL_TREE;

    case EQ_EXPR:
    case NE_EXPR:
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;

      /* bool_var != 0 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	  && code == NE_EXPR)
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var == 1 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	  && code == EQ_EXPR)
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var != 1 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	  && code == NE_EXPR)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
						  TREE_TYPE (arg0), arg0));

      /* bool_var == 0 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	  && code == EQ_EXPR)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
						  TREE_TYPE (arg0), arg0));

      /* !exp != 0 becomes !exp.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
	  && code == NE_EXPR)
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If this is an equality comparison of the address of two non-weak,
	 unaliased symbols neither of which are extern (since we do not
	 have access to attributes for externs), then we know the result.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
	{
	  /* We know that we're looking at the address of two
	     non-weak, unaliased, static _DECL nodes.

	     It is both wasteful and incorrect to call operand_equal_p
	     to compare the two ADDR_EXPR nodes.  It is wasteful in that
	     all we need to do is test pointer equality for the arguments
	     to the two ADDR_EXPR nodes.  It is incorrect to use
	     operand_equal_p as that function is NOT equivalent to a
	     C equality test.  It can in fact return false for two
	     objects which would test as equal using the C equality
	     operator.  */
	  bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
	  return constant_boolean_node (equal
					? code == EQ_EXPR : code != EQ_EXPR,
					type);
	}
      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
	 a MINUS_EXPR of a constant, we can convert it into a comparison with
	 a revised constant as long as no overflow occurs.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      fold_convert_loc (loc, TREE_TYPE (arg0),
							arg1),
				      TREE_OPERAND (arg0, 1)))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a NEGATE_EXPR.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
							arg1)))
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a BIT_XOR_EXPR;  X ^ C1 == C2 is X == (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				fold_build2_loc (loc, BIT_XOR_EXPR,
						 TREE_TYPE (arg0),
						 fold_convert_loc (loc,
								   TREE_TYPE (arg0),
								   arg1),
						 TREE_OPERAND (arg0, 1)));
      /* Transform comparisons of the form X +- Y CMP X to Y CMP 0.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR
	   || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	   || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
									0)),
			      arg1, 0)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	      || POINTER_TYPE_P (TREE_TYPE (arg0))))
	{
	  tree val = TREE_OPERAND (arg0, 1);
	  return omit_two_operands_loc (loc, type,
					fold_build2_loc (loc, code, type,
							 val,
							 build_int_cst (TREE_TYPE (val),
									0)),
					TREE_OPERAND (arg0, 0), arg1);
	}

      /* Transform comparisons of the form C - X CMP X if C % 2 == 1.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
									1)),
			      arg1, 0)
	  && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
	{
	  return omit_two_operands_loc (loc, type,
					code == NE_EXPR
					? boolean_true_node : boolean_false_node,
					TREE_OPERAND (arg0, 1), arg1);
	}
      /* If we have X - Y == 0, we can convert that to X == Y and similarly
	 for !=.  Don't do this for ordered comparisons due to overflow.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
	  && integer_zerop (arg1))
	return fold_build2_loc (loc, code, type,
				TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg0, 1));

      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
      if (TREE_CODE (arg0) == ABS_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1)))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
      /* If this is an EQ or NE comparison with zero and ARG0 is
	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
	 two operations, but the latter can be done in one less insn
	 on machines that have only two-operand insns or on which a
	 constant cannot be the first operand.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_zerop (arg1))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  if (TREE_CODE (arg00) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg00, 0)))
	    {
	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
					  arg01, TREE_OPERAND (arg00, 1));
	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				     build_int_cst (TREE_TYPE (arg0), 1));
	      return fold_build2_loc (loc, code, type,
				      fold_convert_loc (loc, TREE_TYPE (arg1),
							tem),
				      arg1);
	    }
	  else if (TREE_CODE (arg01) == LSHIFT_EXPR
		   && integer_onep (TREE_OPERAND (arg01, 0)))
	    {
	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
					  arg00, TREE_OPERAND (arg01, 1));
	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				     build_int_cst (TREE_TYPE (arg0), 1));
	      return fold_build2_loc (loc, code, type,
				      fold_convert_loc (loc, TREE_TYPE (arg1),
							tem),
				      arg1);
	    }
	}
      /* If this is an NE or EQ comparison of zero against the result of a
	 signed MOD operation whose second operand is a power of 2, make
	 the MOD operation unsigned since it is simpler and equivalent.  */
      if (integer_zerop (arg1)
	  && !TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tree newtype = unsigned_type_for (TREE_TYPE (arg0));
	  tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
					 fold_convert_loc (loc, newtype,
							   TREE_OPERAND (arg0, 0)),
					 fold_convert_loc (loc, newtype,
							   TREE_OPERAND (arg0, 1)));

	  return fold_build2_loc (loc, code, type, newmod,
				  fold_convert_loc (loc, newtype, arg1));
	}
      /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
	 C1 is a valid shift constant, and C2 is a power of two, i.e.
	 a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
	     == INTEGER_CST
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && integer_zerop (arg1))
	{
	  tree itype = TREE_TYPE (arg0);
	  tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
	  prec = TYPE_PRECISION (itype);

	  /* Check for a valid shift count.  */
	  if (TREE_INT_CST_HIGH (arg001) == 0
	      && TREE_INT_CST_LOW (arg001) < prec)
	    {
	      tree arg01 = TREE_OPERAND (arg0, 1);
	      tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	      unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
	      /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
		 can be rewritten as (X & (C2 << C1)) != 0.  */
	      if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
		{
		  tem = fold_build2_loc (loc, LSHIFT_EXPR, itype,
					 arg01, arg001);
		  tem = fold_build2_loc (loc, BIT_AND_EXPR, itype,
					 arg000, tem);
		  return fold_build2_loc (loc, code, type, tem,
					  fold_convert_loc (loc, itype, arg1));
		}
	      /* Otherwise, for signed (arithmetic) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
		 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
	      else if (!TYPE_UNSIGNED (itype))
		return fold_build2_loc (loc,
					code == EQ_EXPR ? GE_EXPR : LT_EXPR,
					type, arg000,
					build_int_cst (itype, 0));
	      /* Otherwise, for unsigned (logical) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
		 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
	      else
		return omit_one_operand_loc (loc, type,
					     code == EQ_EXPR ? integer_one_node
					     : integer_zero_node,
					     arg000);
	    }
	}
      /* If we have (A & C) == C where C is a power of 2, convert this into
	 (A & C) != 0.  Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
				arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
							integer_zero_node));

      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
	 bit, then fold the expression into A < 0 or A >= 0.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
      if (tem)
	return tem;
      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
				       TREE_TYPE (TREE_OPERAND (arg0, 1)),
				       TREE_OPERAND (arg0, 1));
	  tree dandnotc
	    = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
			       fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
			       notc);
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (dandnotc))
	    return omit_one_operand_loc (loc, type, rslt, arg0);
	}

      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
				       arg1);
	  tree candnotd
	    = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
			       TREE_OPERAND (arg0, 1),
			       fold_convert_loc (loc, TREE_TYPE (arg0), notd));
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (candnotd))
	    return omit_one_operand_loc (loc, type, rslt, arg0);
	}
      /* If this is a comparison of a field, we may be able to simplify it.  */
      if ((TREE_CODE (arg0) == COMPONENT_REF
	   || TREE_CODE (arg0) == BIT_FIELD_REF)
	  /* Handle the constant case even without -O
	     to make sure the warnings are given.  */
	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
	{
	  t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
	  if (t1)
	    return t1;
	}

      /* Optimize comparisons of strlen vs zero to a compare of the
	 first character of the string vs zero.  To wit,
		strlen(ptr) == 0   =>  *ptr == 0
		strlen(ptr) != 0   =>  *ptr != 0
	 Other cases should reduce to one of these two (or a constant)
	 due to the return value of strlen being unsigned.  */
      if (TREE_CODE (arg0) == CALL_EXPR
	  && integer_zerop (arg1))
	{
	  tree fndecl = get_callee_fndecl (arg0);

	  if (fndecl
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
	      && call_expr_nargs (arg0) == 1
	      && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
	    {
	      tree iref = build_fold_indirect_ref_loc (loc,
						       CALL_EXPR_ARG (arg0, 0));
	      return fold_build2_loc (loc, code, type, iref,
				      build_int_cst (TREE_TYPE (iref), 0));
	    }
	}
      /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
	 of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
      if (TREE_CODE (arg0) == RSHIFT_EXPR
	  && integer_zerop (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree itype = TREE_TYPE (arg00);
	  if (TREE_INT_CST_HIGH (arg01) == 0
	      && TREE_INT_CST_LOW (arg01)
		 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
	    {
	      if (TYPE_UNSIGNED (itype))
		{
		  itype = signed_type_for (itype);
		  arg00 = fold_convert_loc (loc, itype, arg00);
		}
	      return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
				      type, arg00, build_zero_cst (itype));
	    }
	}
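      /* For example, for a 32-bit signed X, (X >> 31) != 0 becomes X < 0:
	 the arithmetic shift leaves only copies of the sign bit behind.  */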
13060 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
13061 if (integer_zerop (arg1
)
13062 && TREE_CODE (arg0
) == BIT_XOR_EXPR
)
13063 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
13064 TREE_OPERAND (arg0
, 1));
13066 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
13067 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
13068 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
13069 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
13070 build_zero_cst (TREE_TYPE (arg0
)));
13071 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
13072 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
13073 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
13074 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
13075 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 1),
13076 build_zero_cst (TREE_TYPE (arg0
)));
      /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                                fold_build2_loc (loc, BIT_XOR_EXPR,
                                                 TREE_TYPE (arg1),
                                                 TREE_OPERAND (arg0, 1), arg1));
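      /* E.g. (x ^ 5) == 3 becomes x == 6, since XOR with a constant is
         an involution and 5 ^ 3 == 6.  */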
      /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
         (X & C) == 0 when C is a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
          && integer_zerop (arg1)
          && integer_pow2p (TREE_OPERAND (arg0, 1)))
        {
          tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
                                 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
                                 TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
                                  type, tem,
                                  fold_convert_loc (loc, TREE_TYPE (arg0),
                                                    arg1));
        }
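      /* E.g. (~x & 8) == 0 becomes (x & 8) != 0: when C is a single bit,
         ~x has that bit clear exactly when x has it set.  */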
      /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
         constant C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
          && integer_zerop (arg1)
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
                                  type,
                                  arg00, build_int_cst (TREE_TYPE (arg00), 0));
        }
      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
         when C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
          && integer_zerop (arg1)
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
        {
          tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
          tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
                                 arg000, TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
                                  type,
                                  tem, build_int_cst (TREE_TYPE (tem), 0));
        }
      if (integer_zerop (arg1)
          && tree_expr_nonzero_p (arg0))
        {
          tree res = constant_boolean_node (code == NE_EXPR, type);
          return omit_one_operand_loc (loc, type, res, arg0);
        }
      /* Fold -X op -Y as X op Y, where op is eq/ne.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2_loc (loc, code, type,
                                TREE_OPERAND (arg0, 0),
                                fold_convert_loc (loc, TREE_TYPE (arg0),
                                                  TREE_OPERAND (arg1, 0)));
      /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0, and symmetries.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == BIT_AND_EXPR)
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg10 = TREE_OPERAND (arg1, 0);
          tree arg11 = TREE_OPERAND (arg1, 1);
          tree itype = TREE_TYPE (arg0);

          if (operand_equal_p (arg01, arg11, 0))
            return fold_build2_loc (loc, code, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                                     fold_build2_loc (loc,
                                                                      BIT_XOR_EXPR, itype,
                                                                      arg00, arg10),
                                                     arg01),
                                    build_zero_cst (itype));

          if (operand_equal_p (arg01, arg10, 0))
            return fold_build2_loc (loc, code, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                                     fold_build2_loc (loc,
                                                                      BIT_XOR_EXPR, itype,
                                                                      arg00, arg11),
                                                     arg01),
                                    build_zero_cst (itype));

          if (operand_equal_p (arg00, arg11, 0))
            return fold_build2_loc (loc, code, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                                     fold_build2_loc (loc,
                                                                      BIT_XOR_EXPR, itype,
                                                                      arg01, arg10),
                                                     arg00),
                                    build_zero_cst (itype));

          if (operand_equal_p (arg00, arg10, 0))
            return fold_build2_loc (loc, code, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                                     fold_build2_loc (loc,
                                                                      BIT_XOR_EXPR, itype,
                                                                      arg01, arg11),
                                                     arg00),
                                    build_zero_cst (itype));
        }
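      /* The four cases above all reduce e.g. (x & 7) == (y & 7) to
         ((x ^ y) & 7) == 0, which needs one AND instead of two.  */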
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (arg1) == BIT_XOR_EXPR)
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg10 = TREE_OPERAND (arg1, 0);
          tree arg11 = TREE_OPERAND (arg1, 1);
          tree itype = TREE_TYPE (arg0);

          /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
             operand_equal_p guarantees no side-effects so we don't need
             to use omit_one_operand on Z.  */
          if (operand_equal_p (arg01, arg11, 0))
            return fold_build2_loc (loc, code, type, arg00,
                                    fold_convert_loc (loc, TREE_TYPE (arg00),
                                                      arg10));
          if (operand_equal_p (arg01, arg10, 0))
            return fold_build2_loc (loc, code, type, arg00,
                                    fold_convert_loc (loc, TREE_TYPE (arg00),
                                                      arg11));
          if (operand_equal_p (arg00, arg11, 0))
            return fold_build2_loc (loc, code, type, arg01,
                                    fold_convert_loc (loc, TREE_TYPE (arg01),
                                                      arg10));
          if (operand_equal_p (arg00, arg10, 0))
            return fold_build2_loc (loc, code, type, arg01,
                                    fold_convert_loc (loc, TREE_TYPE (arg01),
                                                      arg11));

          /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
          if (TREE_CODE (arg01) == INTEGER_CST
              && TREE_CODE (arg11) == INTEGER_CST)
            {
              tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
                                     fold_convert_loc (loc, itype, arg11));
              tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
              return fold_build2_loc (loc, code, type, tem,
                                      fold_convert_loc (loc, itype, arg10));
            }
        }
      /* Attempt to simplify equality/inequality comparisons of complex
         values.  Only lower the comparison if the result is known or
         can be simplified to a single scalar comparison.  */
      if ((TREE_CODE (arg0) == COMPLEX_EXPR
           || TREE_CODE (arg0) == COMPLEX_CST)
          && (TREE_CODE (arg1) == COMPLEX_EXPR
              || TREE_CODE (arg1) == COMPLEX_CST))
        {
          tree real0, imag0, real1, imag1;
          tree rcond, icond;

          if (TREE_CODE (arg0) == COMPLEX_EXPR)
            {
              real0 = TREE_OPERAND (arg0, 0);
              imag0 = TREE_OPERAND (arg0, 1);
            }
          else
            {
              real0 = TREE_REALPART (arg0);
              imag0 = TREE_IMAGPART (arg0);
            }

          if (TREE_CODE (arg1) == COMPLEX_EXPR)
            {
              real1 = TREE_OPERAND (arg1, 0);
              imag1 = TREE_OPERAND (arg1, 1);
            }
          else
            {
              real1 = TREE_REALPART (arg1);
              imag1 = TREE_IMAGPART (arg1);
            }

          rcond = fold_binary_loc (loc, code, type, real0, real1);
          if (rcond && TREE_CODE (rcond) == INTEGER_CST)
            {
              if (integer_zerop (rcond))
                {
                  if (code == EQ_EXPR)
                    return omit_two_operands_loc (loc, type,
                                                  boolean_false_node,
                                                  imag0, imag1);
                  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
                }
              else
                {
                  if (code == NE_EXPR)
                    return omit_two_operands_loc (loc, type,
                                                  boolean_true_node,
                                                  imag0, imag1);
                  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
                }
            }

          icond = fold_binary_loc (loc, code, type, imag0, imag1);
          if (icond && TREE_CODE (icond) == INTEGER_CST)
            {
              if (integer_zerop (icond))
                {
                  if (code == EQ_EXPR)
                    return omit_two_operands_loc (loc, type,
                                                  boolean_false_node,
                                                  real0, real1);
                  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
                }
              else
                {
                  if (code == NE_EXPR)
                    return omit_two_operands_loc (loc, type,
                                                  boolean_true_node,
                                                  real0, real1);
                  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
                }
            }
        }

      return NULL_TREE;

    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
        return tem;
      /* Transform comparisons of the form X +- C CMP X.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
               && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
              || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
        {
          tree arg01 = TREE_OPERAND (arg0, 1);
          enum tree_code code0 = TREE_CODE (arg0);
          int is_positive;

          if (TREE_CODE (arg01) == REAL_CST)
            is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
          else
            is_positive = tree_int_cst_sgn (arg01);

          /* (X - c) > X becomes false.  */
          if (code == GT_EXPR
              && ((code0 == MINUS_EXPR && is_positive >= 0)
                  || (code0 == PLUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that (X - c) > X "
                                        "is always false"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (0, type);
            }

          /* Likewise (X + c) < X becomes false.  */
          if (code == LT_EXPR
              && ((code0 == PLUS_EXPR && is_positive >= 0)
                  || (code0 == MINUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that "
                                        "(X + c) < X is always false"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (0, type);
            }

          /* Convert (X - c) <= X to true.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
              && code == LE_EXPR
              && ((code0 == MINUS_EXPR && is_positive >= 0)
                  || (code0 == PLUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that "
                                        "(X - c) <= X is always true"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (1, type);
            }

          /* Convert (X + c) >= X to true.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
              && code == GE_EXPR
              && ((code0 == PLUS_EXPR && is_positive >= 0)
                  || (code0 == MINUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that "
                                        "(X + c) >= X is always true"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (1, type);
            }

          if (TREE_CODE (arg01) == INTEGER_CST)
            {
              /* Convert X + c > X and X - c < X to true for integers.  */
              if (code == GT_EXPR
                  && ((code0 == PLUS_EXPR && is_positive > 0)
                      || (code0 == MINUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X + c) > X is always true"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (1, type);
                }

              if (code == LT_EXPR
                  && ((code0 == MINUS_EXPR && is_positive > 0)
                      || (code0 == PLUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X - c) < X is always true"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (1, type);
                }

              /* Convert X + c <= X and X - c >= X to false for integers.  */
              if (code == LE_EXPR
                  && ((code0 == PLUS_EXPR && is_positive > 0)
                      || (code0 == MINUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X + c) <= X is always false"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (0, type);
                }

              if (code == GE_EXPR
                  && ((code0 == MINUS_EXPR && is_positive > 0)
                      || (code0 == PLUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X - c) >= X is always false"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (0, type);
                }
            }
        }
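      /* Taken together, these rules fold e.g. x + 1 > x to 1 and
         x - 1 >= x to 0 for signed x whenever signed overflow is treated
         as undefined, after recording a strict-overflow warning.  */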
      /* Comparisons with the highest or lowest possible integer of
         the specified precision will have known values.  */
      {
        tree arg1_type = TREE_TYPE (arg1);
        unsigned int width = TYPE_PRECISION (arg1_type);

        if (TREE_CODE (arg1) == INTEGER_CST
            && width <= HOST_BITS_PER_DOUBLE_INT
            && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
          {
            HOST_WIDE_INT signed_max_hi;
            unsigned HOST_WIDE_INT signed_max_lo;
            unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;

            if (width <= HOST_BITS_PER_WIDE_INT)
              {
                signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
                                - 1;
                signed_max_hi = 0;
                max_hi = 0;

                if (TYPE_UNSIGNED (arg1_type))
                  {
                    max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
                    min_lo = 0;
                    min_hi = 0;
                  }
                else
                  {
                    max_lo = signed_max_lo;
                    min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
                    min_hi = -1;
                  }
              }
            else
              {
                width -= HOST_BITS_PER_WIDE_INT;
                signed_max_lo = -1;
                signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
                                - 1;
                max_lo = -1;
                min_lo = 0;

                if (TYPE_UNSIGNED (arg1_type))
                  {
                    max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
                    min_hi = 0;
                  }
                else
                  {
                    max_hi = signed_max_hi;
                    min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
                  }
              }

            if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
                && TREE_INT_CST_LOW (arg1) == max_lo)
              switch (code)
                {
                case GT_EXPR:
                  return omit_one_operand_loc (loc, type,
                                               integer_zero_node, arg0);

                case GE_EXPR:
                  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

                case LE_EXPR:
                  return omit_one_operand_loc (loc, type,
                                               integer_one_node, arg0);

                case LT_EXPR:
                  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

                /* The GE_EXPR and LT_EXPR cases above are not normally
                   reached because of previous transformations.  */

                default:
                  break;
                }

            else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
                     == max_hi
                     && TREE_INT_CST_LOW (arg1) == max_lo - 1)
              switch (code)
                {
                case GT_EXPR:
                  arg1 = const_binop (PLUS_EXPR, arg1,
                                      build_int_cst (TREE_TYPE (arg1), 1));
                  return fold_build2_loc (loc, EQ_EXPR, type,
                                          fold_convert_loc (loc,
                                                            TREE_TYPE (arg1),
                                                            arg0),
                                          arg1);
                case LE_EXPR:
                  arg1 = const_binop (PLUS_EXPR, arg1,
                                      build_int_cst (TREE_TYPE (arg1), 1));
                  return fold_build2_loc (loc, NE_EXPR, type,
                                          fold_convert_loc (loc,
                                                            TREE_TYPE (arg1),
                                                            arg0),
                                          arg1);
                default:
                  break;
                }

            else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
                     == min_hi
                     && TREE_INT_CST_LOW (arg1) == min_lo)
              switch (code)
                {
                case LT_EXPR:
                  return omit_one_operand_loc (loc, type,
                                               integer_zero_node, arg0);

                case LE_EXPR:
                  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

                case GE_EXPR:
                  return omit_one_operand_loc (loc, type,
                                               integer_one_node, arg0);

                case GT_EXPR:
                  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

                default:
                  break;
                }

            else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
                     == min_hi
                     && TREE_INT_CST_LOW (arg1) == min_lo + 1)
              switch (code)
                {
                case GE_EXPR:
                  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
                  return fold_build2_loc (loc, NE_EXPR, type,
                                          fold_convert_loc (loc,
                                                            TREE_TYPE (arg1),
                                                            arg0),
                                          arg1);
                case LT_EXPR:
                  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
                  return fold_build2_loc (loc, EQ_EXPR, type,
                                          fold_convert_loc (loc,
                                                            TREE_TYPE (arg1),
                                                            arg0),
                                          arg1);
                default:
                  break;
                }

            else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
                     && TREE_INT_CST_LOW (arg1) == signed_max_lo
                     && TYPE_UNSIGNED (arg1_type)
                     /* We will flip the signedness of the comparison operator
                        associated with the mode of arg1, so the sign bit is
                        specified by this mode.  Check that arg1 is the signed
                        max associated with this sign bit.  */
                     && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
                     /* signed_type does not work on pointer types.  */
                     && INTEGRAL_TYPE_P (arg1_type))
              {
                /* The following case also applies to X < signed_max+1
                   and X >= signed_max+1 because of previous
                   transformations.  */
                if (code == LE_EXPR || code == GT_EXPR)
                  {
                    tree st = signed_type_for (TREE_TYPE (arg1));
                    return fold_build2_loc (loc,
                                            code == LE_EXPR
                                            ? GE_EXPR : LT_EXPR,
                                            type,
                                            fold_convert_loc (loc, st, arg0),
                                            build_int_cst (st, 0));
                  }
              }
          }
      }
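      /* For unsigned char x this folds e.g. x > 255 to 0, x >= 255 to
         x == 255, x < 255 to x != 255, and rewrites x <= 127 (the signed
         maximum) as (signed char) x >= 0 so that only the sign bit is
         tested.  */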
      /* If we are comparing an ABS_EXPR with a constant, we can
         convert all the cases into explicit comparisons, but they may
         well not be faster than doing the ABS and one comparison.
         But ABS (X) <= C is a range comparison, which becomes a subtraction
         and a comparison, and is probably faster.  */
      if (code == LE_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) == ABS_EXPR
          && ! TREE_SIDE_EFFECTS (arg0)
          && (0 != (tem = negate_expr (arg1)))
          && TREE_CODE (tem) == INTEGER_CST
          && !TREE_OVERFLOW (tem))
        return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
                                build2 (GE_EXPR, type,
                                        TREE_OPERAND (arg0, 0), tem),
                                build2 (LE_EXPR, type,
                                        TREE_OPERAND (arg0, 0), arg1));
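      /* E.g. ABS_EXPR <x> <= 5 becomes x >= -5 && x <= 5, a range check
         that avoids computing the absolute value.  */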
      /* Convert ABS_EXPR<x> >= 0 to true.  */
      strict_overflow_p = false;
      if (code == GE_EXPR
          && (integer_zerop (arg1)
              || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
                  && real_zerop (arg1)))
          && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying comparison of "
                                    "absolute value and zero"),
                                   WARN_STRICT_OVERFLOW_CONDITIONAL);
          return omit_one_operand_loc (loc, type,
                                       constant_boolean_node (true, type),
                                       arg0);
        }

      /* Convert ABS_EXPR<x> < 0 to false.  */
      strict_overflow_p = false;
      if (code == LT_EXPR
          && (integer_zerop (arg1) || real_zerop (arg1))
          && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying comparison of "
                                    "absolute value and zero"),
                                   WARN_STRICT_OVERFLOW_CONDITIONAL);
          return omit_one_operand_loc (loc, type,
                                       constant_boolean_node (false, type),
                                       arg0);
        }
      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
         and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
          && TYPE_UNSIGNED (TREE_TYPE (arg0))
          && TREE_CODE (arg1) == LSHIFT_EXPR
          && integer_onep (TREE_OPERAND (arg1, 0)))
        return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
                           build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
                                   TREE_OPERAND (arg1, 1)),
                           build_zero_cst (TREE_TYPE (arg0)));
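      /* E.g. for unsigned x, x < (1U << y) folds to (x >> y) == 0 and
         x >= (1U << y) folds to (x >> y) != 0.  */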
      /* Similarly for X < (cast) (1 << Y).  But cast can't be narrowing,
         otherwise Y might be >= # of bits in X's type and thus e.g.
         (unsigned char) (1 << Y) for Y 15 might be 0.
         If the cast is widening, then 1 << Y should have unsigned type,
         otherwise if Y is number of bits in the signed shift type minus 1,
         we can't optimize this.  E.g. (unsigned long long) (1 << Y) for Y
         31 might be 0xffffffff80000000.  */
      if ((code == LT_EXPR || code == GE_EXPR)
          && TYPE_UNSIGNED (TREE_TYPE (arg0))
          && CONVERT_EXPR_P (arg1)
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
          && (TYPE_PRECISION (TREE_TYPE (arg1))
              >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
          && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
              || (TYPE_PRECISION (TREE_TYPE (arg1))
                  == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
          && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
        {
          tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
                        TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
          return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
                             fold_convert_loc (loc, TREE_TYPE (arg0), tem),
                             build_zero_cst (TREE_TYPE (arg0)));
        }

      return NULL_TREE;
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
        {
          t1 = fold_relational_const (code, type, arg0, arg1);
          if (t1 != NULL_TREE)
            return t1;
        }

      /* If the first operand is NaN, the result is constant.  */
      if (TREE_CODE (arg0) == REAL_CST
          && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
          && (code != LTGT_EXPR || ! flag_trapping_math))
        {
          t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
               ? integer_zero_node
               : integer_one_node;
          return omit_one_operand_loc (loc, type, t1, arg1);
        }

      /* If the second operand is NaN, the result is constant.  */
      if (TREE_CODE (arg1) == REAL_CST
          && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
          && (code != LTGT_EXPR || ! flag_trapping_math))
        {
          t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
               ? integer_zero_node
               : integer_one_node;
          return omit_one_operand_loc (loc, type, t1, arg0);
        }

      /* Simplify unordered comparison of something with itself.  */
      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
          && operand_equal_p (arg0, arg1, 0))
        return constant_boolean_node (1, type);

      if (code == LTGT_EXPR
          && !flag_trapping_math
          && operand_equal_p (arg0, arg1, 0))
        return constant_boolean_node (0, type);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
        tree targ0 = strip_float_extensions (arg0);
        tree targ1 = strip_float_extensions (arg1);
        tree newtype = TREE_TYPE (targ0);

        if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
          newtype = TREE_TYPE (targ1);

        if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
          return fold_build2_loc (loc, code, type,
                                  fold_convert_loc (loc, newtype, targ0),
                                  fold_convert_loc (loc, newtype, targ1));

        return NULL_TREE;
      }
    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
         nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
        return NULL_TREE;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
                                 : fold_convert_loc (loc, type, arg1);
      return pedantic_non_lvalue_loc (loc, tem);

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg0) == REAL_CST
           && TREE_CODE (arg1) == REAL_CST)
          || (TREE_CODE (arg0) == INTEGER_CST
              && TREE_CODE (arg1) == INTEGER_CST))
        return build_complex (type, arg0, arg1);
      if (TREE_CODE (arg0) == REALPART_EXPR
          && TREE_CODE (arg1) == IMAGPART_EXPR
          && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
          && operand_equal_p (TREE_OPERAND (arg0, 0),
                              TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
                                     TREE_OPERAND (arg1, 0));
      return NULL_TREE;

    case ASSERT_EXPR:
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
      gcc_unreachable ();
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
                    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
        if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts);
        if (!vec_cst_ctor_to_array (arg0, elts)
            || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
          return NULL_TREE;

        for (i = 0; i < nelts; i++)
          {
            elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
                                          ? NOP_EXPR : FIX_TRUNC_EXPR,
                                          TREE_TYPE (type), elts[i]);
            if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
        unsigned int out, ofs, scale;
        tree *elts;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
                    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
        if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts * 4);
        if (!vec_cst_ctor_to_array (arg0, elts)
            || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
          return NULL_TREE;

        if (code == VEC_WIDEN_MULT_LO_EXPR)
          scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
        else if (code == VEC_WIDEN_MULT_HI_EXPR)
          scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
        else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
          scale = 1, ofs = 0;
        else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
          scale = 1, ofs = 1;
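        /* E.g. VEC_WIDEN_MULT_EVEN_EXPR on two V4SI vectors {a0,a1,a2,a3}
           and {b0,b1,b2,b3} selects in1 = 0 and 2, yielding the V2DI
           result {a0*b0, a2*b2}; the ODD variant yields {a1*b1, a3*b3}.  */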
        for (out = 0; out < nelts; out++)
          {
            unsigned int in1 = (out << scale) + ofs;
            unsigned int in2 = in1 + nelts * 2;
            tree t1, t2;

            t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
            t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

            if (t1 == NULL_TREE || t2 == NULL_TREE)
              return NULL_TREE;
            elts[out] = const_binop (MULT_EXPR, t1, t2);
            if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
   a LABEL_EXPR; otherwise return NULL_TREE.  Do not check the subtrees
   of GOTO_EXPR.  */

static tree
contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      return *tp;

    case GOTO_EXPR:
      *walk_subtrees = 0;

      /* ... fall through ...  */

    default:
      return NULL_TREE;
    }
}

/* Return whether the sub-tree ST contains a label which is accessible from
   outside the sub-tree.  */

static bool
contains_label_p (tree st)
{
  return
   (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
}
/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_ternary_loc (location_t loc, enum tree_code code, tree type,
                  tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 3);

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any case,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */
  if (op0)
    {
      arg0 = op0;
      STRIP_NOPS (arg0);
    }

  if (op1)
    {
      arg1 = op1;
      STRIP_NOPS (arg1);
    }

  if (op2)
    {
      arg2 = op2;
      STRIP_NOPS (arg2);
    }

  switch (code)
    {
    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
          && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
        {
          unsigned HOST_WIDE_INT idx;
          tree field, value;
          FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx,
                                    field, value)
            if (field == arg1)
              return value;
        }
      return NULL_TREE;

    case COND_EXPR:
    case VEC_COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
         so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
        {
          tree unused_op = integer_zerop (arg0) ? op1 : op2;
          tem = integer_zerop (arg0) ? op2 : op1;
          /* Only optimize constant conditions when the selected branch
             has the same type as the COND_EXPR.  This avoids optimizing
             away "c ? x : throw", where the throw has a void type.
             Avoid throwing away that operand which contains label.  */
          if ((!TREE_SIDE_EFFECTS (unused_op)
               || !contains_label_p (unused_op))
              && (! VOID_TYPE_P (TREE_TYPE (tem))
                  || VOID_TYPE_P (type)))
            return pedantic_non_lvalue_loc (loc, tem);
          return NULL_TREE;
        }
      else if (TREE_CODE (arg0) == VECTOR_CST)
        {
          if (integer_all_onesp (arg0))
            return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
          if (integer_zerop (arg0))
            return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);

          if ((TREE_CODE (arg1) == VECTOR_CST
               || TREE_CODE (arg1) == CONSTRUCTOR)
              && (TREE_CODE (arg2) == VECTOR_CST
                  || TREE_CODE (arg2) == CONSTRUCTOR))
            {
              unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
              unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
              gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
              for (i = 0; i < nelts; i++)
                {
                  tree val = VECTOR_CST_ELT (arg0, i);
                  if (integer_all_onesp (val))
                    sel[i] = i;
                  else if (integer_zerop (val))
                    sel[i] = nelts + i;
                  else /* Currently unreachable.  */
                    return NULL_TREE;
                }
              tree t = fold_vec_perm (type, arg1, arg2, sel);
              if (t != NULL_TREE)
                return t;
            }
        }

      if (operand_equal_p (arg1, op2, 0))
        return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);

      /* If we have A op B ? A : C, we may be able to convert this to a
         simpler expression, depending on the operation and the values
         of B and C.  Signed zeros prevent all of these transformations,
         for reasons given above each one.

         Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
          && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
                                             arg1, TREE_OPERAND (arg0, 1))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
        {
          tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
          if (tem)
            return tem;
        }

      if (COMPARISON_CLASS_P (arg0)
          && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
                                             op2,
                                             TREE_OPERAND (arg0, 1))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
        {
          location_t loc0 = expr_location_or (arg0, loc);
          tem = fold_truth_not_expr (loc0, arg0);
          if (tem && COMPARISON_CLASS_P (tem))
            {
              tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
              if (tem)
                return tem;
            }
        }
      /* ??? Fixup the code below for VEC_COND_EXPR.  */
      if (code == VEC_COND_EXPR)
        return NULL_TREE;

      /* If the second operand is simpler than the third, swap them
         since that produces better jump optimization results.  */
      if (truth_value_p (TREE_CODE (arg0))
          && tree_swap_operands_p (op1, op2, false))
        {
          location_t loc0 = expr_location_or (arg0, loc);
          /* See if this can be inverted.  If it can't, possibly because
             it was a floating-point inequality comparison, don't do
             anything.  */
          tem = fold_truth_not_expr (loc0, arg0);
          if (tem)
            return fold_build3_loc (loc, code, type, tem, op2, op1);
        }

      /* Convert A ? 1 : 0 to simply A.  */
      if (integer_onep (op1)
          && integer_zerop (op2)
          /* If we try to convert OP0 to our type, the
             call to fold will try to move the conversion inside
             a COND, which will recurse.  In that case, the COND_EXPR
             is probably the best choice, so leave it alone.  */
          && type == TREE_TYPE (arg0))
        return pedantic_non_lvalue_loc (loc, arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
         over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
          && integer_onep (op2)
          && truth_value_p (TREE_CODE (arg0)))
        return pedantic_non_lvalue_loc (loc,
                                        fold_convert_loc (loc, type,
                                            invert_truthvalue_loc (loc,
                                                                   arg0)));
      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
      if (TREE_CODE (arg0) == LT_EXPR
          && integer_zerop (TREE_OPERAND (arg0, 1))
          && integer_zerop (op2)
          && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
        {
          /* sign_bit_p only checks ARG1 bits within A's precision.
             If <sign bit of A> has wider type than A, bits outside
             of A's precision in <sign bit of A> need to be checked.
             If they are all 0, this optimization needs to be done
             in unsigned A's type, if they are all 1 in signed A's type,
             otherwise this can't be done.  */
          if (TYPE_PRECISION (TREE_TYPE (tem))
              < TYPE_PRECISION (TREE_TYPE (arg1))
              && TYPE_PRECISION (TREE_TYPE (tem))
                 < TYPE_PRECISION (type))
            {
              unsigned HOST_WIDE_INT mask_lo;
              HOST_WIDE_INT mask_hi;
              int inner_width, outer_width;
              tree tem_type;

              inner_width = TYPE_PRECISION (TREE_TYPE (tem));
              outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
              if (outer_width > TYPE_PRECISION (type))
                outer_width = TYPE_PRECISION (type);

              if (outer_width > HOST_BITS_PER_WIDE_INT)
                {
                  mask_hi = ((unsigned HOST_WIDE_INT) -1
                             >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
                  mask_lo = -1;
                }
              else
                {
                  mask_hi = 0;
                  mask_lo = ((unsigned HOST_WIDE_INT) -1
                             >> (HOST_BITS_PER_WIDE_INT - outer_width));
                }
              if (inner_width > HOST_BITS_PER_WIDE_INT)
                {
                  mask_hi &= ~((unsigned HOST_WIDE_INT) -1
                               >> (HOST_BITS_PER_WIDE_INT - inner_width));
                  mask_lo = 0;
                }
              else
                mask_lo &= ~((unsigned HOST_WIDE_INT) -1
                             >> (HOST_BITS_PER_WIDE_INT - inner_width));

              if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
                  && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
                {
                  tem_type = signed_type_for (TREE_TYPE (tem));
                  tem = fold_convert_loc (loc, tem_type, tem);
                }
              else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
                       && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
                {
                  tem_type = unsigned_type_for (TREE_TYPE (tem));
                  tem = fold_convert_loc (loc, tem_type, tem);
                }
              else
                tem = NULL_TREE;
            }

          if (tem)
            return
              fold_convert_loc (loc, type,
                                fold_build2_loc (loc, BIT_AND_EXPR,
                                                 TREE_TYPE (tem), tem,
                                                 fold_convert_loc (loc,
                                                     TREE_TYPE (tem),
                                                     arg1)));
        }
      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
         already handled above.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_zerop (op2)
          && integer_pow2p (arg1))
        {
          tree tem = TREE_OPERAND (arg0, 0);
          STRIP_NOPS (tem);
          if (TREE_CODE (tem) == RSHIFT_EXPR
              && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
              && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
                 == TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
            return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                    TREE_OPERAND (tem, 0), arg1);
        }
      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
         is probably obsolete because the first operand should be a
         truth value (that's why we have the two cases above), but let's
         leave it in until we can confirm this for all front-ends.  */
      if (integer_zerop (op2)
          && TREE_CODE (arg0) == NE_EXPR
          && integer_zerop (TREE_OPERAND (arg0, 1))
          && integer_pow2p (arg1)
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              arg1, OEP_ONLY_CONST))
        return pedantic_non_lvalue_loc (loc,
                                        fold_convert_loc (loc, type,
                                            TREE_OPERAND (arg0, 0)));
      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (op2)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (arg1)))
        return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
                                fold_convert_loc (loc, type, arg0),
                                arg1);

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (integer_onep (op2)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (arg1)))
        {
          location_t loc0 = expr_location_or (arg0, loc);
          /* Only perform transformation if ARG0 is easily inverted.  */
          tem = fold_truth_not_expr (loc0, arg0);
          if (tem)
            return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
                                    fold_convert_loc (loc, type, tem),
                                    arg1);
        }

      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
      if (integer_zerop (arg1)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (op2)))
        {
          location_t loc0 = expr_location_or (arg0, loc);
          /* Only perform transformation if ARG0 is easily inverted.  */
          tem = fold_truth_not_expr (loc0, arg0);
          if (tem)
            return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
                                    fold_convert_loc (loc, type, tem),
                                    op2);
        }

      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if (integer_onep (arg1)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (op2)))
        return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
                                fold_convert_loc (loc, type, arg0),
                                op2);

      return NULL_TREE;

    case CALL_EXPR:
      /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
         of fold_ternary on them.  */
      gcc_unreachable ();
    case BIT_FIELD_REF:
      if ((TREE_CODE (arg0) == VECTOR_CST
           || (TREE_CODE (arg0) == CONSTRUCTOR
               && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
          && (type == TREE_TYPE (TREE_TYPE (arg0))
              || (TREE_CODE (type) == VECTOR_TYPE
                  && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
        {
          tree eltype = TREE_TYPE (TREE_TYPE (arg0));
          unsigned HOST_WIDE_INT width = tree_low_cst (TYPE_SIZE (eltype), 1);
          unsigned HOST_WIDE_INT n = tree_low_cst (arg1, 1);
          unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);

          if (n != 0
              && (idx % width) == 0
              && (n % width) == 0
              && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
            {
              idx = idx / width;
              n = n / width;

              if (TREE_CODE (arg0) == VECTOR_CST)
                {
                  if (n == 1)
                    return VECTOR_CST_ELT (arg0, idx);

                  tree *vals = XALLOCAVEC (tree, n);
                  for (unsigned i = 0; i < n; ++i)
                    vals[i] = VECTOR_CST_ELT (arg0, idx + i);
                  return build_vector (type, vals);
                }

              /* Constructor elements can be subvectors.  */
              unsigned HOST_WIDE_INT k = 1;
              if (CONSTRUCTOR_NELTS (arg0) != 0)
                {
                  tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
                  if (TREE_CODE (cons_elem) == VECTOR_TYPE)
                    k = TYPE_VECTOR_SUBPARTS (cons_elem);
                }

              /* We keep an exact subset of the constructor elements.  */
              if ((idx % k) == 0 && (n % k) == 0)
                {
                  if (CONSTRUCTOR_NELTS (arg0) == 0)
                    return build_constructor (type, NULL);
                  idx /= k;
                  n /= k;
                  if (n == 1)
                    {
                      if (idx < CONSTRUCTOR_NELTS (arg0))
                        return CONSTRUCTOR_ELT (arg0, idx)->value;
                      return build_zero_cst (type);
                    }
                  vec<constructor_elt, va_gc> *vals;
                  vec_alloc (vals, n);
                  for (unsigned i = 0;
                       i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
                       ++i)
                    CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
                                            CONSTRUCTOR_ELT
                                              (arg0, idx + i)->value);
                  return build_constructor (type, vals);
                }
              /* The bitfield references a single constructor element.  */
              else if (idx + n <= (idx / k + 1) * k)
                {
                  if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
                    return build_zero_cst (type);
                  else if (n == k)
                    return CONSTRUCTOR_ELT (arg0, idx / k)->value;
                  else
                    return fold_build3_loc (loc, code, type,
                      CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
                      build_int_cst (TREE_TYPE (op2), (idx % k) * width));
                }
            }
        }
      /* A bit-field-ref that referenced the full argument can be stripped.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
          && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
          && integer_zerop (op2))
        return fold_convert_loc (loc, type, arg0);
      /* On constants we can use native encode/interpret to constant
         fold (nearly) all BIT_FIELD_REFs.  */
      if (CONSTANT_CLASS_P (arg0)
          && can_native_interpret_type_p (type)
          && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1)
          /* This limitation should not be necessary, we just need to
             round this up to mode size.  */
          && tree_low_cst (op1, 1) % BITS_PER_UNIT == 0
          /* Need bit-shifting of the buffer to relax the following.  */
          && tree_low_cst (op2, 1) % BITS_PER_UNIT == 0)
        {
          unsigned HOST_WIDE_INT bitpos = tree_low_cst (op2, 1);
          unsigned HOST_WIDE_INT bitsize = tree_low_cst (op1, 1);
          unsigned HOST_WIDE_INT clen;
          clen = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1);
          /* ??? We cannot tell native_encode_expr to start at
             some random byte only.  So limit us to a reasonable amount
             of work.  */
          if (clen <= 4096)
            {
              unsigned char *b = XALLOCAVEC (unsigned char, clen);
              unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
              if (len > 0
                  && len * BITS_PER_UNIT >= bitpos + bitsize)
                {
                  tree v = native_interpret_expr (type,
                                                  b + bitpos / BITS_PER_UNIT,
                                                  bitsize / BITS_PER_UNIT);
                  if (v)
                    return v;
                }
            }
        }

      return NULL_TREE;

    case FMA_EXPR:
      /* For integers we can decompose the FMA if possible.  */
      if (TREE_CODE (arg0) == INTEGER_CST
          && TREE_CODE (arg1) == INTEGER_CST)
        return fold_build2_loc (loc, PLUS_EXPR, type,
                                const_binop (MULT_EXPR, arg0, arg1), arg2);
      if (integer_zerop (arg2))
        return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);

      return fold_fma (loc, type, arg0, arg1, arg2);
    case VEC_PERM_EXPR:
      if (TREE_CODE (arg2) == VECTOR_CST)
        {
          unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
          unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
          tree t;
          bool need_mask_canon = false;
          bool all_in_vec0 = true;
          bool all_in_vec1 = true;
          bool maybe_identity = true;
          bool single_arg = (op0 == op1);
          bool changed = false;

          mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
          gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
          for (i = 0; i < nelts; i++)
            {
              tree val = VECTOR_CST_ELT (arg2, i);
              if (TREE_CODE (val) != INTEGER_CST)
                return NULL_TREE;

              sel[i] = TREE_INT_CST_LOW (val) & mask;
              if (TREE_INT_CST_HIGH (val)
                  || ((unsigned HOST_WIDE_INT)
                      TREE_INT_CST_LOW (val) != sel[i]))
                need_mask_canon = true;

              if (sel[i] < nelts)
                all_in_vec1 = false;
              else
                all_in_vec0 = false;

              if ((sel[i] & (nelts - 1)) != i)
                maybe_identity = false;
            }

          if (maybe_identity)
            {
              if (all_in_vec0)
                return op0;
              if (all_in_vec1)
                return op1;
            }

          if (all_in_vec0)
            op1 = op0;
          else if (all_in_vec1)
            {
              op0 = op1;
              for (i = 0; i < nelts; i++)
                sel[i] -= nelts;
              need_mask_canon = true;
            }

          if ((TREE_CODE (op0) == VECTOR_CST
               || TREE_CODE (op0) == CONSTRUCTOR)
              && (TREE_CODE (op1) == VECTOR_CST
                  || TREE_CODE (op1) == CONSTRUCTOR))
            {
              t = fold_vec_perm (type, op0, op1, sel);
              if (t != NULL_TREE)
                return t;
            }

          if (op0 == op1 && !single_arg)
            changed = true;

          if (need_mask_canon && arg2 == op2)
            {
              tree *tsel = XALLOCAVEC (tree, nelts);
              tree eltype = TREE_TYPE (TREE_TYPE (arg2));
              for (i = 0; i < nelts; i++)
                tsel[i] = build_int_cst (eltype, sel[i]);
              op2 = build_vector (TREE_TYPE (arg2), tsel);
              changed = true;
            }

          if (changed)
            return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
        }
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;
  location_t loc = EXPR_LOCATION (expr);

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  /* CALL_EXPR-like objects with variable numbers of operands are
     treated specially.  */
  if (kind == tcc_vl_exp)
    {
      if (code == CALL_EXPR)
        {
          tem = fold_call_expr (loc, expr, false);
          return tem ? tem : expr;
        }
      return expr;
    }

  if (IS_EXPR_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);
      tree op0, op1, op2;

      switch (TREE_CODE_LENGTH (code))
        {
        case 1:
          op0 = TREE_OPERAND (t, 0);
          tem = fold_unary_loc (loc, code, type, op0);
          return tem ? tem : expr;
        case 2:
          op0 = TREE_OPERAND (t, 0);
          op1 = TREE_OPERAND (t, 1);
          tem = fold_binary_loc (loc, code, type, op0, op1);
          return tem ? tem : expr;
        case 3:
          op0 = TREE_OPERAND (t, 0);
          op1 = TREE_OPERAND (t, 1);
          op2 = TREE_OPERAND (t, 2);
          tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
          return tem ? tem : expr;
        default:
          break;
        }
    }

  switch (code)
    {
    case ARRAY_REF:
      {
        tree op0 = TREE_OPERAND (t, 0);
        tree op1 = TREE_OPERAND (t, 1);

        if (TREE_CODE (op1) == INTEGER_CST
            && TREE_CODE (op0) == CONSTRUCTOR
            && ! type_contains_placeholder_p (TREE_TYPE (op0)))
          {
            vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
            unsigned HOST_WIDE_INT end = vec_safe_length (elts);
            unsigned HOST_WIDE_INT begin = 0;

            /* Find a matching index by means of a binary search.  */
            while (begin != end)
              {
                unsigned HOST_WIDE_INT middle = (begin + end) / 2;
                tree index = (*elts)[middle].index;

                if (TREE_CODE (index) == INTEGER_CST
                    && tree_int_cst_lt (index, op1))
                  begin = middle + 1;
                else if (TREE_CODE (index) == INTEGER_CST
                         && tree_int_cst_lt (op1, index))
                  end = middle;
                else if (TREE_CODE (index) == RANGE_EXPR
                         && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
                  begin = middle + 1;
                else if (TREE_CODE (index) == RANGE_EXPR
                         && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
                  end = middle;
                else
                  return (*elts)[middle].value;
              }
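            /* The search relies on the CONSTRUCTOR's indexes being sorted;
               e.g. in {[0]=a, [2..4]=b, [7]=c} a lookup of index 3 stops
               at the RANGE_EXPR entry and yields b.  */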
          }

        return t;
      }

      /* Return a VECTOR_CST if possible.  */
    case CONSTRUCTOR:
      {
        tree type = TREE_TYPE (t);
        if (TREE_CODE (type) != VECTOR_TYPE)
          return t;

        tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
        unsigned HOST_WIDE_INT idx, pos = 0;
        tree value;

        FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
          {
            if (!CONSTANT_CLASS_P (value))
              return t;
            if (TREE_CODE (value) == VECTOR_CST)
              {
                for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
                  vec[pos++] = VECTOR_CST_ELT (value, i);
              }
            else
              vec[pos++] = value;
          }
        for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
          vec[pos] = build_zero_cst (TREE_TYPE (type));

        return build_vector (type, vec);
      }

    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    default:
      return t;
    } /* switch (code) */
}
#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (const_tree, struct md5_ctx *,
                                hash_table <pointer_hash <tree_node> >);
static void fold_check_failed (const_tree, const_tree);
void print_fold_checksum (const_tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */
tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  hash_table <pointer_hash <tree_node> > ht;

  ht.create (32);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  ht.empty ();

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  ht.dispose ();

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}
)
14650 struct md5_ctx ctx
;
14651 unsigned char checksum
[16], cnt
;
14652 hash_table
<pointer_hash
<tree_node
> > ht
;
14655 md5_init_ctx (&ctx
);
14656 fold_checksum_tree (expr
, &ctx
, ht
);
14657 md5_finish_ctx (&ctx
, checksum
);
14659 for (cnt
= 0; cnt
< 16; ++cnt
)
14660 fprintf (stderr
, "%02x", checksum
[cnt
]);
14661 putc ('\n', stderr
);
static void
fold_check_failed (const_tree expr ATTRIBUTE_UNUSED,
                   const_tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}
static void
fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
                    hash_table <pointer_hash <tree_node> > ht)
{
  tree_node **slot;
  enum tree_code code;
  union tree_node buf;
  int i, len;

 recursive_label:
  if (expr == NULL)
    return;
  slot = ht.find_slot (expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = CONST_CAST_TREE (expr);
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      SET_DECL_ASSEMBLER_NAME ((tree) &buf, NULL);
      expr = (tree) &buf;
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
           && (TYPE_POINTER_TO (expr)
               || TYPE_REFERENCE_TO (expr)
               || TYPE_CACHED_VALUES_P (expr)
               || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
               || TYPE_NEXT_VARIANT (expr)))
    {
      /* Allow these fields to be modified.  */
      tree tmp;
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = tmp = (tree) &buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
      TYPE_POINTER_TO (tmp) = NULL;
      TYPE_REFERENCE_TO (tmp) = NULL;
      TYPE_NEXT_VARIANT (tmp) = NULL;
      if (TYPE_CACHED_VALUES_P (tmp))
        {
          TYPE_CACHED_VALUES_P (tmp) = 0;
          TYPE_CACHED_VALUES (tmp) = NULL;
        }
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST
      && code != SSA_NAME
      && CODE_CONTAINS_STRUCT (code, TS_COMMON))
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
        {
        case STRING_CST:
          md5_process_bytes (TREE_STRING_POINTER (expr),
                             TREE_STRING_LENGTH (expr), ctx);
          break;
        case COMPLEX_CST:
          fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
          fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
          break;
        case VECTOR_CST:
          for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
            fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
          break;
        default:
          break;
        }
      break;
    case tcc_exceptional:
      switch (code)
        {
        case TREE_LIST:
          fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
          fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
          expr = TREE_CHAIN (expr);
          goto recursive_label;
          break;
        case TREE_VEC:
          for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
            fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
          break;
        default:
          break;
        }
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
    case tcc_vl_exp:
      len = TREE_OPERAND_LENGTH (expr);
      for (i = 0; i < len; ++i)
        fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
        {
          fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
          fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
          fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
          fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
          fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
        }
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
        fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);

      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
        {
          fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
          fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
          fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
        }
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
        fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
          || SCALAR_FLOAT_TYPE_P (expr))
        {
          fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
          fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
        }
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
          || TREE_CODE (expr) == UNION_TYPE
          || TREE_CODE (expr) == QUAL_UNION_TYPE)
        fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}
/* Helper function for outputting the checksum of a tree T.  When
   debugging with gdb, you can "define mynext" to be "next" followed
   by "call debug_fold_checksum (op0)", then just trace down till the
   outputs differ.  */

DEBUG_FUNCTION void
debug_fold_checksum (const_tree t)
{
  int i;
  unsigned char checksum[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <tree_node> > ht;
  ht.create (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (t, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  ht.empty ();

  for (i = 0; i < 16; i++)
    fprintf (stderr, "%d ", checksum[i]);

  fprintf (stderr, "\n");
}

#endif
/* Fold a unary tree expression with code CODE of type TYPE with an
   operand OP0.  LOC is the location of the resulting expression.
   Return a folded expression if successful.  Otherwise, return a tree
   expression with code CODE of type TYPE with an operand OP0.  */

tree
fold_build1_stat_loc (location_t loc,
                      enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before[16], checksum_after[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <tree_node> > ht;

  ht.create (32);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  ht.empty ();
#endif

  tem = fold_unary_loc (loc, code, type, op0);
  if (!tem)
    tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  ht.dispose ();

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (op0, tem);
#endif
  return tem;
}
/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting
   expression.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with operands
   OP0 and OP1.  */

tree
fold_build2_stat_loc (location_t loc,
                      enum tree_code code, tree type, tree op0,
                      tree op1 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_after_op0[16],
                checksum_after_op1[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <tree_node> > ht;

  ht.create (32);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  ht.empty ();
#endif

  tem = fold_binary_loc (loc, code, type, op0, op1);
  if (!tem)
    tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  ht.empty ();

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  ht.dispose ();

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
#endif
  return tem;
}
/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */

tree
fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
                      tree op0, tree op1, tree op2 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_before_op2[16],
                checksum_after_op0[16],
                checksum_after_op1[16],
                checksum_after_op2[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <tree_node> > ht;

  ht.create (32);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  ht.empty ();
#endif

  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
  if (!tem)
    tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  ht.empty ();

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  ht.empty ();

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op2);
  ht.dispose ();

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}
/* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
   arguments in ARGARRAY, and a null static chain.
   Return a folded expression if successful.  Otherwise, return a CALL_EXPR
   of type TYPE from the given operands as constructed by build_call_array.  */

tree
fold_build_call_array_loc (location_t loc, tree type, tree fn,
                           int nargs, tree *argarray)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_fn[16],
                checksum_before_arglist[16],
                checksum_after_fn[16],
                checksum_after_arglist[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <tree_node> > ht;
  int i;

  ht.create (32);
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_fn);
  ht.empty ();

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_arglist);
  ht.empty ();
#endif

  tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_fn);
  ht.empty ();

  if (memcmp (checksum_before_fn, checksum_after_fn, 16))
    fold_check_failed (fn, tem);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_arglist);
  ht.dispose ();

  if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
    fold_check_failed (NULL_TREE, tem);
#endif
  return tem;
}
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;

tree
fold_build1_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build1_loc (loc, code, type, op);

  END_FOLD_INIT;
  return result;
}

tree
fold_build2_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op0, tree op1)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build2_loc (loc, code, type, op0, op1);

  END_FOLD_INIT;
  return result;
}

tree
fold_build3_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op0, tree op1, tree op2)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build3_loc (loc, code, type, op0, op1, op2);

  END_FOLD_INIT;
  return result;
}

tree
fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
				       int nargs, tree *argarray)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);

  END_FOLD_INIT;
  return result;
}

#undef START_FOLD_INIT
#undef END_FOLD_INIT
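
/* Usage sketch (illustrative only, not part of the original sources):
   when the front end folds a static initializer such as
   "static double d = 1.0 / 3.0;" it can call

     tree t = fold_build2_initializer_loc (loc, RDIV_EXPR,
                                           double_type_node, op0, op1);

   with OP0 and OP1 hypothetical REAL_CST operands; the wrappers above
   temporarily clear the trapping and rounding flags, so the division is
   evaluated at compile time even under -ftrapping-math, which is safe
   because an initializer cannot trap at run time anyway.  */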
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */
int
multiple_of_p (tree type, const_tree top, const_tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
	return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
	{
	  tree op1, t1;

	  op1 = TREE_OPERAND (top, 1);
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
	      > TREE_INT_CST_LOW (op1)
	      && TREE_INT_CST_HIGH (op1) == 0
	      && 0 != (t1 = fold_convert (type,
					  const_binop (LSHIFT_EXPR,
						       size_one_node,
						       op1)))
	      && !TREE_OVERFLOW (t1))
	    return multiple_of_p (type, t1, bottom);
	}
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
	  || (TYPE_PRECISION (type)
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
	return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case COND_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
	  || integer_zerop (bottom)
	  || (TYPE_UNSIGNED (type)
	      && (tree_int_cst_sgn (top) < 0
		  || tree_int_cst_sgn (bottom) < 0)))
	return 0;
      return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
					     top, bottom));

    default:
      return 0;
    }
}
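
/* Worked instance (illustrative, not from the original sources): with
   TOP = "i * 8 + 16" and BOTTOM = 8 in sizetype, the PLUS_EXPR case
   recurses into both operands; the MULT_EXPR succeeds through its
   constant operand 8, and 16 % 8 == 0 for the INTEGER_CST, so

     tree eight = build_int_cst (sizetype, 8);
     multiple_of_p (sizetype, top, eight);   => returns 1

   which lets callers such as round_up_loc skip the CEIL_DIV/MULT pair.  */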
/* Return true if CODE or TYPE is known to be non-negative.  */

static bool
tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
{
  if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
      && truth_value_p (code))
    /* Truth values evaluate to 0 or 1, which is nonnegative unless we
       have a signed:1 type (where the value is -1 and 0).  */
    return true;

  return false;
}
/* Return true if (CODE OP0) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
				bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
	 ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!INTEGRAL_TYPE_P (type))
	return true;
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  *strict_overflow_p = true;
	  return true;
	}
      break;

    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
					    strict_overflow_p);

    CASE_CONVERT:
      {
	tree inner_type = TREE_TYPE (op0);
	tree outer_type = type;

	if (TREE_CODE (outer_type) == REAL_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_warnv_p (op0,
						    strict_overflow_p);
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      {
		if (TYPE_UNSIGNED (inner_type))
		  return true;
		return tree_expr_nonnegative_warnv_p (op0,
						      strict_overflow_p);
	      }
	  }
	else if (TREE_CODE (outer_type) == INTEGER_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_warnv_p (op0,
						    strict_overflow_p);
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
		     && TYPE_UNSIGNED (inner_type);
	  }
      }
      break;

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
				 tree op1, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type))
	return (tree_expr_nonnegative_warnv_p (op0,
					       strict_overflow_p)
		&& tree_expr_nonnegative_warnv_p (op1,
						  strict_overflow_p));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
	 both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TREE_CODE (op0) == NOP_EXPR
	  && TREE_CODE (op1) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    {
	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
				       TYPE_PRECISION (inner2)) + 1;
	      return prec < TYPE_PRECISION (type);
	    }
	}
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* x * x is always non-negative for floating point x
	     or without overflow.  */
	  if (operand_equal_p (op0, op1, 0)
	      || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
		  && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (type))
		*strict_overflow_p = true;
	      return true;
	    }
	}

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
	  && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
	{
	  tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
	    ? TREE_TYPE (TREE_OPERAND (op0, 0))
	    : TREE_TYPE (op0);
	  tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
	    ? TREE_TYPE (TREE_OPERAND (op1, 0))
	    : TREE_TYPE (op1);

	  bool unsigned0 = TYPE_UNSIGNED (inner0);
	  bool unsigned1 = TYPE_UNSIGNED (inner1);

	  if (TREE_CODE (op0) == INTEGER_CST)
	    unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;

	  if (TREE_CODE (op1) == INTEGER_CST)
	    unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;

	  if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
	      && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
	    {
	      unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
		? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
		: TYPE_PRECISION (inner0);

	      unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
		? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
		: TYPE_PRECISION (inner1);

	      return precision0 + precision1 < TYPE_PRECISION (type);
	    }
	}
      break;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
					     strict_overflow_p)
	      || tree_expr_nonnegative_warnv_p (op1,
						strict_overflow_p));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
					     strict_overflow_p)
	      && tree_expr_nonnegative_warnv_p (op1,
						strict_overflow_p));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
					    strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
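
/* A worked instance of the precision test above (illustrative, not from
   the original sources): for
   "(int) (unsigned char) x * (int) (unsigned char) y" we get
   precision0 + precision1 == 8 + 8 == 16, which is less than the 32-bit
   result precision, so the product lies in [0, 65025] and

     bool ovf = false;
     tree_binary_nonnegative_warnv_p (MULT_EXPR, integer_type_node,
                                      op0, op1, &ovf);   => true

   without touching OVF, i.e. with no overflow assumption at all.  */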
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case FIXED_CST:
      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));

    case COND_EXPR:
      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
					     strict_overflow_p)
	      && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
						strict_overflow_p));

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_call_nonnegative_warnv_p (tree type, tree fndecl,
			       tree arg0, tree arg1, bool *strict_overflow_p)
{
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
	CASE_FLT_FN (BUILT_IN_ACOS):
	CASE_FLT_FN (BUILT_IN_ACOSH):
	CASE_FLT_FN (BUILT_IN_CABS):
	CASE_FLT_FN (BUILT_IN_COSH):
	CASE_FLT_FN (BUILT_IN_ERFC):
	CASE_FLT_FN (BUILT_IN_EXP):
	CASE_FLT_FN (BUILT_IN_EXP10):
	CASE_FLT_FN (BUILT_IN_EXP2):
	CASE_FLT_FN (BUILT_IN_FABS):
	CASE_FLT_FN (BUILT_IN_FDIM):
	CASE_FLT_FN (BUILT_IN_HYPOT):
	CASE_FLT_FN (BUILT_IN_POW10):
	CASE_INT_FN (BUILT_IN_FFS):
	CASE_INT_FN (BUILT_IN_PARITY):
	CASE_INT_FN (BUILT_IN_POPCOUNT):
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
	/* Always true.  */
	return true;

	CASE_FLT_FN (BUILT_IN_SQRT):
	/* sqrt(-0.0) is -0.0.  */
	if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	  return true;
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_ASINH):
	CASE_FLT_FN (BUILT_IN_ATAN):
	CASE_FLT_FN (BUILT_IN_ATANH):
	CASE_FLT_FN (BUILT_IN_CBRT):
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_ERF):
	CASE_FLT_FN (BUILT_IN_EXPM1):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_FMOD):
	CASE_FLT_FN (BUILT_IN_FREXP):
	CASE_FLT_FN (BUILT_IN_ICEIL):
	CASE_FLT_FN (BUILT_IN_IFLOOR):
	CASE_FLT_FN (BUILT_IN_IRINT):
	CASE_FLT_FN (BUILT_IN_IROUND):
	CASE_FLT_FN (BUILT_IN_LCEIL):
	CASE_FLT_FN (BUILT_IN_LDEXP):
	CASE_FLT_FN (BUILT_IN_LFLOOR):
	CASE_FLT_FN (BUILT_IN_LLCEIL):
	CASE_FLT_FN (BUILT_IN_LLFLOOR):
	CASE_FLT_FN (BUILT_IN_LLRINT):
	CASE_FLT_FN (BUILT_IN_LLROUND):
	CASE_FLT_FN (BUILT_IN_LRINT):
	CASE_FLT_FN (BUILT_IN_LROUND):
	CASE_FLT_FN (BUILT_IN_MODF):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_SCALB):
	CASE_FLT_FN (BUILT_IN_SCALBLN):
	CASE_FLT_FN (BUILT_IN_SCALBN):
	CASE_FLT_FN (BUILT_IN_SIGNBIT):
	CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
	CASE_FLT_FN (BUILT_IN_SINH):
	CASE_FLT_FN (BUILT_IN_TANH):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	/* True if the 1st argument is nonnegative.  */
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_FMAX):
	/* True if the 1st OR 2nd arguments are nonnegative.  */
	return (tree_expr_nonnegative_warnv_p (arg0,
					       strict_overflow_p)
		|| (tree_expr_nonnegative_warnv_p (arg1,
						   strict_overflow_p)));

	CASE_FLT_FN (BUILT_IN_FMIN):
	/* True if the 1st AND 2nd arguments are nonnegative.  */
	return (tree_expr_nonnegative_warnv_p (arg0,
					       strict_overflow_p)
		&& (tree_expr_nonnegative_warnv_p (arg1,
						   strict_overflow_p)));

	CASE_FLT_FN (BUILT_IN_COPYSIGN):
	/* True if the 2nd argument is nonnegative.  */
	return tree_expr_nonnegative_warnv_p (arg1,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_POWI):
	/* True if the 1st argument is nonnegative or the second
	   argument is an even integer.  */
	if (TREE_CODE (arg1) == INTEGER_CST
	    && (TREE_INT_CST_LOW (arg1) & 1) == 0)
	  return true;
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_POW):
	/* True if the 1st argument is nonnegative or the second
	   argument is an even integer valued real.  */
	if (TREE_CODE (arg1) == REAL_CST)
	  {
	    REAL_VALUE_TYPE c;
	    HOST_WIDE_INT n;

	    c = TREE_REAL_CST (arg1);
	    n = real_to_integer (&c);
	    if ((n & 1) == 0)
	      {
		REAL_VALUE_TYPE cint;
		real_from_integer (&cint, VOIDmode, n,
				   n < 0 ? -1 : 0, 0);
		if (real_identical (&c, &cint))
		  return true;
	      }
	  }
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

      default:
	break;
      }
  return tree_simple_nonnegative_warnv_p (CALL_EXPR,
					  type);
}
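
/* Hedged example (not in the original sources): "pow (x, 2.0)" is
   accepted by the BUILT_IN_POW case above because 2.0 is an even
   integer valued REAL_CST: real_to_integer yields n == 2, (n & 1) == 0,
   and real_from_integer/real_identical confirm that 2.0 was integral,
   so the result is known non-negative for any x.  An odd or fractional
   exponent instead defers to the sign of the first argument.  */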
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

static bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code = TREE_CODE (t);
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (code)
    {
    case TARGET_EXPR:
      {
	tree temp = TARGET_EXPR_SLOT (t);
	t = TARGET_EXPR_INITIAL (t);

	/* If the initializer is non-void, then it's a normal expression
	   that will be assigned to the slot.  */
	if (!VOID_TYPE_P (t))
	  return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);

	/* Otherwise, the initializer sets the slot in some way.  One common
	   way is an assignment statement at the end of the initializer.  */
	while (1)
	  {
	    if (TREE_CODE (t) == BIND_EXPR)
	      t = expr_last (BIND_EXPR_BODY (t));
	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
		     || TREE_CODE (t) == TRY_CATCH_EXPR)
	      t = expr_last (TREE_OPERAND (t, 0));
	    else if (TREE_CODE (t) == STATEMENT_LIST)
	      t = expr_last (t);
	    else
	      break;
	  }
	if (TREE_CODE (t) == MODIFY_EXPR
	    && TREE_OPERAND (t, 0) == temp)
	  return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
						strict_overflow_p);

	return false;
      }

    case CALL_EXPR:
      {
	tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
	tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;

	return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
					      get_callee_fndecl (t),
					      arg0,
					      arg1,
					      strict_overflow_p);
      }
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
					    strict_overflow_p);
    case BIND_EXPR:
      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
					    strict_overflow_p);
    case SAVE_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
					    strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code;
  if (t == error_mark_node)
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p);

    case tcc_unary:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p);
    case TRUTH_NOT_EXPR:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
    }
}
/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */

bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-negative"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
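
/* Illustrative caller (a hypothetical sketch, not a quote from the
   sources): a simplification such as ABS_EXPR<t> -> t is valid exactly
   when this predicate holds,

     if (tree_expr_nonnegative_p (t))
       return fold_convert_loc (loc, type, t);

   and any -Wstrict-overflow diagnostic the answer depended on has
   already been issued by the wrapper above.  */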
/* Return true when (CODE OP0) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
			    bool *strict_overflow_p)
{
  switch (code)
    {
    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
					strict_overflow_p);

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (op0);
	tree outer_type = type;

	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
		&& tree_expr_nonzero_warnv_p (op0,
					      strict_overflow_p));
      }
      break;

    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
					strict_overflow_p);

    default:
      break;
    }

  return false;
}
/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonzero_warnv_p (enum tree_code code,
			     tree type,
			     tree op0,
			     tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* With the presence of negative values it is hard
	     to say something.  */
	  sub_strict_overflow_p = false;
	  if (!tree_expr_nonnegative_warnv_p (op0,
					      &sub_strict_overflow_p)
	      || !tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	    return false;
	  /* One of operands must be positive and the other non-negative.  */
	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
	     overflows, on a twos-complement machine the sum of two
	     nonnegative numbers can never be zero.  */
	  return (tree_expr_nonzero_warnv_p (op0,
					     strict_overflow_p)
		  || tree_expr_nonzero_warnv_p (op1,
						strict_overflow_p));
	}
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  if (tree_expr_nonzero_warnv_p (op0,
					 strict_overflow_p)
	      && tree_expr_nonzero_warnv_p (op1,
					    strict_overflow_p))
	    {
	      *strict_overflow_p = true;
	      return true;
	    }
	}
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (op1,
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;

	  /* When both operands are nonzero, then MAX must be too.  */
	  if (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p))
	    return true;

	  /* MAX where operand 0 is positive is positive.  */
	  return tree_expr_nonnegative_warnv_p (op0,
						strict_overflow_p);
	}
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1,
					  &sub_strict_overflow_p)
	       && tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p)
	      || tree_expr_nonzero_warnv_p (op0,
					    strict_overflow_p));

    default:
      break;
    }

  return false;
}
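
/* Sketch (illustrative only): for a sum "p + i" handled by the
   POINTER_PLUS_EXPR case above, with both operands known non-negative
   and at least one known nonzero, the answer is true without touching
   *STRICT_OVERFLOW_P, since on a twos-complement machine the sum of
   two non-negative values is zero only when both are zero.  */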
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
	tree base = TREE_OPERAND (t, 0);
	if (!DECL_P (base))
	  base = get_base_address (base);

	if (!base)
	  return false;

	/* Weak declarations may link to NULL.  Other things may also be NULL
	   so protect with -fdelete-null-pointer-checks; but not variables
	   allocated on the stack.  */
	if (DECL_P (base)
	    && (flag_delete_null_pointer_checks
		|| (DECL_CONTEXT (base)
		    && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
		    && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
	  return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);

	/* Constants are never weak.  */
	if (CONSTANT_CLASS_P (base))
	  return true;

	return false;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    default:
      break;
    }
  return false;
}
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					strict_overflow_p);

    case CALL_EXPR:
      return alloca_call_p (t);

    default:
      break;
    }
  return false;
}
/* Return true when T is an address and is known to be nonzero.
   Handle warnings about undefined signed overflow.  */

bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-zero"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
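
/* Hypothetical caller sketch (not from the original sources): a
   comparison folder can use this predicate to decide "&x != 0":

     if (tree_expr_nonzero_p (arg0))
       return constant_boolean_node (code == NE_EXPR, type);

   where ARG0, CODE and TYPE come from the comparison being folded.  */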
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;
      location_t loc = EXPR_LOCATION (exp);

      if (TREE_CODE (exp) == INDIRECT_REF)
	string = string_constant (exp1, &index);
      else
	{
	  tree low_bound = array_ref_low_bound (exp);
	  index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));

	  /* Optimize the special-case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion, (ARRAY
	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
	  if (! integer_zerop (low_bound))
	    index = size_diffop_loc (loc, index,
				     fold_convert_loc (loc, sizetype,
						       low_bound));

	  string = exp1;
	}

      if (string
	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
	  && TREE_CODE (string) == STRING_CST
	  && TREE_CODE (index) == INTEGER_CST
	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
	      == MODE_INT)
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
	return build_int_cst_type (TREE_TYPE (exp),
				   (TREE_STRING_POINTER (string)
				    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}
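
/* Hedged example: for the C expression "abc"[1], EXP is an ARRAY_REF of
   a STRING_CST with INTEGER_CST index 1, and the code above returns

     build_int_cst_type (TREE_TYPE (exp), 'b');

   A non-constant index, or an index at or beyond TREE_STRING_LENGTH,
   falls through and returns NULL instead.  */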
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	double_int val = tree_to_double_int (arg0);
	bool overflow;
	val = val.neg_with_overflow (&overflow);
	t = force_fit_type_double (type, val, 1,
				   (overflow | TREE_OVERFLOW (arg0))
				   && !TYPE_UNSIGNED (type));
	break;
      }

    case REAL_CST:
      t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
	FIXED_VALUE_TYPE f;
	bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
					    &(TREE_FIXED_CST (arg0)), NULL,
					    TYPE_SATURATING (type));
	t = build_fixed (type, f);
	/* Propagate overflow flags.  */
	if (overflow_p | TREE_OVERFLOW (arg0))
	  TREE_OVERFLOW (t) = 1;
	break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	double_int val = tree_to_double_int (arg0);

        /* If the value is unsigned or non-negative, then the absolute value
	   is the same as the ordinary value.  */
	if (TYPE_UNSIGNED (type)
	    || !val.is_negative ())
	  t = arg0;

	/* If the value is negative, then the absolute value is
	   its negation.  */
	else
	  {
	    bool overflow;
	    val = val.neg_with_overflow (&overflow);
	    t = force_fit_type_double (type, val, -1,
				       overflow | TREE_OVERFLOW (arg0));
	  }
      }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
	t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      else
	t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (const_tree arg0, tree type)
{
  double_int val;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  val = ~tree_to_double_int (arg0);
  return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
}
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case ORDERED_EXPR:
	      result = 0;
	      break;

	    case NE_EXPR:
	    case UNORDERED_EXPR:
	    case UNLT_EXPR:
	    case UNLE_EXPR:
	    case UNGT_EXPR:
	    case UNGE_EXPR:
	    case UNEQ_EXPR:
	      result = 1;
	      break;

	    case LT_EXPR:
	    case LE_EXPR:
	    case GT_EXPR:
	    case GE_EXPR:
	    case LTGT_EXPR:
	      if (flag_trapping_math)
		return NULL_TREE;
	      result = 0;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  return constant_boolean_node (result, type);
	}

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
					  TREE_REALPART (op0),
					  TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
					  TREE_IMAGPART (op0),
					  TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
	return NULL_TREE;
    }

  if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
    {
      unsigned count = VECTOR_CST_NELTS (op0);
      tree *elts = XALLOCAVEC (tree, count);
      gcc_assert (VECTOR_CST_NELTS (op1) == count
		  && TYPE_VECTOR_SUBPARTS (type) == count);

      for (unsigned i = 0; i < count; i++)
	{
	  tree elem_type = TREE_TYPE (type);
	  tree elem0 = VECTOR_CST_ELT (op0, i);
	  tree elem1 = VECTOR_CST_ELT (op1, i);

	  tree tem = fold_relational_const (code, elem_type,
					    elem0, elem1);

	  if (tem == NULL_TREE)
	    return NULL_TREE;

	  elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
	}

      return build_vector (type, elts);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     Otherwise return T.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
	result = INT_CST_LT_UNSIGNED (op0, op1);
      else
	result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
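
/* Worked instance (illustrative): comparing the COMPLEX_CST values
   (1+2i) == (1+3i) recurses on the parts, giving rcond == true and
   icond == false, so the EQ_EXPR branch folds

     fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);   => false

   while NE_EXPR would fold through TRUTH_ORIF_EXPR to true.  */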
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check to see if the expression inside the
     return has no side effects or the right hand side of the modify expression
     inside the return.  If either don't have side effects set we don't need to
     wrap the expression in a cleanup point expression.  Note we don't check the
     left hand side of the modify because it should always be a return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
	return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
	return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

static tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
	return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
	{
	  tree fop = fold_read_from_constant_string (op);
	  if (fop)
	    return fop;
	  else
	    return op;
	}
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && type == TREE_TYPE (optype)
	       && (!in_gimple_form
		   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (in_gimple_form
	      && TREE_CODE (min_val) != INTEGER_CST)
	    return NULL_TREE;
	  return build4_loc (loc, ARRAY_REF, type, op, min_val,
			     NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && type == TREE_TYPE (optype))
	return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op,
				  part_width, index);
	}
    }

  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
	{
	  tree op00type;
	  op00 = TREE_OPERAND (op00, 0);
	  op00type = TREE_TYPE (op00);

	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
	  if (TREE_CODE (op00type) == VECTOR_TYPE
	      && type == TREE_TYPE (op00type))
	    {
	      HOST_WIDE_INT offset = tree_low_cst (op01, 0);
	      tree part_width = TYPE_SIZE (type);
	      unsigned HOST_WIDE_INT part_widthi
		= tree_low_cst (part_width, 0) / BITS_PER_UNIT;
	      unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	      tree index = bitsize_int (indexi);

	      if (offset / part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
		return fold_build3_loc (loc,
					BIT_FIELD_REF, type, op00,
					part_width, index);
	    }
	  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree size = TYPE_SIZE_UNIT (type);
	      if (tree_int_cst_equal (size, op01))
		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
	    }
	  /* ((foo *)&fooarray)[1] => fooarray[1] */
	  else if (TREE_CODE (op00type) == ARRAY_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree type_domain = TYPE_DOMAIN (op00type);
	      tree min_val = size_zero_node;
	      if (type_domain && TYPE_MIN_VALUE (type_domain))
		min_val = TYPE_MIN_VALUE (type_domain);
	      op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
				     TYPE_SIZE_UNIT (type));
	      op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
	      return build4_loc (loc, ARRAY_REF, type, op00, op01,
				 NULL_TREE, NULL_TREE);
	    }
	}
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
	  || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
	  && TREE_CODE (min_val) != INTEGER_CST)
	return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
			 NULL_TREE);
    }

  return NULL_TREE;
}
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}
/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
	{
	  double_int val = tree_to_double_int (value);
	  bool overflow_p;

	  if ((val.low & (divisor - 1)) == 0)
	    return value;

	  overflow_p = TREE_OVERFLOW (value);
	  val.low &= ~(divisor - 1);
	  val.low += divisor;
	  if (val.low == 0)
	    {
	      val.high++;
	      if (val.high == 0)
		overflow_p = true;
	    }

	  return force_fit_type_double (TREE_TYPE (value), val,
					-1, overflow_p);
	}
      else
	{
	  tree t;

	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
	  t = build_int_cst (TREE_TYPE (value), -divisor);
	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
	}
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
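
/* Worked example (not part of the original sources): with DIVISOR == 8,
   a power of two, a non-constant VALUE in sizetype is rewritten as

     value = size_binop_loc (loc, PLUS_EXPR, value,
                             build_int_cst (TREE_TYPE (value), 7));
     value = size_binop_loc (loc, BIT_AND_EXPR, value,
                             build_int_cst (TREE_TYPE (value), -8));

   i.e. (value + 7) & -8, so 13 rounds up to 16 and 16 stays 16.  */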
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep,
				  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
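
/* Illustrative use (hypothetical operands): for E1 == &a[4] and
   E2 == &a[1], both addresses share the core &a, the offsets are
   constant, and

     HOST_WIDE_INT diff;
     if (ptr_difference_const (e1, e2, &diff))
       ...   => diff == 3 * sizeof (a[0]) bytes

   Mixed constant/non-constant offsets make the difference unknown,
   and the function returns false.  */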
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
	return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
				arg0 ? arg0 : TREE_OPERAND (exp, 0),
				arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
	return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp),
				arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
	return fold_build3_loc (loc,
				COND_EXPR, TREE_TYPE (exp),
				TREE_OPERAND (exp, 0),
				arg0 ? arg0 : TREE_OPERAND (exp, 1),
				arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
	const enum built_in_function fcode = builtin_mathfn_code (exp);
	switch (fcode)
	  {
	  CASE_FLT_FN (BUILT_IN_COPYSIGN):
	    /* Strip copysign function call, return the 1st argument.  */
	    arg0 = CALL_EXPR_ARG (exp, 0);
	    arg1 = CALL_EXPR_ARG (exp, 1);
	    return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

	  default:
	    /* Strip sign ops from the argument of "odd" math functions.  */
	    if (negate_mathfn_p (fcode))
	      {
		arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
		if (arg0)
		  return build_call_expr_loc (loc, get_callee_fndecl (exp),
					      1, arg0);
	      }
	    break;
	  }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}