/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
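
/* Illustrative sketch (an editor's example, not part of the original
   sources): how a caller might use the entry points above, assuming
   TYPE is some tree type node and sizetype initialization has run.

     tree four = size_int (4);
     tree sz   = size_binop (PLUS_EXPR, TYPE_SIZE_UNIT (type), four);
     tree expr = fold (build2 (MULT_EXPR, sizetype, sz, size_int (2)));

   size_binop folds eagerly when both operands are INTEGER_CSTs, while
   fold may simply return its argument when no simplification
   applies.  */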
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "diagnostic-core.h"
#include "hash-table.h"
#include "langhooks.h"
#include "tree-flow.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */

int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
				HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
				    HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
				    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
					tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_mathfn_compare (location_t,
				 enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
/* Similar to protected_set_expr_location, but never modify x in place,
   if location can and needs to be set, unshare it.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is according to operand two, that
     does the correct thing for POINTER_PLUS_EXPR where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));

  quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
					  uns, code, &rem);

  if (rem.is_zero ())
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
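
/* Worked example (editor's note, not in the original sources): with
   code == EXACT_DIV_EXPR, arg1 == 12 and arg2 == 4, the divmod call
   above leaves a zero remainder and the function returns the constant
   3; with arg1 == 10 and arg2 == 4 the remainder is 2, so NULL_TREE
   is returned and no folding happens.  */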
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
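
/* Illustrative sketch (editor's example, not from the original
   sources): the intended pairing of the deferral entry points, as a
   caller such as the loop-iteration estimator might use them.

     fold_defer_overflow_warnings ();
     tree res = fold (expr);
     fold_undefer_overflow_warnings (TREE_CODE (res) == INTEGER_CST,
				     NULL, 0);

   Passing false for ISSUE (or calling
   fold_undefer_and_ignore_overflow_warnings below) discards any
   warning queued while folding.  */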
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}
/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
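
/* Editor's example (not in the original sources): sin is odd, so
   -sin(x) == sin(-x) and negate_mathfn_p returns true for it; cos is
   even (cos(-x) == cos(x)), so it is deliberately absent from the
   list above.  The rint family is odd only when the rounding mode
   cannot change the result, hence the !flag_rounding_math guard.  */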
/* Check whether we may negate an integer constant T without causing
   overflow.  */

static bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
	return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
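
/* Worked example (editor's note, not in the original sources): for a
   32-bit signed type, prec == 32 and the final test compares VAL
   against (unsigned HOST_WIDE_INT) 1 << 31.  Only INT_MIN has exactly
   that representation, so the function returns false for INT_MIN
   (whose negation overflows) and true for every other value.  */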
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	     && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
	break;
      return negate_expr_p (TREE_OPERAND (t, 1))
	     || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || !TYPE_OVERFLOW_TRAPS (type))
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
	return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  const char * const warnmsg = G_("assuming signed overflow does not "
					  "occur when negating a division");
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || integer_onep (tem)))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				      TREE_OPERAND (t, 0), negate_expr (tem));
	    }
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				      negate_expr (tem), TREE_OPERAND (t, 1));
	    }
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
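
/* Editor's example (not in the original sources) of the RSHIFT_EXPR
   case above: with a 32-bit int, (int) x >> 31 is the arithmetic
   shift of the sign bit, i.e. 0 or -1.  Negating that yields 0 or 1,
   which is exactly (unsigned) x >> 31, so the negation is folded away
   by toggling the signedness of the shift.  */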
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
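
/* Worked example (editor's note, not in the original sources): with
   code == PLUS_EXPR and IN == (x + 5) where x is a VAR_DECL,
   split_tree stores 5 in *LITP, leaves *CONP and *MINUS_LITP null,
   and returns x.  For IN == (x - 5) under PLUS_EXPR, the subtracted
   literal goes to *MINUS_LITP instead, so the caller can re-associate
   it as x + (-5) without building a negation.  */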
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
		   int overflowable)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = op1 | op2;
      break;

    case BIT_XOR_EXPR:
      res = op1 ^ op2;
      break;

    case BIT_AND_EXPR:
      res = op1 & op2;
      break;

    case RSHIFT_EXPR:
      res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      res = op1.add_with_sign (op2, false, &overflow);
      break;

    case MINUS_EXPR:
      res = op1.sub_with_overflow (op2, &overflow);
      break;

    case MULT_EXPR:
      res = op1.mul_with_sign (op2, false, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
	{
	  bool dummy_overflow;
	  if (TYPE_PRECISION (type) != 2 * HOST_BITS_PER_WIDE_INT)
	    return NULL_TREE;
	  op1.wide_mul_with_sign (op2, uns, &res, &dummy_overflow);
	}
      else
	{
	  bool dummy_overflow;
	  /* MULT_HIGHPART_EXPR can't ever overflow, as the multiplication
	     is performed in twice the precision of arguments.  */
	  tmp = op1.mul_with_sign (op2, false, &dummy_overflow);
	  res = tmp.rshift (TYPE_PRECISION (type),
			    2 * TYPE_PRECISION (type), !uns);
	}
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    op1.low += op2.low - 1;

	  res.low = op1.low / op2.low, res.high = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (op2.is_zero ())
	return NULL_TREE;
      if (op2.is_one ())
	{
	  res = op1;
	  break;
	}
      if (op1 == op2 && !op1.is_zero ())
	{
	  res = double_int_one;
	  break;
	}
      res = op1.divmod_with_overflow (op2, uns, code, &tmp, &overflow);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    op1.low += op2.low - 1;
	  res.low = op1.low % op2.low, res.high = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (op2.is_zero ())
	return NULL_TREE;
      tmp = op1.divmod_with_overflow (op2, uns, code, &res, &overflow);
      break;

    case MIN_EXPR:
      res = op1.min (op2, uns);
      break;

    case MAX_EXPR:
      res = op1.max (op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
			     (!uns && overflow)
			     | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
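
/* Worked example (editor's note, not in the original sources) of the
   CEIL_DIV_EXPR shortcut above: for op1 == 7 and op2 == 2 the code
   first biases the dividend, op1.low += op2.low - 1, giving 8, then
   performs the truncating division 8 / 2 == 4, which is indeed
   ceil (7 / 2).  The shortcut only fires when both operands fit in a
   single nonnegative HOST_WIDE_INT word.  */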
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  f2.data.high = TREE_INT_CST_HIGH (arg2);
	  f2.data.low = TREE_INT_CST_LOW (arg2);
	  f2.mode = SImode;
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru ... */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to
		 minimize overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  elts[i] = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      if (code == VEC_LSHIFT_EXPR
	  || code == VEC_RSHIFT_EXPR)
	{
	  if (!host_integerp (arg2, 1))
	    return NULL_TREE;

	  unsigned HOST_WIDE_INT shiftc = tree_low_cst (arg2, 1);
	  unsigned HOST_WIDE_INT outerc = tree_low_cst (TYPE_SIZE (type), 1);
	  unsigned HOST_WIDE_INT innerc
	    = tree_low_cst (TYPE_SIZE (TREE_TYPE (type)), 1);
	  if (shiftc >= outerc || (shiftc % innerc) != 0)
	    return NULL_TREE;
	  int offset = shiftc / innerc;
	  if (code == VEC_LSHIFT_EXPR)
	    offset = -offset;
	  tree zero = build_zero_cst (TREE_TYPE (type));
	  for (i = 0; i < count; i++)
	    {
	      if (i + offset < 0 || i + offset >= count)
		elts[i] = zero;
	      else
		elts[i] = VECTOR_CST_ELT (arg1, i + offset);
	    }
	}
      else
	for (i = 0; i < count; i++)
	  {
	    tree elem1 = VECTOR_CST_ELT (arg1, i);

	    elts[i] = const_binop (code, elem1, arg2);

	    /* It is possible that const_binop cannot handle the given
	       code and returns NULL_TREE.  */
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}
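
/* Worked example (editor's note, not in the original sources) of the
   wide-range complex division above: for (1 + 2i) / (3 + 4i) the
   comparison |r2| < |i2| holds (3 < 4), so the TRUE branch computes
   ratio = 3/4, div = 4 + 3 * (3/4) = 25/4, real = (1 * (3/4) + 2) /
   (25/4) = 0.44 and imag = (2 * (3/4) - 1) / (25/4) = 0.08, matching
   (11 + 2i) / 25.  Dividing by the larger component first keeps the
   intermediate products from overflowing near the type's range
   limits.  */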
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
			     !POINTER_TYPE_P (TREE_TYPE (arg1)),
			     (TREE_INT_CST_HIGH (arg1) < 0
			      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	{
	  overflow = 1;
	  val = tree_to_double_int (lt);
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	    {
	      overflow = 1;
	      val = tree_to_double_int (ut);
	    }
	}
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  enum machine_mode mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
			  HOST_BITS_PER_DOUBLE_INT,
			  SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
				HOST_BITS_PER_DOUBLE_INT,
				SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     We do this by adding 1 to TEMP when the fractional bits are not
     all zero.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
			     (temp.is_negative ()
			      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	   && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
	   && REAL_VALUE_ISINF (value)
	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
			      TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
				       TREE_INT_CST (arg1),
				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
				       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
					&TREE_REAL_CST (arg1),
					TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}
/* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
	      && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
	  && (TYPE_ADDR_SPACE (TREE_TYPE (type))
	      != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
	return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, REALPART_EXPR,
						  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == FIXED_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

	case REAL_TYPE:
	  return fold_build1_loc (loc, NOP_EXPR, type, arg);

	case FIXED_POINT_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
	  || TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    goto fold_convert_exit;
	}

      switch (TREE_CODE (orig))
	{
	case FIXED_POINT_TYPE:
	case INTEGER_TYPE:
	case ENUMERAL_TYPE:
	case BOOLEAN_TYPE:
	case REAL_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	case REAL_TYPE:
	case FIXED_POINT_TYPE:
	  return fold_build2_loc (loc, COMPLEX_EXPR, type,
				  fold_convert_loc (loc, TREE_TYPE (type), arg),
				  fold_convert_loc (loc, TREE_TYPE (type),
						    integer_zero_node));
	case COMPLEX_TYPE:
	  {
	    tree rpart, ipart;

	    if (TREE_CODE (arg) == COMPLEX_EXPR)
	      {
		rpart = fold_convert_loc (loc, TREE_TYPE (type),
					  TREE_OPERAND (arg, 0));
		ipart = fold_convert_loc (loc, TREE_TYPE (type),
					  TREE_OPERAND (arg, 1));
		return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	      }

	    arg = save_expr (arg);
	    rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	    ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
	    rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
	    ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
	    return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	  }

	default:
	  gcc_unreachable ();
	}

    case VECTOR_TYPE:
      if (integer_zerop (arg))
	return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
		  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
	return fold_build1_loc (loc, NOP_EXPR, type, arg);
      gcc_unreachable ();
    }
 fold_convert_exit:
  return protected_set_expr_location_unshare (tem, loc);
}
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (const_tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
    {
    case VAR_DECL: case PARM_DECL: case RESULT_DECL:
    case LABEL_DECL: case FUNCTION_DECL: case SSA_NAME:

    case COMPONENT_REF: case MEM_REF: case INDIRECT_REF:
    case ARRAY_REF: case ARRAY_RANGE_REF:
    case BIT_FIELD_REF: case OBJ_TYPE_REF:

    case REALPART_EXPR: case IMAGPART_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case SAVE_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case TARGET_EXPR:
    case COND_EXPR:
    case BIND_EXPR:
      break;

    default:
      /* Assume the worst for front-end tree codes.  */
      if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
	break;
      return false;
    }

  return true;
}
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue_loc (location_t loc, tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;
  return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
}
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue_loc (location_t loc, tree x)
{
  if (pedantic_lvalues)
    return non_lvalue_loc (loc, x);

  return protected_set_expr_location_unshare (x, loc);
}
/* Given a tree comparison code, return the code that is the logical inverse.
   It is generally not safe to do this for floating-point comparisons, except
   for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
   ERROR_MARK in this case.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
      && code != ORDERED_EXPR && code != UNORDERED_EXPR)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR: case NE_EXPR:
    case ORDERED_EXPR: case UNORDERED_EXPR:
    case LTGT_EXPR: case UNEQ_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    case UNGT_EXPR:
      return UNLT_EXPR;
    case UNGE_EXPR:
      return UNLE_EXPR;
    case UNLT_EXPR:
      return UNGT_EXPR;
    case UNLE_EXPR:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
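
/* Editor's note (not in the original sources): the encoding the two
   functions above translate uses one bit each for LT, EQ, GT and
   UNORD, so for example

     COMPCODE_LE   == COMPCODE_LT | COMPCODE_EQ
     COMPCODE_UNGE == COMPCODE_UNORD | COMPCODE_GT | COMPCODE_EQ

   which is what lets combine_comparisons below merge two comparisons
   with plain bitwise AND and OR.  */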
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

tree
combine_comparisons (location_t loc,
                     enum tree_code code, enum tree_code lcode,
                     enum tree_code rcode, tree truth_type,
                     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  int compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
         which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
        compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
        compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
         under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
                   && (lcompcode != COMPCODE_EQ)
                   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
                   && (rcompcode != COMPCODE_EQ)
                   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
                  && (compcode != COMPCODE_EQ)
                  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
         such that the RHS, if evaluated, will never trap.  For
         example, in ORD (x, y) && (x < y), we evaluate the RHS only
         if neither x nor y is NaN.  (This is a mixed blessing: for
         example, the expression above will never trap, hence
         optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
          || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
        rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
         trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
          && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
        return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
        return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    {
      enum tree_code tcode;

      tcode = compcode_to_comparison ((enum comparison_code) compcode);
      return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
    }
}
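
/* Worked example (added commentary, not from the original source):
   for integral X and Y,

     combine_comparisons (loc, TRUTH_ANDIF_EXPR, LE_EXPR, GE_EXPR,
                          boolean_type_node, x, y)

   folds "x <= y && x >= y" to "x == y", because
   COMPCODE_LE & COMPCODE_GE == COMPCODE_EQ.  Similarly, ORing LT and GT
   yields COMPCODE_LTGT, which is canonicalized to NE_EXPR above when
   the mode has no NaNs.  */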
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */
int
operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
      || TREE_TYPE (arg0) == error_mark_node
      || TREE_TYPE (arg1) == error_mark_node)
    return 0;

  /* Similar, if either does not have a type (like a released SSA name),
     they aren't equal.  */
  if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
    return 0;

  /* Check equality of integer constants before bailing out due to
     precision differences.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    return tree_int_cst_equal (arg0, arg1);

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  As pointers
     strictly don't have a signedness, require either two pointers or
     two non-pointers as well.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
      || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* We cannot consider pointers to different address space equal.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
      && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
          != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
    return 0;

  /* If both types don't have the same precision, then it is not safe
     to strip NOPs.  */
  if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* In case both args are comparisons but with different comparison
     code, try to swap the comparison operands of one arg to produce
     a match and compare that variant.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      && COMPARISON_CLASS_P (arg0)
      && COMPARISON_CLASS_P (arg1))
    {
      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));

      if (TREE_CODE (arg0) == swap_code)
        return operand_equal_p (TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg1, 1), flags)
               && operand_equal_p (TREE_OPERAND (arg0, 1),
                                   TREE_OPERAND (arg1, 0), flags);
    }

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
         Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.
     If we are taking an invariant address of two identical objects
     they are necessarily equal as well.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
          || (flags & OEP_CONSTANT_ADDRESS_OF)
          || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
        return tree_int_cst_equal (arg0, arg1);

      case FIXED_CST:
        return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
                                       TREE_FIXED_CST (arg1));

      case REAL_CST:
        if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
                                   TREE_REAL_CST (arg1)))
          return 1;

        if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
          {
            /* If we do not distinguish between signed and unsigned zero,
               consider them equal.  */
            if (real_zerop (arg0) && real_zerop (arg1))
              return 1;
          }
        return 0;

      case VECTOR_CST:
        {
          unsigned i;

          if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
            return 0;

          for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
            if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
                                  VECTOR_CST_ELT (arg1, i), flags))
              return 0;

          return 1;
        }

      case COMPLEX_CST:
        return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
                                 flags)
                && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
                                    flags));

      case STRING_CST:
        return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
                && ! memcmp (TREE_STRING_POINTER (arg0),
                             TREE_STRING_POINTER (arg1),
                             TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
        return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
                                TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
                                ? OEP_CONSTANT_ADDRESS_OF : 0);

      default:
        break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;
  /* Define macros to test an operand from arg0 and arg1 for equality and a
     variant that allows null and views null as being different from any
     non-null value.  In the latter case, if either is null, both
     must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
                                    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N)                            \
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
        {
        CASE_CONVERT:
        case FIX_TRUNC_EXPR:
          if (TYPE_UNSIGNED (TREE_TYPE (arg0))
              != TYPE_UNSIGNED (TREE_TYPE (arg1)))
            return 0;
          break;
        default:
          break;
        }

      return OP_SAME (0);

    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
        return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 1), flags)
              && operand_equal_p (TREE_OPERAND (arg0, 1),
                                  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
         dereferencing contain a side effect, these cannot be equal,
         but their addresses can be.  */
      if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
          && (TREE_SIDE_EFFECTS (arg0)
              || TREE_SIDE_EFFECTS (arg1)))
        return 0;

      switch (TREE_CODE (arg0))
        {
        case INDIRECT_REF:
          flags &= ~OEP_CONSTANT_ADDRESS_OF;
          return OP_SAME (0);

        case REALPART_EXPR:
        case IMAGPART_EXPR:
          return OP_SAME (0);

        case TARGET_MEM_REF:
          flags &= ~OEP_CONSTANT_ADDRESS_OF;
          /* Require equal extra operands and then fall through to MEM_REF
             handling of the two common operands.  */
          if (!OP_SAME_WITH_NULL (2)
              || !OP_SAME_WITH_NULL (3)
              || !OP_SAME_WITH_NULL (4))
            return 0;
          /* Fallthru.  */
        case MEM_REF:
          flags &= ~OEP_CONSTANT_ADDRESS_OF;
          /* Require equal access sizes, and similar pointer types.
             We can have incomplete types for array references of
             variable-sized arrays from the Fortran frontend
             though.  Also verify the types are compatible.  */
          return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
                   || (TYPE_SIZE (TREE_TYPE (arg0))
                       && TYPE_SIZE (TREE_TYPE (arg1))
                       && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
                                           TYPE_SIZE (TREE_TYPE (arg1)),
                                           flags)))
                  && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
                  && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
                      == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
                  && OP_SAME (0) && OP_SAME (1));

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          /* Operands 2 and 3 may be null.
             Compare the array index by value if it is constant first as we
             may have different types but same value here.  */
          if (!OP_SAME (0))
            return 0;
          flags &= ~OEP_CONSTANT_ADDRESS_OF;
          return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
                                       TREE_OPERAND (arg1, 1))
                   || OP_SAME (1))
                  && OP_SAME_WITH_NULL (2)
                  && OP_SAME_WITH_NULL (3));

        case COMPONENT_REF:
          /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
             may be NULL when we're called to compare MEM_EXPRs.  */
          if (!OP_SAME_WITH_NULL (0))
            return 0;
          flags &= ~OEP_CONSTANT_ADDRESS_OF;
          return OP_SAME (1) && OP_SAME_WITH_NULL (2);

        case BIT_FIELD_REF:
          if (!OP_SAME (0))
            return 0;
          flags &= ~OEP_CONSTANT_ADDRESS_OF;
          return OP_SAME (1) && OP_SAME (2);

        default:
          return 0;
        }

    case tcc_expression:
      switch (TREE_CODE (arg0))
        {
        case ADDR_EXPR:
        case TRUTH_NOT_EXPR:
          return OP_SAME (0);

        case TRUTH_ANDIF_EXPR:
        case TRUTH_ORIF_EXPR:
          return OP_SAME (0) && OP_SAME (1);

        case FMA_EXPR:
        case WIDEN_MULT_PLUS_EXPR:
        case WIDEN_MULT_MINUS_EXPR:
          if (!OP_SAME (2))
            return 0;
          /* The multiplication operands are commutative.  */
          /* FALLTHRU */

        case TRUTH_AND_EXPR:
        case TRUTH_OR_EXPR:
        case TRUTH_XOR_EXPR:
          if (OP_SAME (0) && OP_SAME (1))
            return 1;

          /* Otherwise take into account this is a commutative operation.  */
          return (operand_equal_p (TREE_OPERAND (arg0, 0),
                                   TREE_OPERAND (arg1, 1), flags)
                  && operand_equal_p (TREE_OPERAND (arg0, 1),
                                      TREE_OPERAND (arg1, 0), flags));

        case COND_EXPR:
        case VEC_COND_EXPR:
        case DOT_PROD_EXPR:
          return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

        default:
          return 0;
        }

    case tcc_vl_exp:
      switch (TREE_CODE (arg0))
        {
        case CALL_EXPR:
          /* If the CALL_EXPRs call different functions, then they
             clearly cannot be equal.  */
          if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
                                 flags))
            return 0;

          {
            unsigned int cef = call_expr_flags (arg0);
            if (flags & OEP_PURE_SAME)
              cef &= ECF_CONST | ECF_PURE;
            else
              cef &= ECF_CONST;
            if (!cef)
              return 0;
          }

          /* Now see if all the arguments are the same.  */
          {
            const_call_expr_arg_iterator iter0, iter1;
            const_tree a0, a1;
            for (a0 = first_const_call_expr_arg (arg0, &iter0),
                   a1 = first_const_call_expr_arg (arg1, &iter1);
                 a0 && a1;
                 a0 = next_const_call_expr_arg (&iter0),
                   a1 = next_const_call_expr_arg (&iter1))
              if (! operand_equal_p (a0, a1, flags))
                return 0;

            /* If we get here and both argument lists are exhausted
               then the CALL_EXPRs are equal.  */
            return ! (a0 || a1);
          }

        default:
          return 0;
        }

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
              && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
              && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
              && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}
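
/* Illustrative usage sketch (added commentary, not from the original
   source): a caller folding "a + b == b + a" only needs

     if (operand_equal_p (arg0, arg1, 0))
       ...

   since the tcc_binary case above already tries the commutated operand
   order.  Passing OEP_ONLY_CONST instead restricts the answer to
   constants, per the commentary before the function.  */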
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
         to match the longer operand.  */
      primarg1 = fold_convert (signed_or_unsigned_type_for
                               (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
        return 1;
    }

  return 0;
}
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
               || code == COMPOUND_EXPR))
    tclass = tcc_binary;

  else if (tclass == tcc_expression && code == SAVE_EXPR
           && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
         too complex to handle.  */
      if (*cval1 || *cval2)
        return 0;

      tclass = tcc_unary;
      *save_p = 1;
    }

  switch (tclass)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
              && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
        return (twoval_comparison_p (TREE_OPERAND (arg, 0),
                                     cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                        cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 2),
                                        cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
         the second operand, we know *CVAL1 can't be zero.  It must be that
         one side of the comparison is each of the values; test for the
         case where this isn't true by failing if the two operands
         are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
                           TREE_OPERAND (arg, 1), 0))
        return 0;

      if (*cval1 == 0)
        *cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
        ;
      else
        return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
        ;
      else
        return 0;

      return 1;

    default:
      return 0;
    }
}
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (location_t loc, tree arg, tree old0, tree new0,
            tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    tclass = tcc_binary;

  switch (tclass)
    {
    case tcc_unary:
      return fold_build1_loc (loc, code, type,
                              eval_subst (loc, TREE_OPERAND (arg, 0),
                                          old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2_loc (loc, code, type,
                              eval_subst (loc, TREE_OPERAND (arg, 0),
                                          old0, new0, old1, new1),
                              eval_subst (loc, TREE_OPERAND (arg, 1),
                                          old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
        {
        case SAVE_EXPR:
          return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
                             old1, new1);

        case COMPOUND_EXPR:
          return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
                             old1, new1);

        case COND_EXPR:
          return fold_build3_loc (loc, code, type,
                                  eval_subst (loc, TREE_OPERAND (arg, 0),
                                              old0, new0, old1, new1),
                                  eval_subst (loc, TREE_OPERAND (arg, 1),
                                              old0, new0, old1, new1),
                                  eval_subst (loc, TREE_OPERAND (arg, 2),
                                              old0, new0, old1, new1));
        default:
          break;
        }
      /* Fall through - ???  */

    case tcc_comparison:
      {
        tree arg0 = TREE_OPERAND (arg, 0);
        tree arg1 = TREE_OPERAND (arg, 1);

        /* We need to check both for exact equality and tree equality.  The
           former will be true if the operand has a side-effect.  In that
           case, we know the operand occurred exactly once.  */

        if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
          arg0 = new0;
        else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
          arg0 = new1;

        if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
          arg1 = new0;
        else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
          arg1 = new1;

        return fold_build2_loc (loc, code, type, arg0, arg1);
      }

    default:
      return arg;
    }
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement casted to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1_loc (loc, NOP_EXPR, void_type_node,
                       fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2_loc (loc, COMPOUND_EXPR, type,
                       fold_ignored_result (omitted), t);

  return non_lvalue_loc (loc, t);
}
/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
                               tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement casted to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1_loc (loc, NOP_EXPR, void_type_node,
                       fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2_loc (loc, COMPOUND_EXPR, type,
                       fold_ignored_result (omitted), t);

  return pedantic_non_lvalue_loc (loc, t);
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands_loc (location_t loc, tree type, tree result,
                       tree omitted1, tree omitted2)
{
  tree t = fold_convert_loc (loc, type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
  if (TREE_SIDE_EFFECTS (omitted1))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
}
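
/* Illustrative example (added commentary, not from the original source):
   when "f () * 0" is folded to 0, the call cannot simply be discarded,
   so omit_one_operand_loc produces "(f (), 0)" via COMPOUND_EXPR.  With
   two side-effecting omitted operands the result nests as
   "(omitted1, (omitted2, result))", preserving the documented evaluation
   order.  */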
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

static tree
fold_truth_not_expr (location_t loc, tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  location_t loc1, loc2;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
          && flag_trapping_math
          && code != ORDERED_EXPR && code != UNORDERED_EXPR
          && code != NE_EXPR && code != EQ_EXPR)
        return NULL_TREE;

      code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
      if (code == ERROR_MARK)
        return NULL_TREE;

      return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
                         TREE_OPERAND (arg, 1));
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_OR_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_AND_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
         unless the second operand is a TRUTH_NOT_EXPR in which case our
         result is the XOR of the first operand with the inside of the
         negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
        return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
                           TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
        return build2_loc (loc, TRUTH_XOR_EXPR, type,
                           invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
                           TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ORIF_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
        tree arg1 = TREE_OPERAND (arg, 1);
        tree arg2 = TREE_OPERAND (arg, 2);

        loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
        loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);

        /* A COND_EXPR may have a throw as one operand, which
           then has void type.  Just leave void operands
           as they are.  */
        return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
                           VOID_TYPE_P (TREE_TYPE (arg1))
                           ? arg1 : invert_truthvalue_loc (loc1, arg1),
                           VOID_TYPE_P (TREE_TYPE (arg2))
                           ? arg2 : invert_truthvalue_loc (loc2, arg2));
      }

    case COMPOUND_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, COMPOUND_EXPR, type,
                         TREE_OPERAND (arg, 0),
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));

    CASE_CONVERT:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
        return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

      /* ... fall through ...  */

    case FLOAT_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, TREE_CODE (arg), type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
        return NULL_TREE;
      return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));

    case SAVE_EXPR:
      return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, CLEANUP_POINT_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    default:
      return NULL_TREE;
    }
}
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

tree
invert_truthvalue_loc (location_t loc, tree arg)
{
  tree tem;

  if (TREE_CODE (arg) == ERROR_MARK)
    return arg;

  tem = fold_truth_not_expr (loc, arg);
  if (!tem)
    tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);

  return tem;
}
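
/* Illustrative example (added commentary, not from the original source):
   the negation applies De Morgan's laws, e.g.

     !(a && b)  ->  !a || !b   (TRUTH_ANDIF_EXPR -> TRUTH_ORIF_EXPR)
     !(x < y)   ->  x >= y     (integral operands)

   whereas a trapping floating-point "x < y" is left wrapped in a plain
   TRUTH_NOT_EXPR by the fallback above.  */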
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
        (A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (location_t loc, enum tree_code code, tree type,
                     tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
          && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  common = fold_convert_loc (loc, type, common);
  left = fold_convert_loc (loc, type, left);
  right = fold_convert_loc (loc, type, right);
  return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
                          fold_build2_loc (loc, code, type, left, right));
}
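
/* Worked example (added commentary, not from the original source): with
   constants, "(x | 3) & (x | 5)" distributes to "x | (3 & 5)", which the
   inner fold_build2_loc immediately reduces to "x | 1", one operation
   instead of three.  */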
/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
   with code CODE.  This optimization is unsafe.  */

static tree
distribute_real_division (location_t loc, enum tree_code code, tree type,
                          tree arg0, tree arg1)
{
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
                          TREE_OPERAND (arg1, 1), 0))
    return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
                            fold_build2_loc (loc, code, type,
                                             TREE_OPERAND (arg0, 0),
                                             TREE_OPERAND (arg1, 0)),
                            TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
                       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      if (!mul0)
        real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
        real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2_loc (loc, MULT_EXPR, type,
                              TREE_OPERAND (arg0, 0),
                              build_real (type, r0));
    }

  return NULL_TREE;
}
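
/* Illustrative note (added commentary, not from the original source):
   this is unsafe because, e.g., "a/10.0 + b/10.0 -> (a+b)/10.0" can
   round differently, and "a/c1 + a/c2 -> a * (1/c1 + 1/c2)" introduces
   reciprocals that may not be exact, so callers are expected to guard
   it behind unsafe-math style flags.  */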
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

static tree
make_bit_field_ref (location_t loc, tree inner, tree type,
                    HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
{
  tree result, bftype;

  if (bitpos == 0)
    {
      tree size = TYPE_SIZE (TREE_TYPE (inner));
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
           || POINTER_TYPE_P (TREE_TYPE (inner)))
          && host_integerp (size, 0)
          && tree_low_cst (size, 0) == bitsize)
        return fold_convert_loc (loc, type, inner);
    }

  bftype = type;
  if (TYPE_PRECISION (bftype) != bitsize
      || TYPE_UNSIGNED (bftype) == !unsignedp)
    bftype = build_nonstandard_integer_type (bitsize, 0);

  result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
                       size_int (bitsize), bitsize_int (bitpos));

  if (bftype != type)
    result = fold_convert_loc (loc, type, result);

  return result;
}
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (location_t loc, enum tree_code code,
                            tree compare_type, tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* In the strict volatile bitfields case, doing code changes here may prevent
     other optimizations, in particular in a SLOW_BYTE_ACCESS setting.  */
  if (flag_strict_volatile_bitfields > 0)
    return 0;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
                                &lunsignedp, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
         sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
                                    &runsignedp, &rvolatilep, false);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
          || lunsignedp != runsignedp || offset != 0
          || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
        return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  if (lvolatilep
      && GET_MODE_BITSIZE (lmode) > 0
      && flag_strict_volatile_bitfields > 0)
    nmode = lmode;
  else
    nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
                           const_p ? TYPE_ALIGN (TREE_TYPE (linner))
                           : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
                                  TYPE_ALIGN (TREE_TYPE (rinner))),
                           word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_cst_type (unsigned_type, -1);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
  mask = const_binop (RSHIFT_EXPR, mask,
                      size_int (nbitsize - lbitsize - lbitpos));

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return fold_build2_loc (loc, code, compare_type,
                            fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
                                             make_bit_field_ref (loc, linner,
                                                                 unsigned_type,
                                                                 nbitsize,
                                                                 nbitpos, 1),
                                             mask),
                            fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
                                             make_bit_field_ref (loc, rinner,
                                                                 unsigned_type,
                                                                 nbitsize,
                                                                 nbitpos, 1),
                                             mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
                                        fold_convert_loc (loc,
                                                          unsigned_type, rhs),
                                        size_int (lbitsize))))
        {
          warning (0, "comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR,
                              fold_convert_loc (loc, signed_type, rhs),
                              size_int (lbitsize - 1));
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
        {
          warning (0, "comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = build_int_cst (type, 0);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = const_binop (BIT_AND_EXPR,
                     const_binop (LSHIFT_EXPR,
                                  fold_convert_loc (loc, unsigned_type, rhs),
                                  size_int (lbitpos)),
                     mask);

  lhs = build2_loc (loc, code, compare_type,
                    build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
  return lhs;
}
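
/* Worked example (added commentary, not from the original source; the
   struct is hypothetical): given

     struct S { unsigned f : 3; } s;   ...   s.f == 5

   the 3-bit extraction is avoided: the containing unit is loaded once
   and the comparison becomes, roughly,

     (unit & (7 << pos)) == (5 << pos)

   where pos is the field's bit offset within the loaded unit.  */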
/* Subroutine for fold_truth_andor_1: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
                        HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
                        int *punsignedp, int *pvolatilep,
                        tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (CONVERT_EXPR_P (exp)
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
        return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
                               punsignedp, pvolatilep, false);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst_type (unsigned_type, -1);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
                            fold_convert_loc (loc, unsigned_type, and_mask),
                            mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bit positions.  */

static int
all_ones_mask_p (const_tree mask, int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);
  tree tmask;

  tmask = build_int_cst_type (signed_type_for (type), -1);

  return
    tree_int_cst_equal (mask,
                        const_binop (RSHIFT_EXPR,
                                     const_binop (LSHIFT_EXPR, tmask,
                                                  size_int (precision - size)),
                                     size_int (precision - size)));
}
/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, const_tree val)
{
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = ((unsigned HOST_WIDE_INT) -1
                 >> (HOST_BITS_PER_DOUBLE_INT - width));
      mask_lo = -1;
    }
  else
    {
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = ((unsigned HOST_WIDE_INT) -1
                 >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
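
/* Illustrative example (added commentary, not from the original source):
   for a 32-bit int X, sign_bit_p (x, 0x80000000) returns X, allowing a
   test such as "(x & 0x80000000) != 0" to be folded later into the
   cheaper "x < 0".  */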
/* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (const_tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  STRIP_NOPS (exp);

  return (CONSTANT_CLASS_P (exp)
          || TREE_CODE (exp) == SSA_NAME
          || (DECL_P (exp)
              && ! TREE_ADDRESSABLE (exp)
              && ! TREE_THIS_VOLATILE (exp)
              && ! DECL_NONLOCAL (exp)
              /* Don't regard global variables as simple.  They may be
                 allocated in ways unknown to the compiler (shared memory,
                 #pragma weak, etc).  */
              && ! TREE_PUBLIC (exp)
              && ! DECL_EXTERNAL (exp)
              /* Loading a static variable is unduly expensive, but global
                 registers aren't expensive.  */
              && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}
/* Subroutine for fold_truth_andor: determine if an operand is simple enough
   to be evaluated unconditionally.
   In addition to simple_operand_p, we assume that comparisons, conversions,
   and logic-not operations are simple, if their operands are simple, too.  */

static bool
simple_operand_p_2 (tree exp)
{
  enum tree_code code;

  if (TREE_SIDE_EFFECTS (exp)
      || tree_could_trap_p (exp))
    return false;

  while (CONVERT_EXPR_P (exp))
    exp = TREE_OPERAND (exp, 0);

  code = TREE_CODE (exp);

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return (simple_operand_p (TREE_OPERAND (exp, 0))
            && simple_operand_p (TREE_OPERAND (exp, 1)));

  if (code == TRUTH_NOT_EXPR)
    return simple_operand_p_2 (TREE_OPERAND (exp, 0));

  return simple_operand_p (exp);
}
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
        X == 2 || X == 3 || X == 4 || X == 5
   and
        X >= 2 && X <= 5
   are converted to
        (unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
             tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
                         arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);

  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}
/* Helper routine for make_range.  Perform one step for it, return
   new expression if the loop should continue or NULL_TREE if it should
   stop.  */

tree
make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
                 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
                 bool *strict_overflow_p)
{
  tree arg0_type = TREE_TYPE (arg0);
  tree n_low, n_high, low = *p_low, high = *p_high;
  int in_p = *p_in_p, n_in_p;

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      /* We can only do something if the range is testing for zero.  */
      if (low == NULL_TREE || high == NULL_TREE
          || ! integer_zerop (low) || ! integer_zerop (high))
        return NULL_TREE;
      *p_in_p = ! in_p;
      return arg0;

    case EQ_EXPR: case NE_EXPR:
    case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
      /* We can only do something if the range is testing for zero
         and if the second operand is an integer constant.  Note that
         saying something is "in" the range we make is done by
         complementing IN_P since it will set in the initial case of
         being not equal to zero; "out" is leaving it alone.  */
      if (low == NULL_TREE || high == NULL_TREE
          || ! integer_zerop (low) || ! integer_zerop (high)
          || TREE_CODE (arg1) != INTEGER_CST)
        return NULL_TREE;

      switch (code)
        {
        case NE_EXPR:  /* - [c, c]  */
          low = high = arg1;
          break;
        case EQ_EXPR:  /* + [c, c]  */
          in_p = ! in_p, low = high = arg1;
          break;
        case GT_EXPR:  /* - [-, c] */
          low = 0, high = arg1;
          break;
        case GE_EXPR:  /* + [c, -] */
          in_p = ! in_p, low = arg1, high = 0;
          break;
        case LT_EXPR:  /* - [c, -] */
          low = arg1, high = 0;
          break;
        case LE_EXPR:  /* + [-, c] */
          in_p = ! in_p, low = 0, high = arg1;
          break;
        default:
          gcc_unreachable ();
        }

      /* If this is an unsigned comparison, we also know that EXP is
         greater than or equal to zero.  We base the range tests we make
         on that fact, so we record it here so we can parse existing
         range tests.  We test arg0_type since often the return type
         of, e.g. EQ_EXPR, is boolean.  */
      if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
        {
          if (! merge_ranges (&n_in_p, &n_low, &n_high,
                              in_p, low, high, 1,
                              build_int_cst (arg0_type, 0),
                              NULL_TREE))
            return NULL_TREE;

          in_p = n_in_p, low = n_low, high = n_high;

          /* If the high bound is missing, but we have a nonzero low
             bound, reverse the range so it goes from zero to the low bound
             minus 1.  */
          if (high == 0 && low && ! integer_zerop (low))
            {
              in_p = ! in_p;
              high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
                                  integer_one_node, 0);
              low = build_int_cst (arg0_type, 0);
            }
        }

      *p_low = low;
      *p_high = high;
      *p_in_p = in_p;
      return arg0;

    case NEGATE_EXPR:
      /* If flag_wrapv and ARG0_TYPE is signed, make sure
         low and high are non-NULL, then normalize will DTRT.  */
      if (!TYPE_UNSIGNED (arg0_type)
          && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
        {
          if (low == NULL_TREE)
            low = TYPE_MIN_VALUE (arg0_type);
          if (high == NULL_TREE)
            high = TYPE_MAX_VALUE (arg0_type);
        }

      /* (-x) IN [a,b] -> x in [-b, -a]  */
      n_low = range_binop (MINUS_EXPR, exp_type,
                           build_int_cst (exp_type, 0),
                           0, high, 1);
      n_high = range_binop (MINUS_EXPR, exp_type,
                            build_int_cst (exp_type, 0),
                            0, low, 0);
      if (n_high != 0 && TREE_OVERFLOW (n_high))
        return NULL_TREE;
      goto normalize;

    case BIT_NOT_EXPR:
      /* ~ X -> -X - 1  */
      return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
                         build_int_cst (exp_type, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
      if (TREE_CODE (arg1) != INTEGER_CST)
        return NULL_TREE;

      /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
         move a constant to the other side.  */
      if (!TYPE_UNSIGNED (arg0_type)
          && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
        return NULL_TREE;

      /* If EXP is signed, any overflow in the computation is undefined,
         so we don't worry about it so long as our computations on
         the bounds don't overflow.  For unsigned, overflow is defined
         and this is exactly the right thing.  */
      n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                           arg0_type, low, 0, arg1, 0);
      n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                            arg0_type, high, 1, arg1, 0);
      if ((n_low != 0 && TREE_OVERFLOW (n_low))
          || (n_high != 0 && TREE_OVERFLOW (n_high)))
        return NULL_TREE;

      if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
        *strict_overflow_p = true;

      normalize:
        /* Check for an unsigned range which has wrapped around the maximum
           value thus making n_high < n_low, and normalize it.  */
        if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
          {
            low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
                               integer_one_node, 0);
            high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
                                integer_one_node, 0);

            /* If the range is of the form +/- [ x+1, x ], we won't
               be able to normalize it.  But then, it represents the
               whole range or the empty set, so make it
               +/- [ -, - ].  */
            if (tree_int_cst_equal (n_low, low)
                && tree_int_cst_equal (n_high, high))
              low = high = 0;
            else
              in_p = ! in_p;
          }
        else
          low = n_low, high = n_high;

        *p_low = low;
        *p_high = high;
        *p_in_p = in_p;
        return arg0;

    CASE_CONVERT:
    case NON_LVALUE_EXPR:
      if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
        return NULL_TREE;

      if (! INTEGRAL_TYPE_P (arg0_type)
          || (low != 0 && ! int_fits_type_p (low, arg0_type))
          || (high != 0 && ! int_fits_type_p (high, arg0_type)))
        return NULL_TREE;

      n_low = low, n_high = high;

      if (n_low != 0)
        n_low = fold_convert_loc (loc, arg0_type, n_low);

      if (n_high != 0)
        n_high = fold_convert_loc (loc, arg0_type, n_high);

      /* If we're converting arg0 from an unsigned type, to exp,
         a signed type, we will be doing the comparison as unsigned.
         The tests above have already verified that LOW and HIGH
         are both positive.

         So we have to ensure that we will handle large unsigned
         values the same way that the current signed bounds treat
         negative values.  */

      if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
        {
          tree high_positive;
          tree equiv_type;
          /* For fixed-point modes, we need to pass the saturating flag
             as the 2nd parameter.  */
          if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
            equiv_type
              = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
                                                TYPE_SATURATING (arg0_type));
          else
            equiv_type
              = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);

          /* A range without an upper bound is, naturally, unbounded.
             Since convert would have cropped a very large value, use
             the max value for the destination type.  */
          high_positive
            = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
              : TYPE_MAX_VALUE (arg0_type);

          if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
            high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
                                             fold_convert_loc (loc, arg0_type,
                                                               high_positive),
                                             build_int_cst (arg0_type, 1));

          /* If the low bound is specified, "and" the range with the
             range for which the original unsigned value will be
             positive.  */
          if (low != 0)
            {
              if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
                                  1, fold_convert_loc (loc, arg0_type,
                                                       integer_zero_node),
                                  high_positive))
                return NULL_TREE;

              in_p = (n_in_p == in_p);
            }
          else
            {
              /* Otherwise, "or" the range with the range of the input
                 that will be interpreted as negative.  */
              if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
                                  1, fold_convert_loc (loc, arg0_type,
                                                       integer_zero_node),
                                  high_positive))
                return NULL_TREE;

              in_p = (in_p != n_in_p);
            }
        }

      *p_low = n_low;
      *p_high = n_high;
      *p_in_p = in_p;
      return arg0;

    default:
      return NULL_TREE;
    }
}
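
/* Worked example (added commentary, not from the original source):
   starting from "x - 2 <= 3" on an unsigned X, the comparison step
   yields the range + [0, 3] for "x - 2", and the PLUS_EXPR/MINUS_EXPR
   step then moves the constant across, giving + [2, 5] for X itself,
   the inverse direction of the transformation described before
   range_binop.  */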
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same
   type as the returned expression.  If EXP is not a comparison, we
   will most likely not be returning a useful value and range.  Set
   *STRICT_OVERFLOW_P to true if the return value is only valid
   because signed overflow is undefined; otherwise, do not change
   *STRICT_OVERFLOW_P.  */

tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
            bool *strict_overflow_p)
{
  enum tree_code code;
  tree arg0, arg1 = NULL_TREE;
  tree exp_type, nexp;
  int in_p;
  tree low, high;
  location_t loc = EXPR_LOCATION (exp);

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = build_int_cst (TREE_TYPE (exp), 0);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);
      arg0 = NULL_TREE;

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
        {
          if (TREE_OPERAND_LENGTH (exp) > 0)
            arg0 = TREE_OPERAND (exp, 0);
          if (TREE_CODE_CLASS (code) == tcc_binary
              || TREE_CODE_CLASS (code) == tcc_comparison
              || (TREE_CODE_CLASS (code) == tcc_expression
                  && TREE_OPERAND_LENGTH (exp) > 1))
            arg1 = TREE_OPERAND (exp, 1);
        }
      if (arg0 == NULL_TREE)
        break;

      nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
                              &high, &in_p, strict_overflow_p);
      if (nexp == NULL_TREE)
        break;
      exp = nexp;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
                                                 exp, 0, low, 0))
                      && integer_onep (range_binop (LE_EXPR, integer_type_node,
                                                    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.  */

tree
build_range_check (location_t loc, tree type, tree exp, int in_p,
		   tree low, tree high)
{
  tree etype = TREE_TYPE (exp), value;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (etype) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (! in_p)
    {
      value = build_range_check (loc, type, exp, 1, low, high);
      if (value != 0)
	return invert_truthvalue_loc (loc, value);

      return 0;
    }

  if (low == 0 && high == 0)
    return build_int_cst (type, 1);

  if (low == 0)
    return fold_build2_loc (loc, LE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, high));

  if (high == 0)
    return fold_build2_loc (loc, GE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  if (operand_equal_p (low, high, 0))
    return fold_build2_loc (loc, EQ_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
	{
	  etype = unsigned_type_for (etype);
	  high = fold_convert_loc (loc, etype, high);
	  exp = fold_convert_loc (loc, etype, exp);
	}
      return build_range_check (loc, type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
	{
	  hi = 0;
	  lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
	}
      else
	{
	  hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
	  lo = (unsigned HOST_WIDE_INT) -1;
	}

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
	{
	  if (TYPE_UNSIGNED (etype))
	    {
	      tree signed_etype = signed_type_for (etype);
	      if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
		etype
		  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
	      else
		etype = signed_etype;
	      exp = fold_convert_loc (loc, etype, exp);
	    }
	  return fold_build2_loc (loc, GT_EXPR, type, exp,
				  build_int_cst (etype, 0));
	}
    }

  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetic for the type of the expression.
     First make sure that arithmetic in this type is valid, then make sure
     that it wraps around.  */
  if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
    etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
					    TYPE_UNSIGNED (etype));

  if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
	 for the type in question, as we rely on this here.  */
      utype = unsigned_type_for (etype);
      maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
			  integer_one_node, 1);
      minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
				      minv, 1, maxv, 1)))
	etype = utype;
      else
	return 0;
    }

  high = fold_convert_loc (loc, etype, high);
  low = fold_convert_loc (loc, etype, low);
  exp = fold_convert_loc (loc, etype, exp);

  value = const_binop (MINUS_EXPR, high, low);

  if (POINTER_TYPE_P (etype))
    {
      if (value != 0 && !TREE_OVERFLOW (value))
	{
	  low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
	  return build_range_check (loc, type,
				    fold_build_pointer_plus_loc (loc, exp, low),
				    1, build_int_cst (etype, 0), value);
	}
      return 0;
    }

  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (loc, type,
			      fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
			      1, build_int_cst (etype, 0), value);

  return 0;
}
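
/* Illustrative example (added commentary, not from the original source):
   for a range check of EXP in [3, 42] on an int C, the wrap-around
   trick above yields

     (unsigned int) c - 3u <= 39u

   which tests the whole range with a single unsigned comparison.  */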
/* Return the predecessor of VAL in its type, handling the infinite case.  */

static tree
range_predecessor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
    return 0;
  else
    return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}
/* Return the successor of VAL in its type, handling the infinite case.  */

static tree
range_successor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
    return 0;
  else
    return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}
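
/* Added commentary: range_predecessor/range_successor return 0 rather
   than wrapping, e.g. range_successor applied to TYPE_MAX_VALUE is 0,
   which callers treat as "no finite neighbour exists" and punt.  */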
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

bool
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
	      tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (lowequal
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the second range to the end of the first.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)
	{
	  low = range_successor (high1);
	  high = high0;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* We are in the weird situation where high0 > high1 but
		 high1 has no successor.  Punt.  */
	      return 0;
	    }
	}
      else if (! subset || highequal)
	{
	  low = low0;
	  high = range_predecessor (low1);
	  in_p = 1;
	  if (high == 0)
	    {
	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
	      return 0;
	    }
	}
      else
	return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
	in_p = 0, low = high = 0;
      else
	{
	  low = range_successor (high0);
	  high = high1;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* high1 > high0 but high0 has no successor.  Punt.  */
	      return 0;
	    }
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of the
	 second.  */
      if (no_overlap)
	{
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_successor (high0),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    {
	      /* Canonicalize - [min, x] into - [-, x].  */
	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (low0)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (low0))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (low0,
					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
		      low0 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
			&& integer_zerop (low0))
		      low0 = 0;
		    break;
		  default:
		    break;
		  }

	      /* Canonicalize - [x, max] into - [x, -].  */
	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (high1)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (high1))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (high1,
					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
		      high1 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
						       high1, 1,
						       integer_one_node, 1)))
		      high1 = 0;
		    break;
		  default:
		    break;
		  }

	      /* The ranges might be also adjacent between the maximum and
		 minimum values of the given type.  For
		 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
		 return + [x + 1, y - 1].  */
	      if (low0 == 0 && high1 == 0)
		{
		  low = range_successor (high0);
		  high = range_predecessor (low1);
		  if (low == 0 || high == 0)
		    return 0;

		  in_p = 1;
		}
	      else
		return 0;
	    }
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
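
/* Illustrative example (added commentary): merging the two "in" ranges
   produced by "x >= 2" and "x <= 9", i.e. +[2, -] and +[-, 9], gives
   the single range +[2, 9].  Merging the disjoint "in" ranges +[0, 4]
   and +[6, 9] in the AND case succeeds too, but yields the always-false
   range (*PIN_P == 0 with *PLOW == *PHIGH == 0).  */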
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is being used also to optimize
   A op B ? C : A, by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (location_t loc, tree type,
				tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && (FLOAT_TYPE_P (TREE_TYPE (arg01))
	  ? real_zerop (arg01)
	  : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
	     /* In the case that A is of the form X-Y, '-A' (arg2) may
		have already been folded to Y-X, check for that.  */
	  || (TREE_CODE (arg1) == MINUS_EXPR
	      && TREE_CODE (arg2) == MINUS_EXPR
	      && operand_equal_p (TREE_OPERAND (arg1, 0),
				  TREE_OPERAND (arg2, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg1, 1),
				  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	tem = fold_convert_loc (loc, arg1_type, arg1);
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
							  negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert_loc (loc, signed_type_for
				   (TREE_TYPE (arg1)), arg1);
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case LE_EXPR:
      case LT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert_loc (loc, signed_type_for
				   (TREE_TYPE (arg1)), arg1);
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return negate_expr (fold_convert_loc (loc, type, tem));
      default:
	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      else if (comp_code == EQ_EXPR)
	return build_zero_cst (type);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
	 as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
	  || VECTOR_TYPE_P (type)
	  || (strcmp (lang_hooks.name, "GNU C++") != 0
	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
	  || ! maybe_lvalue_p (arg1)
	  || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
	{
	  comp_type = type;
	  comp_op0 = arg1;
	  comp_op1 = arg2;
	}

      switch (comp_code)
	{
	case EQ_EXPR:
	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
	case NE_EXPR:
	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
	case LE_EXPR:
	case LT_EXPR:
	case UNLE_EXPR:
	case UNLT_EXPR:
	  /* In C++ a ?: expression can be an lvalue, so put the
	     operand which will be used if they are equal first
	     so that we can convert this back to the
	     corresponding COND_EXPR.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
		    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2_loc (loc, MIN_EXPR, comp_type,
				       comp_op1, comp_op0);
	      return pedantic_non_lvalue_loc (loc,
					      fold_convert_loc (loc, type, tem));
	    }
	  break;
	case GE_EXPR:
	case GT_EXPR:
	case UNGE_EXPR:
	case UNGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
		    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2_loc (loc, MAX_EXPR, comp_type,
				       comp_op1, comp_op0);
	      return pedantic_non_lvalue_loc (loc,
					      fold_convert_loc (loc, type, tem));
	    }
	  break;
	case UNEQ_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, arg2));
	  break;
	case LTGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, arg1));
	  break;
	default:
	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	  break;
	}
    }

  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
	if (TREE_CODE (arg1) == INTEGER_CST)
	  break;
	/* We can replace A with C1 in this case.  */
	arg1 = fold_convert_loc (loc, type, arg01);
	return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);

      case LT_EXPR:
	/* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
	   MIN_EXPR, to preserve the signedness of the comparison.  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, tem));
	  }
	break;

      case LE_EXPR:
	/* If C1 is C2 - 1, this is min(A, C2), with the same care
	   as above.  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, tem));
	  }
	break;

      case GT_EXPR:
	/* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
	   MAX_EXPR, to preserve the signedness of the comparison.  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	  }
	break;

      case GE_EXPR:
	/* If C1 is C2 + 1, this is max(A, C2), with the same care as above.  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	  }
	break;

      default:
	break;
      }

  return NULL_TREE;
}
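
/* Illustrative example (added commentary): given the COND_EXPR
   "a >= 0 ? a : -a" on an int A, comp_code is GE_EXPR, arg01 is 0 and
   arg2 is -a, so the first switch above folds the whole expression to
   ABS_EXPR <a>; likewise "a < b ? a : b" becomes MIN_EXPR <a, b> when
   NaNs need not be honored.  */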
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
		false) >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
	       || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  bool strict_overflow_p = false;
  tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
  tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
  tree tem;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when simplifying range test");

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (loc, type,
					 lhs != 0 ? lhs
					 : rhs != 0 ? rhs : integer_zero_node,
					 in_p, low, high))))
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
      return or_op ? invert_truthvalue_loc (loc, tem) : tem;
    }

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
	   && lhs != 0 && rhs != 0
	   && (code == TRUTH_ANDIF_EXPR
	       || code == TRUTH_ORIF_EXPR)
	   && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build2_loc (loc, code == TRUTH_ANDIF_EXPR
			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			   type, op0, op1);

      else if (!lang_hooks.decls.global_bindings_p ()
	       && !CONTAINS_PLACEHOLDER_P (lhs))
	{
	  tree common = save_expr (lhs);

	  if (0 != (lhs = build_range_check (loc, type, common,
					     or_op ? ! in0_p : in0_p,
					     low0, high0))
	      && (0 != (rhs = build_range_check (loc, type, common,
						 or_op ? ! in1_p : in1_p,
						 low1, high1))))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (warnmsg,
				       WARN_STRICT_OVERFLOW_COMPARISON);
	      return build2_loc (loc, code == TRUTH_ANDIF_EXPR
				 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
				 type, lhs, rhs);
	    }
	}
    }

  return 0;
}
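
/* Illustrative example (added commentary): for
   "ch >= '0' && ch <= '9'", make_range gives the same variable CH for
   both operands with ranges +['0', -] and +[-, '9']; merge_ranges
   combines them into +['0', '9'] and build_range_check then emits a
   single test, roughly (unsigned char) (ch - '0') <= 9.  */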
/* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is signed-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1));

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (signed_type_for (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
			fold_convert (TREE_TYPE (c), mask));
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
}
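
/* Worked example (added commentary): with P == 4, MODESIZE == 8 and no
   MASK, C == 0xfb (the already sign-extended 4-bit value 0b1011) gives
   temp == 0xf0 and C ^ temp == 0x0b, i.e. the extra bits are all zero;
   whereas C == 0x0b (not sign-extended) gives C ^ temp == 0xfb, with
   the extra bits nonzero.  */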
/* For an expression that has the form
     (A && B) || ~B
   or
     (A || B) && ~B,
   we can drop one of the inner expressions and simplify to
     A || ~B
   or
     A && ~B
   LOC is the location of the resulting expression.  OP is the inner
   logical operation; the left-hand side in the examples above, while CMPOP
   is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
   removing a condition that guards another, as in
     (A != NULL && A->...) || A == NULL
   which we must not transform.  If RHS_ONLY is true, only eliminate the
   right-most operand of the inner logical operation.  */

static tree
merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
				 bool rhs_only)
{
  tree type = TREE_TYPE (cmpop);
  enum tree_code code = TREE_CODE (cmpop);
  enum tree_code truthop_code = TREE_CODE (op);
  tree lhs = TREE_OPERAND (op, 0);
  tree rhs = TREE_OPERAND (op, 1);
  tree orig_lhs = lhs, orig_rhs = rhs;
  enum tree_code rhs_code = TREE_CODE (rhs);
  enum tree_code lhs_code = TREE_CODE (lhs);
  enum tree_code inv_code;

  if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
    return NULL_TREE;

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return NULL_TREE;

  if (rhs_code == truthop_code)
    {
      tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
      if (newrhs != NULL_TREE)
	{
	  rhs = newrhs;
	  rhs_code = TREE_CODE (rhs);
	}
    }
  if (lhs_code == truthop_code && !rhs_only)
    {
      tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
      if (newlhs != NULL_TREE)
	{
	  lhs = newlhs;
	  lhs_code = TREE_CODE (lhs);
	}
    }

  inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
  if (inv_code == rhs_code
      && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return lhs;
  if (!rhs_only && inv_code == lhs_code
      && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return rhs;
  if (rhs != orig_rhs || lhs != orig_lhs)
    return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
			    lhs, rhs);
  return NULL_TREE;
}
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with the a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
		    tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  HOST_WIDE_INT first_bit, end_bit;
  int volatilep;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
		    build_int_cst (TREE_TYPE (lhs), 0));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
		    build_int_cst (TREE_TYPE (rhs), 0));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      if (operand_equal_p (ll_arg, rl_arg, 0)
	  && operand_equal_p (lr_arg, rr_arg, 0))
	{
	  result = combine_comparisons (loc, code, lcode, rcode,
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
      else if (operand_equal_p (ll_arg, rr_arg, 0)
	       && operand_equal_p (lr_arg, rl_arg, 0))
	{
	  result = combine_comparisons (loc, code, lcode,
					swap_tree_comparison (rcode),
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  */

  if (BRANCH_COST (optimize_function_for_speed_p (cfun),
		   false) >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
	  && lcode == NE_EXPR && integer_zerop (lr_arg)
	  && rcode == NE_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2_loc (loc, NE_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2_loc (loc, EQ_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (loc, ll_arg,
				     &ll_bitsize, &ll_bitpos, &ll_mode,
				     &ll_unsignedp, &volatilep, &ll_mask,
				     &ll_and_mask);
  lr_inner = decode_field_reference (loc, lr_arg,
				     &lr_bitsize, &lr_bitpos, &lr_mode,
				     &lr_unsignedp, &volatilep, &lr_mask,
				     &lr_and_mask);
  rl_inner = decode_field_reference (loc, rl_arg,
				     &rl_bitsize, &rl_bitpos, &rl_mode,
				     &rl_unsignedp, &volatilep, &rl_mask,
				     &rl_and_mask);
  rr_inner = decode_field_reference (loc, rr_arg,
				     &rr_bitsize, &rr_bitpos, &rr_mode,
				     &rr_unsignedp, &volatilep, &rr_mask,
				     &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
	   || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
	{
	  /* Make the left operand unsigned, since we are only interested
	     in the value of one bit.  Otherwise we are doing the wrong
	     thing below.  */
	  ll_unsignedp = 1;
	  l_const = ll_mask;
	}
      else
	return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
	{
	  rl_unsignedp = 1;
	  r_const = rl_mask;
	}
      else
	return 0;
    }

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
			  volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
			 size_int (xll_bitpos));
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
			 size_int (xrl_bitpos));

  if (l_const)
    {
      l_const = fold_convert_loc (loc, lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,
							 lntype, ll_mask))))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }
  if (r_const)
    {
      r_const = fold_convert_loc (loc, lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,
							 lntype, rl_mask))))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
	  /* Make sure the two fields on the right
	     correspond to the left without being swapped.  */
	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
	return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
			      volatilep);
      if (rnmode == VOIDmode)
	return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
	{
	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
	}

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
							    rntype, lr_mask),
			     size_int (xlr_bitpos));
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
							    rntype, rr_mask),
			     size_int (xrr_bitpos));

      /* Make a mask that corresponds to both fields being compared.
	 Do this for both items being compared.  If the operands are the
	 same size and the bits being compared are in the same position
	 then we can do this by masking both and comparing the masked
	 results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
	{
	  lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
				    ll_unsignedp || rl_unsignedp);
	  if (! all_ones_mask_p (ll_mask, lnbitsize))
	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

	  rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
				    lr_unsignedp || rr_unsignedp);
	  if (! all_ones_mask_p (lr_mask, rnbitsize))
	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
	}

      /* There is still another way we can do something:  If both pairs of
	 fields being compared are adjacent, we may be able to make a wider
	 field containing them both.

	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
	 the mask must be shifted to account for the shift done by
	 make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
	   && lr_bitsize + lr_bitpos == rr_bitpos)
	  || (ll_bitpos == rl_bitpos + rl_bitsize
	      && lr_bitpos == rr_bitpos + rr_bitsize))
	{
	  tree type;

	  lhs = make_bit_field_ref (loc, ll_inner, lntype,
				    ll_bitsize + rl_bitsize,
				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
	  rhs = make_bit_field_ref (loc, lr_inner, rntype,
				    lr_bitsize + rr_bitsize,
				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
				 size_int (MIN (xll_bitpos, xrl_bitpos)));
	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
				 size_int (MIN (xlr_bitpos, xrr_bitpos)));

	  /* Convert to the smaller type before masking out unwanted bits.  */
	  type = lntype;
	  if (lntype != rntype)
	    {
	      if (lnbitsize > rnbitsize)
		{
		  lhs = fold_convert_loc (loc, rntype, lhs);
		  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
		  type = rntype;
		}
	      else if (lnbitsize < rnbitsize)
		{
		  rhs = fold_convert_loc (loc, lntype, rhs);
		  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
		  type = lntype;
		}
	    }

	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
	}

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
			   const_binop (BIT_AND_EXPR, result, r_const)) != 1)
    {
      if (wanted_code == NE_EXPR)
	{
	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
	  return constant_boolean_node (true, truth_type);
	}
      else
	{
	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
	  return constant_boolean_node (false, truth_type);
	}
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
			       ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);

  return build2_loc (loc, wanted_code, truth_type, result,
		     const_binop (BIT_IOR_EXPR, l_const, r_const));
}
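
/* Illustrative example (added commentary): for a struct with two
   adjacent bit-fields A and B, "p->a == 2 && p->b == 4" can be folded
   by the routine above into a single load, mask and compare,
   conceptually

     (*(word *) p & MASK) == CONST

   where MASK covers both fields and CONST combines the two shifted
   constants.  */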
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
			    tree op0, tree op1)
{
  tree arg0 = op0;
  enum tree_code op_code;
  tree comp_const;
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_OVERFLOW (minmax_const))
    return NULL_TREE;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (code)
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      {
	tree tem
	  = optimize_minmax_comparison (loc,
					invert_tree_comparison (code, false),
					type, op0, op1);
	if (tem)
	  return invert_truthvalue_loc (loc, tem);
	return NULL_TREE;
      }

    case GE_EXPR:
      return
	fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
			 optimize_minmax_comparison
			 (loc, EQ_EXPR, type, arg0, comp_const),
			 optimize_minmax_comparison
			 (loc, GT_EXPR, type, arg0, comp_const));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
	/* MAX (X, 0) == 0  ->  X <= 0  */
	return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR && consts_lt)
	/* MAX (X, 0) == 5  ->  X == 5   */
	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) == -1  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else if (consts_equal)
	/* MIN (X, 0) == 0  ->  X >= 0  */
	return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);

      else if (consts_lt)
	/* MIN (X, 0) == 5  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
	/* MIN (X, 0) == -1  ->  X == -1  */
	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
	/* MAX (X, 0) > 0  ->  X > 0
	   MAX (X, 0) > 5  ->  X > 5  */
	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) > -1  ->  true  */
	return omit_one_operand_loc (loc, type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
	/* MIN (X, 0) > 0  ->  false
	   MIN (X, 0) > 5  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
	/* MIN (X, 0) > -1  ->  X > -1  */
	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

    default:
      return NULL_TREE;
    }
}
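
/* Illustrative example (added commentary): "MAX (x, 0) > 5" folds to
   "x > 5" (the 0 arm can never exceed 5), and "MIN (x, 0) > -1" folds
   to "x > -1"; comparisons such as LE are handled by inverting to GT
   and then inverting the folded result back.  */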
/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.

   We set *STRICT_OVERFLOW_P to true if the return values depends on
   signed overflow being undefined.  Otherwise we do not change
   *STRICT_OVERFLOW_P.  */

static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
		bool *strict_overflow_p)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  static int depth;
  tree ret;

  if (depth > 3)
    return NULL;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
  depth--;

  return ret;
}
static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
		  bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
				   > GET_MODE_SIZE (TYPE_MODE (type)))
		? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;
  bool sub_strict_overflow_p;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
	 or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
	return const_binop (code, fold_convert (ctype, t),
			    fold_convert (ctype, c));
      break;

    CASE_CONVERT: case NON_LVALUE_EXPR:
      /* If op0 is an expression ...  */
      if ((COMPARISON_CLASS_P (op0)
	   || UNARY_CLASS_P (op0)
	   || BINARY_CLASS_P (op0)
	   || VL_EXP_CLASS_P (op0)
	   || EXPRESSION_CLASS_P (op0))
	  /* ... and has wrapping overflow, and its type is smaller
	     than ctype, then we cannot pass through as widening.  */
	  && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
	       && (TYPE_PRECISION (ctype)
		   > TYPE_PRECISION (TREE_TYPE (op0))))
	      /* ... or this is a truncation (t is narrower than op0),
		 then we cannot pass through this narrowing.  */
	      || (TYPE_PRECISION (type)
		  < TYPE_PRECISION (TREE_TYPE (op0)))
	      /* ... or signedness changes for division or modulus,
		 then we cannot pass through this conversion.  */
	      || (code != MULT_EXPR
		  && (TYPE_UNSIGNED (ctype)
		      != TYPE_UNSIGNED (TREE_TYPE (op0))))
	      /* ... or has undefined overflow while the converted to
		 type has not, we cannot do the operation in the inner type
		 as that would introduce undefined overflow.  */
	      || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
		  && !TYPE_OVERFLOW_UNDEFINED (type))))
	break;

      /* Pass the constant down and see if we can make a simplification.  If
	 we can, replace this expression with the inner simplification for
	 possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
	  && TREE_CODE (t2) == INTEGER_CST
	  && !TREE_OVERFLOW (t2)
	  && (0 != (t1 = extract_muldiv (op0, t2, code,
					 code == MULT_EXPR
					 ? ctype : NULL_TREE,
					 strict_overflow_p))))
	return t1;
      break;

    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
	 must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
	{
	  tree cstype = (*signed_type_for) (ctype);
	  if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
	      != 0)
	    {
	      t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
	      return fold_convert (ctype, t1);
	    }
	  break;
	}
      /* If the constant is negative, we cannot simplify this.  */
      if (tree_int_cst_sgn (c) == -1)
	break;
      /* FALLTHROUGH */
    case NEGATE_EXPR:
      /* For division and modulus, type can't be unsigned, as e.g.
	 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
	 For signed types, even with wrapping overflow, this is fine.  */
      if (code != MULT_EXPR && TYPE_UNSIGNED (type))
	break;
      if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
	  != 0)
	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
	 this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
	break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      sub_strict_overflow_p = false;
      if ((t1 = extract_muldiv (op0, c, code, wide_type,
				&sub_strict_overflow_p)) != 0
	  && (t2 = extract_muldiv (op1, c, code, wide_type,
				   &sub_strict_overflow_p)) != 0)
	{
	  if (tree_int_cst_sgn (c) < 0)
	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
	 or floor division, by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  Signed
	 left-shift overflow is implementation-defined rather than
	 undefined in C90, so do not convert signed left shift into
	 multiplication.  */
      if (TREE_CODE (op1) == INTEGER_CST
	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
	  && TREE_INT_CST_HIGH (op1) == 0
	  && 0 != (t1 = fold_convert (ctype,
				      const_binop (LSHIFT_EXPR,
						   size_one_node, op1)))
	  && !TREE_OVERFLOW (t1))
	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
				       ? MULT_EXPR : FLOOR_DIV_EXPR,
				       ctype,
				       fold_convert (ctype, op0),
				       t1),
			       c, code, wide_type, strict_overflow_p);
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
      sub_strict_overflow_p = false;
      t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
      t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
      if (t1 != 0 && t2 != 0
	  && (code == MULT_EXPR
	      /* If not multiplication, we can only do this if both operands
		 are divisible by c.  */
	      || (multiple_of_p (ctype, op0, c)
		  && multiple_of_p (ctype, op1, c))))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}

      /* If this was a subtraction, negate OP1 and set it to be an addition.
	 This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
	{
	  tcode = PLUS_EXPR, op1 = negate_expr (op1);
	  /* If OP1 was not easily negatable, the constant may be OP0.  */
	  if (TREE_CODE (op0) == INTEGER_CST)
	    {
	      tree tem = op0;
	      op0 = op1;
	      op1 = tem;
	      tem = t1;
	      t1 = t2;
	      t2 = tem;
	    }
	}

      if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If either OP1 or C are negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
	    break;
	}

      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
	{
	  op1 = const_binop (code, fold_convert (ctype, op1),
			     fold_convert (ctype, c));
	  /* We allow the constant to overflow with wrapping semantics.  */
	  if (op1 == 0
	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
	    break;
	}
      else
	break;

      /* If we have an unsigned type, we cannot widen the operation since it
	 will change the result if the original computation overflowed.  */
      if (TYPE_UNSIGNED (ctype) && ctype != type)
	break;

      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow
	 and overflow is defined.  With undefined overflow
	 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.  */
      if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
	return fold_build2 (tcode, ctype,
			    fold_build2 (code, ctype,
					 fold_convert (ctype, op0),
					 fold_convert (ctype, c)),
			    op1);

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  /* If the multiplication can overflow we cannot optimize this.  */
	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
	{
	  *strict_overflow_p = true;
	  return omit_one_operand (type, integer_zero_node, op0);
	}

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && (t1 = extract_muldiv (op0, c, code, wide_type,
				   strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && (t1 = extract_muldiv (op1, c, code, wide_type,
					strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
			    fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
	return 0;

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code)
	{
	  double_int mul;
	  bool overflow_p;
	  unsigned prec = TYPE_PRECISION (ctype);
	  bool uns = TYPE_UNSIGNED (ctype);
	  double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
	  double_int dic = tree_to_double_int (c).ext (prec, uns);
	  mul = diop1.mul_with_sign (dic, false, &overflow_p);
	  overflow_p = ((!uns && overflow_p)
			| TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
	  if (!double_int_fits_to_tree_p (ctype, mul)
	      && ((uns && tcode != MULT_EXPR) || !uns))
	    overflow_p = 1;
	  if (!overflow_p)
	    return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				double_int_to_tree (ctype, mul));
	}

      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with an
	 operation of either CODE or TCODE.

	 If we have an unsigned type, we cannot do this since it will change
	 the result if the original computation overflowed.  */
      if (TYPE_OVERFLOW_UNDEFINED (ctype)
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
		  && code != MULT_EXPR)))
	{
	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     op1, c)));
	    }
	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (code, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     c, op1)));
	    }
	}
      break;

    default:
      break;
    }

  return 0;
}
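
/* Illustrative example (added commentary): extract_muldiv on
   T = (x * 8) + (y * 16) with C = 4 and CODE a division distributes
   the division into both MULT_EXPR operands, since both are multiples
   of 4, and returns (x * 2) + (y * 4), as the comment before
   extract_muldiv describes.  */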
/* Return a node which has the indicated constant VALUE (either 0 or
   1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
   and is of the indicated TYPE.  */

tree
constant_boolean_node (bool value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;

  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;

  else if (TREE_CODE (type) == VECTOR_TYPE)
    return build_vector_from_val (type,
				  build_int_cst (TREE_TYPE (type),
						 value ? -1 : 0));

  else
    return fold_convert (type, value ? integer_one_node : integer_zero_node);
}
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (location_t loc,
				     enum tree_code code,
				     tree type, tree op0, tree op1,
				     tree cond, tree arg, int cond_first_p)
{
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;
  enum tree_code cond_code = COND_EXPR;

  if (TREE_CODE (cond) == COND_EXPR
      || TREE_CODE (cond) == VEC_COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an exception, then it does not make
	 sense to try to perform a logical or arithmetic operation
	 involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	rhs = false_value;
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
    cond_code = VEC_COND_EXPR;

  /* This transformation is only worthwhile if we don't have to wrap ARG
     in a SAVE_EXPR and the operation can be simplified without recursing
     on at least one of the branches once it's pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg)
      && (TREE_SIDE_EFFECTS (arg)
	  || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
	  || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
    return NULL_TREE;

  arg = fold_convert_loc (loc, arg_type, arg);
  if (lhs == 0)
    {
      true_value = fold_convert_loc (loc, cond_type, true_value);
      if (cond_first_p)
	lhs = fold_build2_loc (loc, code, type, true_value, arg);
      else
	lhs = fold_build2_loc (loc, code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert_loc (loc, cond_type, false_value);
      if (cond_first_p)
	rhs = fold_build2_loc (loc, code, type, false_value, arg);
      else
	rhs = fold_build2_loc (loc, code, type, arg, false_value);
    }

  /* Check that we have simplified at least one of the branches.  */
  if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
    return NULL_TREE;

  return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
}
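
/* Illustrative sketch of the transformation above (not part of the
   original code, guarded out): distributing `+' over a conditional
   pays off when at least one branch then becomes constant.  */
#if 0
static int
cond_arg_example (int a, int b)
{
  int before = a + (b ? 1 : 0);
  int after = b ? a + 1 : a + 0;	/* What the fold produces.  */
  return before == after;		/* Always 1.  */
}
#endif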
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

bool
fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (TYPE_MODE (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (TREE_CODE (addend) == REAL_CST
      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}
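
/* Illustrative sketch (guarded out, assumes IEEE 754 doubles): why the
   fold is refused when signed zeros are honored.  With X == -0.0 and
   round-to-nearest, X + 0.0 is +0.0, so folding X + 0.0 to X would
   flip the sign of the zero.  */
#if 0
#include <math.h>

static int
signed_zero_example (void)
{
  double x = -0.0;
  return signbit (x) != signbit (x + 0.0);	/* 1: the sign changed.  */
}
#endif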
/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_mathfn_compare (location_t loc,
		     enum built_in_function fcode, enum tree_code code,
		     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      tree arg = CALL_EXPR_ARG (arg0, 0);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
	{
	  /* sqrt(x) < y is always false, if y is negative.  */
	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
	    return omit_one_operand_loc (loc, type, integer_zero_node, arg);

	  /* sqrt(x) > y is always true, if y is negative and we
	     don't care about NaNs, i.e. negative values of x.  */
	  if (code == NE_EXPR || !HONOR_NANS (mode))
	    return omit_one_operand_loc (loc, type, integer_one_node, arg);

	  /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
	  return fold_build2_loc (loc, GE_EXPR, type, arg,
				  build_real (TREE_TYPE (arg), dconst0));
	}
      else if (code == GT_EXPR || code == GE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) > y is x == +Inf, when y is very large.  */
	      if (HONOR_INFINITIES (mode))
		return fold_build2_loc (loc, EQ_EXPR, type, arg,
					build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) > y is always false, when y is very large
		 and we don't care about infinities.  */
	      return omit_one_operand_loc (loc, type, integer_zero_node, arg);
	    }

	  /* sqrt(x) > c is the same as x > c*c.  */
	  return fold_build2_loc (loc, code, type, arg,
				  build_real (TREE_TYPE (arg), c2));
	}
      else if (code == LT_EXPR || code == LE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) < y is always true, when y is a very large
		 value and we don't care about NaNs or Infinities.  */
	      if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
		return omit_one_operand_loc (loc, type, integer_one_node, arg);

	      /* sqrt(x) < y is x != +Inf when y is very large and we
		 don't care about NaNs.  */
	      if (! HONOR_NANS (mode))
		return fold_build2_loc (loc, NE_EXPR, type, arg,
					build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) < y is x >= 0 when y is very large and we
		 don't care about Infinities.  */
	      if (! HONOR_INFINITIES (mode))
		return fold_build2_loc (loc, GE_EXPR, type, arg,
					build_real (TREE_TYPE (arg), dconst0));

	      /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
	      arg = save_expr (arg);
	      return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				      fold_build2_loc (loc, GE_EXPR, type, arg,
						       build_real (TREE_TYPE (arg),
								   dconst0)),
				      fold_build2_loc (loc, NE_EXPR, type, arg,
						       build_real (TREE_TYPE (arg),
								   c2)));
	    }

	  /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
	  if (! HONOR_NANS (mode))
	    return fold_build2_loc (loc, code, type, arg,
				    build_real (TREE_TYPE (arg), c2));

	  /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
	  arg = save_expr (arg);
	  return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				  fold_build2_loc (loc, GE_EXPR, type, arg,
						   build_real (TREE_TYPE (arg),
							       dconst0)),
				  fold_build2_loc (loc, code, type, arg,
						   build_real (TREE_TYPE (arg),
							       c2)));
	}
    }

  return NULL_TREE;
}
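
/* Illustrative sketch (guarded out): the central rewrite above turns a
   sqrt call into a multiplication on the other side of the comparison.
   The identity is exact only up to the rounding of c*c, which is why
   callers apply it under unsafe-math semantics (an assumption worth
   keeping in mind).  */
#if 0
#include <math.h>

static int
sqrt_compare_example (double x)
{
  /* sqrt(x) > 3.0 becomes x > 9.0; both sides are false for NaN and
     for negative x, and agree elsewhere except possibly at the
     rounding boundary.  */
  return (sqrt (x) > 3.0) == (x > 9.0);
}
#endif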
/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_inf_compare (location_t loc, enum tree_code code, tree type,
		  tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
	return NULL_TREE;
      return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. ! isnan (x).  */
      arg0 = save_expr (arg0);
      return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
	return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
				arg0, build_real (TREE_TYPE (arg0), max));

      temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));
      return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);

    default:
      break;
    }

  return NULL_TREE;
}
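
/* Illustrative sketch (guarded out, assumes IEEE doubles): the LT_EXPR
   case above.  x < +Inf holds exactly when x <= DBL_MAX, NaN included
   (both sides are false for NaN), so no test against infinity is
   needed at run time.  */
#if 0
#include <float.h>
#include <math.h>

static int
inf_compare_example (double x)
{
  return (x < INFINITY) == (x <= DBL_MAX);	/* 1 for every x.  */
}
#endif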
/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_div_compare (location_t loc,
		  enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  double_int val;
  bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
  bool neg_overflow;
  bool overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1);  */
  val = TREE_INT_CST (arg01)
	.mul_with_sign (TREE_INT_CST (arg1), unsigned_p, &overflow);
  prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
  neg_overflow = false;

  if (unsigned_p)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp).  */
      val = TREE_INT_CST (prod)
	    .add_with_sign (TREE_INT_CST (tmp), unsigned_p, &overflow);
      hi = force_fit_type_double (TREE_TYPE (arg00), val,
				  -1, overflow | TREE_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  neg_overflow = true;
	  lo = int_const_binop (MINUS_EXPR, prod, tmp);
	  hi = prod;
	  break;

	case 0:
	  lo = fold_negate_const (tmp, TREE_TYPE (arg0));
	  hi = tmp;
	  break;

	case 1:
	  hi = int_const_binop (PLUS_EXPR, prod, tmp);
	  lo = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  hi = int_const_binop (MINUS_EXPR, prod, tmp);
	  lo = prod;
	  break;

	case 0:
	  hi = fold_negate_const (tmp, TREE_TYPE (arg0));
	  lo = tmp;
	  break;

	case 1:
	  neg_overflow = true;
	  lo = int_const_binop (PLUS_EXPR, prod, tmp);
	  hi = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand_loc (loc, type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}
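
/* Illustrative sketch (guarded out): the range check built above.  With
   truncating division and a positive divisor, x / 4 == 3 holds exactly
   for x in [12, 15], so the division disappears entirely.  */
#if 0
static int
div_compare_example (int x)
{
  return (x / 4 == 3) == (x >= 12 && x <= 15);	/* Always 1.  */
}
#endif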
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of the test
   using a sign test.  Otherwise return NULL.  TYPE is the desired
   result type.  */

static tree
fold_single_bit_test_into_sign_test (location_t loc,
				     enum tree_code code, tree arg0, tree arg1,
				     tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      /* If we have (A & C) != 0 where C is the sign bit of A, convert
	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      if (arg00 != NULL_TREE
	  /* This is only a win if casting to a signed type is cheap,
	     i.e. when arg00's type is not a partial mode.  */
	  && TYPE_PRECISION (TREE_TYPE (arg00))
	     == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
	{
	  tree stype = signed_type_for (TREE_TYPE (arg00));
	  return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
				  result_type,
				  fold_convert_loc (loc, stype, arg00),
				  build_int_cst (stype, 0));
	}
    }

  return NULL_TREE;
}
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */

tree
fold_single_bit_test (location_t loc, enum tree_code code,
		      tree arg0, tree arg1, tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem, one;

      /* First, see if we can fold the single bit test into a sign-bit
	 test.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
						 result_type);
      if (tem)
	return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
	 convert that into ((A >> C2) & 1).  Where C2 = log2(C).
	 Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
		      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert_loc (loc, intermediate_type, inner);

      if (bitnum != 0)
	inner = build2 (RSHIFT_EXPR, intermediate_type,
			inner, size_int (bitnum));

      one = build_int_cst (intermediate_type, 1);

      if (code == EQ_EXPR)
	inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type,
				 inner, one);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);

      /* Make sure to return the proper type.  */
      inner = fold_convert_loc (loc, result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
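
/* Illustrative sketch (guarded out, assumes 32-bit unsigned int): the
   shift/AND form produced above.  (a & 8) != 0 becomes (a >> 3) & 1;
   for == the intermediate is additionally XORed with 1 before the
   final AND.  */
#if 0
static unsigned
single_bit_test_example (unsigned a)
{
  unsigned ne = ((a & 8u) != 0) == ((a >> 3) & 1u);
  unsigned eq = ((a & 8u) == 0) == (((a >> 3) ^ 1u) & 1u);
  return ne && eq;	/* Always 1.  */
}
#endif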
/* Check whether we are allowed to reorder operands arg0 and arg1,
   such that the evaluation of arg1 occurs before arg0.  */

static bool
reorder_operands_p (const_tree arg0, const_tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
	 && ! TREE_SIDE_EFFECTS (arg1);
}
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */

bool
tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
{
  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return 0;
  if (TREE_CODE (arg0) == INTEGER_CST)
    return 1;

  if (TREE_CODE (arg1) == REAL_CST)
    return 0;
  if (TREE_CODE (arg0) == REAL_CST)
    return 1;

  if (TREE_CODE (arg1) == FIXED_CST)
    return 0;
  if (TREE_CODE (arg0) == FIXED_CST)
    return 1;

  if (TREE_CODE (arg1) == COMPLEX_CST)
    return 0;
  if (TREE_CODE (arg0) == COMPLEX_CST)
    return 1;

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (optimize_function_for_size_p (cfun))
    return 0;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return 1;

  /* Put SSA_NAMEs last.  */
  if (TREE_CODE (arg1) == SSA_NAME)
    return 0;
  if (TREE_CODE (arg0) == SSA_NAME)
    return 1;

  /* Put variables last.  */
  if (DECL_P (arg1))
    return 0;
  if (DECL_P (arg0))
    return 1;

  return 0;
}
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
   ARG0 is extended to a wider type.  */

static tree
fold_widened_comparison (location_t loc, enum tree_code code,
			 tree type, tree arg0, tree arg1)
{
  tree arg0_unw = get_unwidened (arg0, NULL_TREE);
  tree arg1_unw;
  tree shorter_type, outer_type;
  tree min, max;
  bool above, below;

  if (arg0_unw == arg0)
    return NULL_TREE;
  shorter_type = TREE_TYPE (arg0_unw);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (shorter_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
    return NULL_TREE;

  arg1_unw = get_unwidened (arg1, NULL_TREE);

  /* If possible, express the comparison in the shorter mode.  */
  if ((code == EQ_EXPR || code == NE_EXPR
       || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
      && (TREE_TYPE (arg1_unw) == shorter_type
	  || ((TYPE_PRECISION (shorter_type)
	       >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
	      && (TYPE_UNSIGNED (shorter_type)
		  == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
	  || (TREE_CODE (arg1_unw) == INTEGER_CST
	      && (TREE_CODE (shorter_type) == INTEGER_TYPE
		  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
	      && int_fits_type_p (arg1_unw, shorter_type))))
    return fold_build2_loc (loc, code, type, arg0_unw,
			    fold_convert_loc (loc, shorter_type, arg1_unw));

  if (TREE_CODE (arg1_unw) != INTEGER_CST
      || TREE_CODE (shorter_type) != INTEGER_TYPE
      || !int_fits_type_p (arg1_unw, shorter_type))
    return NULL_TREE;

  /* If we are comparing with the integer that does not fit into the range
     of the shorter type, the result is known.  */
  outer_type = TREE_TYPE (arg1_unw);
  min = lower_bound_in_type (outer_type, shorter_type);
  max = upper_bound_in_type (outer_type, shorter_type);

  above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   max, arg1_unw));
  below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   arg1_unw, min));

  switch (code)
    {
    case EQ_EXPR:
      if (above || below)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;

    case NE_EXPR:
      if (above || below)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;

    case LT_EXPR:
    case LE_EXPR:
      if (above)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      else if (below)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;

    case GT_EXPR:
    case GE_EXPR:
      if (above)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      else if (below)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
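
/* Illustrative sketch (guarded out): the "result is known" path above.
   A signed char widened to int can never reach 300, so the comparison
   folds to a constant while keeping any side effects of the operand.  */
#if 0
static int
widened_compare_example (signed char c)
{
  return (int) c < 300;		/* Folds to 1: c <= 127 < 300.  */
}
#endif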
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
   ARG0 just the signedness is changed.  */

static tree
fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
			      tree arg0, tree arg1)
{
  tree arg0_inner;
  tree inner_type, outer_type;

  if (!CONVERT_EXPR_P (arg0))
    return NULL_TREE;

  outer_type = TREE_TYPE (arg0);
  arg0_inner = TREE_OPERAND (arg0, 0);
  inner_type = TREE_TYPE (arg0_inner);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (inner_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) != INTEGER_CST
      && !(CONVERT_EXPR_P (arg1)
	   && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
    return NULL_TREE;

  if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
      && code != NE_EXPR
      && code != EQ_EXPR)
    return NULL_TREE;

  if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) == INTEGER_CST)
    arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
				  0, TREE_OVERFLOW (arg1));
  else
    arg1 = fold_convert_loc (loc, inner_type, arg1);

  return fold_build2_loc (loc, code, type, arg0_inner, arg1);
}
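
/* Illustrative sketch (guarded out, assumes 32-bit int): equality is
   insensitive to a cast that changes only signedness at the same
   precision, so the cast is dropped from the compared operand.  */
#if 0
static int
sign_changed_compare_example (int x)
{
  return ((unsigned int) x == 5u) == (x == 5);	/* Always 1.  */
}
#endif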
/* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
   step of the array.  Reconstructs s and delta in the case of s *
   delta being an integer constant (and thus already folded).  ADDR is
   the address.  MULT is the multiplicative expression.  If the
   function succeeds, the new address expression is returned.
   Otherwise NULL_TREE is returned.  LOC is the location of the
   resulting expression.  */

static tree
try_move_mult_to_index (location_t loc, tree addr, tree op1)
{
  tree s, delta, step;
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;
  bool mdim = false;

  /* Strip the nops that might be added when converting op1 to sizetype.  */
  STRIP_NOPS (op1);

  /* Canonicalize op1 into a possibly non-constant delta
     and an INTEGER_CST s.  */
  if (TREE_CODE (op1) == MULT_EXPR)
    {
      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  s = arg0;
	  delta = arg1;
	}
      else if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  s = arg1;
	  delta = arg0;
	}
      else
	return NULL_TREE;
    }
  else if (TREE_CODE (op1) == INTEGER_CST)
    {
      delta = op1;
      s = NULL_TREE;
    }
  else
    {
      /* Pretend we have delta * 1.  */
      delta = op1;
      s = integer_one_node;
    }

  /* Handle &x.array the same as we would handle &x.array[0].  */
  if (TREE_CODE (ref) == COMPONENT_REF
      && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
    {
      tree domain;

      /* Remember if this was a multi-dimensional array.  */
      if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
	mdim = true;

      domain = TYPE_DOMAIN (TREE_TYPE (ref));
      if (! domain)
	goto cont;
      itype = TREE_TYPE (domain);

      step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
      if (TREE_CODE (step) != INTEGER_CST)
	goto cont;

      if (s)
	{
	  if (! tree_int_cst_equal (step, s))
	    goto cont;
	}
      else
	{
	  /* See if delta is a multiple of step.  */
	  tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
	  if (! tmp)
	    goto cont;
	  delta = tmp;
	}

      /* Only fold here if we can verify we do not overflow one
	 dimension of a multi-dimensional array.  */
      if (mdim)
	{
	  tree tmp;

	  if (!TYPE_MIN_VALUE (domain)
	      || !TYPE_MAX_VALUE (domain)
	      || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
	    goto cont;

	  tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
				 fold_convert_loc (loc, itype,
						   TYPE_MIN_VALUE (domain)),
				 fold_convert_loc (loc, itype, delta));
	  if (TREE_CODE (tmp) != INTEGER_CST
	      || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
	    goto cont;
	}

      /* We found a suitable component reference.  */

      pref = TREE_OPERAND (addr, 0);
      ret = copy_node (pref);
      SET_EXPR_LOCATION (ret, loc);

      ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
			fold_build2_loc
			  (loc, PLUS_EXPR, itype,
			   fold_convert_loc (loc, itype,
					     TYPE_MIN_VALUE
					       (TYPE_DOMAIN (TREE_TYPE (ref)))),
			   fold_convert_loc (loc, itype, delta)),
			NULL_TREE, NULL_TREE);
      return build_fold_addr_expr_loc (loc, ret);
    }

cont:

  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
	{
	  tree domain;

	  /* Remember if this was a multi-dimensional array.  */
	  if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
	    mdim = true;

	  domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
	  if (! domain)
	    continue;
	  itype = TREE_TYPE (domain);

	  step = array_ref_element_size (ref);
	  if (TREE_CODE (step) != INTEGER_CST)
	    continue;

	  if (s)
	    {
	      if (! tree_int_cst_equal (step, s))
		continue;
	    }
	  else
	    {
	      /* See if delta is a multiple of step.  */
	      tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
	      if (! tmp)
		continue;
	      delta = tmp;
	    }

	  /* Only fold here if we can verify we do not overflow one
	     dimension of a multi-dimensional array.  */
	  if (mdim)
	    {
	      tree tmp;

	      if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
		  || !TYPE_MAX_VALUE (domain)
		  || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
		continue;

	      tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
				     fold_convert_loc (loc, itype,
						       TREE_OPERAND (ref, 1)),
				     fold_convert_loc (loc, itype, delta));
	      if (!tmp
		  || TREE_CODE (tmp) != INTEGER_CST
		  || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
		continue;
	    }

	  break;
	}
      else
	mdim = false;

      if (!handled_component_p (ref))
	return NULL_TREE;
    }

  /* We found the suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  SET_EXPR_LOCATION (ret, loc);
  pos = ret;

  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  TREE_OPERAND (pos, 1)
    = fold_build2_loc (loc, PLUS_EXPR, itype,
		       fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
		       fold_convert_loc (loc, itype, delta));
  return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
}
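
/* Illustrative sketch (guarded out): the source-level shape of the
   rewrite above.  Pointer arithmetic that advances &arr[2] by j whole
   elements (j * sizeof (double) bytes at the tree level) is absorbed
   into the index, yielding &arr[2 + j].  */
#if 0
extern double example_arr[100];

static double *
move_mult_example (long j)
{
  return &example_arr[2] + j;	/* Becomes &example_arr[2 + j].  */
}
#endif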
/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */

static tree
fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
{
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;

  if (TREE_CODE (bound) == LT_EXPR)
    a = TREE_OPERAND (bound, 0);
  else if (TREE_CODE (bound) == GT_EXPR)
    a = TREE_OPERAND (bound, 1);
  else
    return NULL_TREE;

  typea = TREE_TYPE (a);
  if (!INTEGRAL_TYPE_P (typea)
      && !POINTER_TYPE_P (typea))
    return NULL_TREE;

  if (TREE_CODE (ineq) == LT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 1);
      y = TREE_OPERAND (ineq, 0);
    }
  else if (TREE_CODE (ineq) == GT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 0);
      y = TREE_OPERAND (ineq, 1);
    }
  else
    return NULL_TREE;

  if (TREE_TYPE (a1) != typea)
    return NULL_TREE;

  if (POINTER_TYPE_P (typea))
    {
      /* Convert the pointer types into integer before taking the difference.  */
      tree ta = fold_convert_loc (loc, ssizetype, a);
      tree ta1 = fold_convert_loc (loc, ssizetype, a1);
      diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
    }
  else
    diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);

  if (!diff || !integer_onep (diff))
    return NULL_TREE;

  return fold_build2_loc (loc, GE_EXPR, type, a, y);
}
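
/* Illustrative sketch (guarded out, unsigned arithmetic): when a < x
   already holds, a + 1 cannot wrap, so the sharp bound a + 1 > y can
   be weakened to the cheaper a >= y.  */
#if 0
static int
nonsharp_ineq_example (unsigned a, unsigned x, unsigned y)
{
  return (a < x && a + 1 > y) == (a < x && a >= y);	/* Always 1.  */
}
#endif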
/* Fold a sum or difference of at least one multiplication.
   Returns the folded tree or NULL if no simplification could be made.  */

static tree
fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
			  tree arg0, tree arg1)
{
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else if (TREE_CODE (arg0) == INTEGER_CST)
    {
      arg00 = build_one_cst (type);
      arg01 = arg0;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      arg00 = arg0;
      arg01 = build_one_cst (type);
    }
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else if (TREE_CODE (arg1) == INTEGER_CST)
    {
      arg10 = build_one_cst (type);
      /* As we canonicalize A - 2 to A + -2 get rid of that sign for
	 the purpose of this canonicalization.  */
      if (TREE_INT_CST_HIGH (arg1) == -1
	  && negate_expr_p (arg1)
	  && code == PLUS_EXPR)
	{
	  arg11 = negate_expr (arg1);
	  code = MINUS_EXPR;
	}
      else
	arg11 = arg1;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      arg10 = arg1;
      arg11 = build_one_cst (type);
    }
  same = NULL_TREE;

  if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
  else if (host_integerp (arg01, 0)
	   && host_integerp (arg11, 0))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = TREE_INT_CST_LOW (arg01);
      int11 = TREE_INT_CST_LOW (arg11);

      /* Move min of absolute values to int11.  */
      if (absu_hwi (int01) < absu_hwi (int11))
	{
	  tmp = int01, int01 = int11, int11 = tmp;
	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
	  maybe_same = alt0;
	  swap = true;
	}
      else
	maybe_same = arg11;

      if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
	  /* The remainder should not be a constant, otherwise we
	     end up folding i * 4 + 2 to (i * 2 + 1) * 2 which would
	     increase the number of multiplications necessary.  */
	  && TREE_CODE (arg10) != INTEGER_CST)
	{
	  alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
				  build_int_cst (TREE_TYPE (arg00),
						 int01 / int11));
	  alt1 = arg10;
	  same = maybe_same;
	  if (swap)
	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
	}
    }

  if (same)
    return fold_build2_loc (loc, MULT_EXPR, type,
			    fold_build2_loc (loc, code, type,
					     fold_convert_loc (loc, type, alt0),
					     fold_convert_loc (loc, type, alt1)),
			    fold_convert_loc (loc, type, same));

  return NULL_TREE;
}
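
/* Illustrative sketch (guarded out, unsigned to sidestep overflow
   caveats): the factorings tried above.  */
#if 0
static unsigned
plusminus_mult_example (unsigned a, unsigned b, unsigned c, unsigned i)
{
  unsigned t1 = (a * c + b * c) == (a + b) * c;	    /* (A*C)+(B*C) -> (A+B)*C */
  unsigned t2 = (a * c - a) == a * (c - 1);	    /* (A*C)-A -> A*(C-1) */
  unsigned t3 = (i * 4 + i * 2) == (i * 2 + i) * 2; /* power-of-two factor */
  return t1 && t2 && t3;	/* Always 1.  */
}
#endif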
/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_int (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  if (total_bytes > len)
    return 0;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (bitpos < HOST_BITS_PER_WIDE_INT)
	value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
      else
	value = (unsigned char) (TREE_INT_CST_HIGH (expr)
				 >> (bitpos - HOST_BITS_PER_WIDE_INT));

      if (total_bytes > UNITS_PER_WORD)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      ptr[offset] = value;
    }
  return total_bytes;
}
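
/* Illustrative sketch (guarded out, assumes a little-endian target with
   32-bit int): the buffer filled above is the target's memory image of
   the constant, the same bytes a store of the value would produce.  */
#if 0
#include <string.h>

static int
encode_layout_example (void)
{
  int v = 0x01020304;
  unsigned char mem[sizeof v];
  static const unsigned char expect[4] = { 0x04, 0x03, 0x02, 0x01 };
  memcpy (mem, &v, sizeof v);
  return memcmp (mem, expect, 4) == 0;	/* 1 on little-endian hosts.  */
}
#endif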
/* Subroutine of native_encode_expr.  Encode the FIXED_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_fixed (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  enum machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  FIXED_VALUE_TYPE value;
  tree i_value, i_type;

  if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return 0;

  i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);

  if (NULL_TREE == i_type
      || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
    return 0;

  value = TREE_FIXED_CST (expr);
  i_value = double_int_to_tree (i_type, value.data);

  return native_encode_int (i_value, ptr, len);
}
/* Subroutine of native_encode_expr.  Encode the REAL_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_real (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words, bitpos;
  unsigned char value;

  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  long tmp[6];

  if (total_bytes > len)
    return 0;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));

  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));

      if (UNITS_PER_WORD < 4)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
    }
  return total_bytes;
}
/* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_complex (const_tree expr, unsigned char *ptr, int len)
{
  int rsize, isize;
  tree part;

  part = TREE_REALPART (expr);
  rsize = native_encode_expr (part, ptr, len);
  if (rsize == 0)
    return 0;
  part = TREE_IMAGPART (expr);
  isize = native_encode_expr (part, ptr+rsize, len-rsize);
  if (isize != rsize)
    return 0;
  return rsize + isize;
}
/* Subroutine of native_encode_expr.  Encode the VECTOR_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_vector (const_tree expr, unsigned char *ptr, int len)
{
  int i, size, offset, count;
  tree itype, elem;

  offset = 0;
  count = VECTOR_CST_NELTS (expr);
  itype = TREE_TYPE (TREE_TYPE (expr));
  size = GET_MODE_SIZE (TYPE_MODE (itype));
  for (i = 0; i < count; i++)
    {
      elem = VECTOR_CST_ELT (expr, i);
      if (native_encode_expr (elem, ptr+offset, len-offset) != size)
	return 0;
      offset += size;
    }
  return offset;
}
/* Subroutine of native_encode_expr.  Encode the STRING_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_string (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  HOST_WIDE_INT total_bytes;

  if (TREE_CODE (type) != ARRAY_TYPE
      || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
      || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
      || !host_integerp (TYPE_SIZE_UNIT (type), 0))
    return 0;
  total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
  if (total_bytes > len)
    return 0;
  if (TREE_STRING_LENGTH (expr) < total_bytes)
    {
      memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
      memset (ptr + TREE_STRING_LENGTH (expr), 0,
	      total_bytes - TREE_STRING_LENGTH (expr));
    }
  else
    memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
  return total_bytes;
}
/* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
   REAL_CST, FIXED_CST, COMPLEX_CST, VECTOR_CST or STRING_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

int
native_encode_expr (const_tree expr, unsigned char *ptr, int len)
{
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return native_encode_int (expr, ptr, len);

    case REAL_CST:
      return native_encode_real (expr, ptr, len);

    case FIXED_CST:
      return native_encode_fixed (expr, ptr, len);

    case COMPLEX_CST:
      return native_encode_complex (expr, ptr, len);

    case VECTOR_CST:
      return native_encode_vector (expr, ptr, len);

    case STRING_CST:
      return native_encode_string (expr, ptr, len);

    default:
      return 0;
    }
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_int (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  double_int result;

  if (total_bytes > len
      || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return NULL_TREE;

  result = double_int::from_buffer (ptr, total_bytes);

  return double_int_to_tree (type, result);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a FIXED_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_fixed (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  double_int result;
  FIXED_VALUE_TYPE fixed_value;

  if (total_bytes > len
      || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return NULL_TREE;

  result = double_int::from_buffer (ptr, total_bytes);
  fixed_value = fixed_from_double_int (result, TYPE_MODE (type));

  return build_fixed (type, fixed_value);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a REAL_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_real (tree type, const unsigned char *ptr, int len)
{
  enum machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  int byte, offset, word, words, bitpos;
  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  REAL_VALUE_TYPE r;
  long tmp[6];

  total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  if (total_bytes > len || total_bytes > 24)
    return NULL_TREE;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  memset (tmp, 0, sizeof (tmp));
  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      if (UNITS_PER_WORD < 4)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];

      tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
    }

  real_from_target (&r, tmp, mode);
  return build_real (type, r);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_complex (tree type, const unsigned char *ptr, int len)
{
  tree etype, rpart, ipart;
  int size;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  if (size * 2 > len)
    return NULL_TREE;
  rpart = native_interpret_expr (etype, ptr, size);
  if (!rpart)
    return NULL_TREE;
  ipart = native_interpret_expr (etype, ptr+size, size);
  if (!ipart)
    return NULL_TREE;
  return build_complex (type, rpart, ipart);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_vector (tree type, const unsigned char *ptr, int len)
{
  tree etype, elem;
  int i, size, count;
  tree *elements;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  count = TYPE_VECTOR_SUBPARTS (type);
  if (size * count > len)
    return NULL_TREE;

  elements = XALLOCAVEC (tree, count);
  for (i = count - 1; i >= 0; i--)
    {
      elem = native_interpret_expr (etype, ptr+(i*size), size);
      if (!elem)
	return NULL_TREE;
      elements[i] = elem;
    }
  return build_vector (type, elements);
}
/* Subroutine of fold_view_convert_expr.  Interpret the contents of
   the buffer PTR of length LEN as a constant of type TYPE.  For
   INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
   we return a REAL_CST, etc...  If the buffer cannot be interpreted,
   return NULL_TREE.  */

tree
native_interpret_expr (tree type, const unsigned char *ptr, int len)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      return native_interpret_int (type, ptr, len);

    case REAL_TYPE:
      return native_interpret_real (type, ptr, len);

    case FIXED_POINT_TYPE:
      return native_interpret_fixed (type, ptr, len);

    case COMPLEX_TYPE:
      return native_interpret_complex (type, ptr, len);

    case VECTOR_TYPE:
      return native_interpret_vector (type, ptr, len);

    default:
      return NULL_TREE;
    }
}
/* Returns true if we can interpret the contents of a native encoding
   as TYPE.  */

static bool
can_native_interpret_type_p (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case FIXED_POINT_TYPE:
    case REAL_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
      return true;
    default:
      return false;
    }
}
/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
   TYPE at compile-time.  If we're unable to perform the conversion
   return NULL_TREE.  */

static tree
fold_view_convert_expr (tree type, tree expr)
{
  /* We support up to 512-bit values (for V8DFmode).  */
  unsigned char buffer[64];
  int len;

  /* Check that the host and target are sane.  */
  if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    return NULL_TREE;

  len = native_encode_expr (expr, buffer, sizeof (buffer));
  if (len == 0)
    return NULL_TREE;

  return native_interpret_expr (type, buffer, len);
}
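
/* Illustrative sketch (guarded out, assumes IEEE single precision):
   fold_view_convert_expr computes at compile time what a memcpy-based
   type pun does at run time.  Reinterpreting the bits of 1.0f as a
   32-bit integer yields 0x3f800000.  */
#if 0
#include <string.h>

static unsigned int
view_convert_example (void)
{
  float f = 1.0f;
  unsigned int u;
  memcpy (&u, &f, sizeof u);	/* VIEW_CONVERT_EXPR at the tree level.  */
  return u;			/* 0x3f800000 on IEEE targets.  */
}
#endif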
/* Build an expression for the address of T.  Folds away INDIRECT_REF
   to avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  if (TREE_CODE (t) == INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);

      if (TREE_TYPE (t) != ptrtype)
	t = build1_loc (loc, NOP_EXPR, ptrtype, t);
    }
  else if (TREE_CODE (t) == MEM_REF
	   && integer_zerop (TREE_OPERAND (t, 1)))
    return TREE_OPERAND (t, 0);
  else if (TREE_CODE (t) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
    return fold_binary (POINTER_PLUS_EXPR, ptrtype,
			TREE_OPERAND (t, 0),
			convert_to_ptrofftype (TREE_OPERAND (t, 1)));
  else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
    {
      t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));

      if (TREE_TYPE (t) != ptrtype)
	t = fold_convert_loc (loc, ptrtype, t);
    }
  else
    t = build1_loc (loc, ADDR_EXPR, ptrtype, t);

  return t;
}

/* Build an expression for the address of T.  */

tree
build_fold_addr_expr_loc (location_t loc, tree t)
{
  tree ptrtype = build_pointer_type (TREE_TYPE (t));

  return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
}

static bool vec_cst_ctor_to_array (tree, tree *);
7755 /* Fold a unary expression of code CODE and type TYPE with operand
7756 OP0. Return the folded expression if folding is successful.
7757 Otherwise, return NULL_TREE. */
7760 fold_unary_loc (location_t loc
, enum tree_code code
, tree type
, tree op0
)
7764 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
7766 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
7767 && TREE_CODE_LENGTH (code
) == 1);
7772 if (CONVERT_EXPR_CODE_P (code
)
7773 || code
== FLOAT_EXPR
|| code
== ABS_EXPR
|| code
== NEGATE_EXPR
)
7775 /* Don't use STRIP_NOPS, because signedness of argument type
7777 STRIP_SIGN_NOPS (arg0
);
7781 /* Strip any conversions that don't change the mode. This
7782 is safe for every expression, except for a comparison
7783 expression because its signedness is derived from its
7786 Note that this is done as an internal manipulation within
7787 the constant folder, in order to find the simplest
7788 representation of the arguments so that their form can be
7789 studied. In any cases, the appropriate type conversions
7790 should be put back in the tree that will get out of the
7796 if (TREE_CODE_CLASS (code
) == tcc_unary
)
7798 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
7799 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7800 fold_build1_loc (loc
, code
, type
,
7801 fold_convert_loc (loc
, TREE_TYPE (op0
),
7802 TREE_OPERAND (arg0
, 1))));
7803 else if (TREE_CODE (arg0
) == COND_EXPR
)
7805 tree arg01
= TREE_OPERAND (arg0
, 1);
7806 tree arg02
= TREE_OPERAND (arg0
, 2);
7807 if (! VOID_TYPE_P (TREE_TYPE (arg01
)))
7808 arg01
= fold_build1_loc (loc
, code
, type
,
7809 fold_convert_loc (loc
,
7810 TREE_TYPE (op0
), arg01
));
7811 if (! VOID_TYPE_P (TREE_TYPE (arg02
)))
7812 arg02
= fold_build1_loc (loc
, code
, type
,
7813 fold_convert_loc (loc
,
7814 TREE_TYPE (op0
), arg02
));
7815 tem
= fold_build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7818 /* If this was a conversion, and all we did was to move into
7819 inside the COND_EXPR, bring it back out. But leave it if
7820 it is a conversion from integer to integer and the
7821 result precision is no wider than a word since such a
7822 conversion is cheap and may be optimized away by combine,
7823 while it couldn't if it were outside the COND_EXPR. Then return
7824 so we don't get into an infinite recursion loop taking the
7825 conversion out and then back in. */
7827 if ((CONVERT_EXPR_CODE_P (code
)
7828 || code
== NON_LVALUE_EXPR
)
7829 && TREE_CODE (tem
) == COND_EXPR
7830 && TREE_CODE (TREE_OPERAND (tem
, 1)) == code
7831 && TREE_CODE (TREE_OPERAND (tem
, 2)) == code
7832 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 1))
7833 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 2))
7834 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))
7835 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)))
7836 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7838 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))))
7839 && TYPE_PRECISION (TREE_TYPE (tem
)) <= BITS_PER_WORD
)
7840 || flag_syntax_only
))
7841 tem
= build1_loc (loc
, code
, type
,
7843 TREE_TYPE (TREE_OPERAND
7844 (TREE_OPERAND (tem
, 1), 0)),
7845 TREE_OPERAND (tem
, 0),
7846 TREE_OPERAND (TREE_OPERAND (tem
, 1), 0),
7847 TREE_OPERAND (TREE_OPERAND (tem
, 2),
7856 /* Re-association barriers around constants and other re-association
7857 barriers can be removed. */
7858 if (CONSTANT_CLASS_P (op0
)
7859 || TREE_CODE (op0
) == PAREN_EXPR
)
7860 return fold_convert_loc (loc
, type
, op0
);
7865 case FIX_TRUNC_EXPR
:
7866 if (TREE_TYPE (op0
) == type
)
7869 if (COMPARISON_CLASS_P (op0
))
7871 /* If we have (type) (a CMP b) and type is an integral type, return
7872 new expression involving the new type. Canonicalize
7873 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7875 Do not fold the result as that would not simplify further, also
7876 folding again results in recursions. */
7877 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
7878 return build2_loc (loc
, TREE_CODE (op0
), type
,
7879 TREE_OPERAND (op0
, 0),
7880 TREE_OPERAND (op0
, 1));
7881 else if (!INTEGRAL_TYPE_P (type
) && !VOID_TYPE_P (type
)
7882 && TREE_CODE (type
) != VECTOR_TYPE
)
7883 return build3_loc (loc
, COND_EXPR
, type
, op0
,
7884 constant_boolean_node (true, type
),
7885 constant_boolean_node (false, type
));
7888 /* Handle cases of two conversions in a row. */
7889 if (CONVERT_EXPR_P (op0
))
7891 tree inside_type
= TREE_TYPE (TREE_OPERAND (op0
, 0));
7892 tree inter_type
= TREE_TYPE (op0
);
7893 int inside_int
= INTEGRAL_TYPE_P (inside_type
);
7894 int inside_ptr
= POINTER_TYPE_P (inside_type
);
7895 int inside_float
= FLOAT_TYPE_P (inside_type
);
7896 int inside_vec
= TREE_CODE (inside_type
) == VECTOR_TYPE
;
7897 unsigned int inside_prec
= TYPE_PRECISION (inside_type
);
7898 int inside_unsignedp
= TYPE_UNSIGNED (inside_type
);
7899 int inter_int
= INTEGRAL_TYPE_P (inter_type
);
7900 int inter_ptr
= POINTER_TYPE_P (inter_type
);
7901 int inter_float
= FLOAT_TYPE_P (inter_type
);
7902 int inter_vec
= TREE_CODE (inter_type
) == VECTOR_TYPE
;
7903 unsigned int inter_prec
= TYPE_PRECISION (inter_type
);
7904 int inter_unsignedp
= TYPE_UNSIGNED (inter_type
);
7905 int final_int
= INTEGRAL_TYPE_P (type
);
7906 int final_ptr
= POINTER_TYPE_P (type
);
7907 int final_float
= FLOAT_TYPE_P (type
);
7908 int final_vec
= TREE_CODE (type
) == VECTOR_TYPE
;
7909 unsigned int final_prec
= TYPE_PRECISION (type
);
7910 int final_unsignedp
= TYPE_UNSIGNED (type
);
7912 /* In addition to the cases of two conversions in a row
7913 handled below, if we are converting something to its own
7914 type via an object of identical or wider precision, neither
7915 conversion is needed. */
7916 if (TYPE_MAIN_VARIANT (inside_type
) == TYPE_MAIN_VARIANT (type
)
7917 && (((inter_int
|| inter_ptr
) && final_int
)
7918 || (inter_float
&& final_float
))
7919 && inter_prec
>= final_prec
)
7920 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7922 /* Likewise, if the intermediate and initial types are either both
7923 float or both integer, we don't need the middle conversion if the
7924 former is wider than the latter and doesn't change the signedness
7925 (for integers). Avoid this if the final type is a pointer since
7926 then we sometimes need the middle conversion. Likewise if the
7927 final type has a precision not equal to the size of its mode. */
7928 if (((inter_int
&& inside_int
)
7929 || (inter_float
&& inside_float
)
7930 || (inter_vec
&& inside_vec
))
7931 && inter_prec
>= inside_prec
7932 && (inter_float
|| inter_vec
7933 || inter_unsignedp
== inside_unsignedp
)
7934 && ! (final_prec
!= GET_MODE_PRECISION (TYPE_MODE (type
))
7935 && TYPE_MODE (type
) == TYPE_MODE (inter_type
))
7937 && (! final_vec
|| inter_prec
== inside_prec
))
7938 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7940 /* If we have a sign-extension of a zero-extended value, we can
7941 replace that by a single zero-extension. Likewise if the
7942 final conversion does not change precision we can drop the
7943 intermediate conversion. */
7944 if (inside_int
&& inter_int
&& final_int
7945 && ((inside_prec
< inter_prec
&& inter_prec
< final_prec
7946 && inside_unsignedp
&& !inter_unsignedp
)
7947 || final_prec
== inter_prec
))
7948 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7950 /* Two conversions in a row are not needed unless:
7951 - some conversion is floating-point (overstrict for now), or
7952 - some conversion is a vector (overstrict for now), or
7953 - the intermediate type is narrower than both initial and
7955 - the intermediate type and innermost type differ in signedness,
7956 and the outermost type is wider than the intermediate, or
7957 - the initial type is a pointer type and the precisions of the
7958 intermediate and final types differ, or
7959 - the final type is a pointer type and the precisions of the
7960 initial and intermediate types differ. */
7961 if (! inside_float
&& ! inter_float
&& ! final_float
7962 && ! inside_vec
&& ! inter_vec
&& ! final_vec
7963 && (inter_prec
>= inside_prec
|| inter_prec
>= final_prec
)
7964 && ! (inside_int
&& inter_int
7965 && inter_unsignedp
!= inside_unsignedp
7966 && inter_prec
< final_prec
)
7967 && ((inter_unsignedp
&& inter_prec
> inside_prec
)
7968 == (final_unsignedp
&& final_prec
> inter_prec
))
7969 && ! (inside_ptr
&& inter_prec
!= final_prec
)
7970 && ! (final_ptr
&& inside_prec
!= inter_prec
)
7971 && ! (final_prec
!= GET_MODE_PRECISION (TYPE_MODE (type
))
7972 && TYPE_MODE (type
) == TYPE_MODE (inter_type
)))
7973 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7976 /* Handle (T *)&A.B.C for A being of type T and B and C
7977 living at offset zero. This occurs frequently in
7978 C++ upcasting and then accessing the base. */
7979 if (TREE_CODE (op0
) == ADDR_EXPR
7980 && POINTER_TYPE_P (type
)
7981 && handled_component_p (TREE_OPERAND (op0
, 0)))
7983 HOST_WIDE_INT bitsize
, bitpos
;
7985 enum machine_mode mode
;
7986 int unsignedp
, volatilep
;
7987 tree base
= TREE_OPERAND (op0
, 0);
7988 base
= get_inner_reference (base
, &bitsize
, &bitpos
, &offset
,
7989 &mode
, &unsignedp
, &volatilep
, false);
7990 /* If the reference was to a (constant) zero offset, we can use
7991 the address of the base if it has the same base type
7992 as the result type and the pointer type is unqualified. */
7993 if (! offset
&& bitpos
== 0
7994 && (TYPE_MAIN_VARIANT (TREE_TYPE (type
))
7995 == TYPE_MAIN_VARIANT (TREE_TYPE (base
)))
7996 && TYPE_QUALS (type
) == TYPE_UNQUALIFIED
)
7997 return fold_convert_loc (loc
, type
,
7998 build_fold_addr_expr_loc (loc
, base
));
8001 if (TREE_CODE (op0
) == MODIFY_EXPR
8002 && TREE_CONSTANT (TREE_OPERAND (op0
, 1))
8003 /* Detect assigning a bitfield. */
8004 && !(TREE_CODE (TREE_OPERAND (op0
, 0)) == COMPONENT_REF
8006 (TREE_OPERAND (TREE_OPERAND (op0
, 0), 1))))
8008 /* Don't leave an assignment inside a conversion
8009 unless assigning a bitfield. */
8010 tem
= fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 1));
8011 /* First do the assignment, then return converted constant. */
8012 tem
= build2_loc (loc
, COMPOUND_EXPR
, TREE_TYPE (tem
), op0
, tem
);
8013 TREE_NO_WARNING (tem
) = 1;
8014 TREE_USED (tem
) = 1;
      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
	 constant (if x has signed type, the sign bit cannot be set
	 in c).  This folds extension into the BIT_AND_EXPR.
	 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
	 very likely don't have maximal range for their precision and this
	 transformation effectively doesn't preserve non-maximal ranges.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TREE_CODE (op0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
	{
	  tree and_expr = op0;
	  tree and0 = TREE_OPERAND (and_expr, 0);
	  tree and1 = TREE_OPERAND (and_expr, 1);
	  int change = 0;

	  if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
	      || (TYPE_PRECISION (type)
		  <= TYPE_PRECISION (TREE_TYPE (and_expr))))
	    change = 1;
	  else if (TYPE_PRECISION (TREE_TYPE (and1))
		   <= HOST_BITS_PER_WIDE_INT
		   && host_integerp (and1, 1))
	    {
	      unsigned HOST_WIDE_INT cst;

	      cst = tree_low_cst (and1, 1);
	      cst &= (HOST_WIDE_INT) -1
		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
	      change = (cst == 0);
#ifdef LOAD_EXTEND_OP
	      if (change
		  && !flag_syntax_only
		  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
		      == ZERO_EXTEND))
		{
		  tree uns = unsigned_type_for (TREE_TYPE (and0));
		  and0 = fold_convert_loc (loc, uns, and0);
		  and1 = fold_convert_loc (loc, uns, and1);
		}
#endif
	    }
	  if (change)
	    {
	      tem = force_fit_type_double (type, tree_to_double_int (and1),
					   0, TREE_OVERFLOW (and1));
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_convert_loc (loc, type, and0), tem);
	    }
	}
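      /* Worked example (illustrative, 32-bit int and 64-bit long): for
	 int x, (long) (x & 0xff) becomes ((long) x) & 0xff.  The mask
	 clears every bit at and above the sign bit, so widening before
	 masking is equivalent, and the extension can then combine with
	 neighbouring operations.  */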
      /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
	 when one of the new casts will fold away.  Conservatively we assume
	 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST.  */
      if (POINTER_TYPE_P (type)
	  && TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
	  && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
	      || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);

	  return fold_build_pointer_plus_loc
		   (loc, fold_convert_loc (loc, type, arg00), arg01);
	}
      /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
	 of the same precision, and X is an integer type not narrower than
	 types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (op0) == BIT_NOT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
	  && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
	{
	  tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
	      && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
	    return fold_build1_loc (loc, BIT_NOT_EXPR, type,
				    fold_convert_loc (loc, type, tem));
	}
      /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
	 type of X and Y (integer types only).  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (op0) == MULT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
	{
	  /* Be careful not to introduce new overflows.  */
	  tree mult_type;
	  if (TYPE_OVERFLOW_WRAPS (type))
	    mult_type = type;
	  else
	    mult_type = unsigned_type_for (type);

	  if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
	    {
	      tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
				     fold_convert_loc (loc, mult_type,
						       TREE_OPERAND (op0, 0)),
				     fold_convert_loc (loc, mult_type,
						       TREE_OPERAND (op0, 1)));
	      return fold_convert_loc (loc, type, tem);
	    }
	}

      tem = fold_convert_const (code, type, op0);
      return tem ? tem : NULL_TREE;
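      /* Worked example (illustrative): for long a, b, the truncation
	 (int) (a * b) becomes (int) ((unsigned int) a * (unsigned int) b);
	 performing the narrower multiply in the unsigned type avoids
	 introducing a new, possibly undefined, signed overflow.  */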
    case ADDR_SPACE_CONVERT_EXPR:
      if (integer_zerop (arg0))
	return fold_convert_const (code, type, arg0);
      return NULL_TREE;

    case FIXED_CONVERT_EXPR:
      tem = fold_convert_const (code, type, arg0);
      return tem ? tem : NULL_TREE;
    case VIEW_CONVERT_EXPR:
      if (TREE_TYPE (op0) == type)
	return op0;
      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
	return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
				type, TREE_OPERAND (op0, 0));
      if (TREE_CODE (op0) == MEM_REF)
	return fold_build2_loc (loc, MEM_REF, type,
				TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));

      /* For integral conversions with the same precision or pointer
	 conversions use a NOP_EXPR instead.  */
      if ((INTEGRAL_TYPE_P (type)
	   || POINTER_TYPE_P (type))
	  && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
	      || POINTER_TYPE_P (TREE_TYPE (op0)))
	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
	return fold_convert_loc (loc, type, op0);

      /* Strip inner integral conversions that do not change the precision.  */
      if (CONVERT_EXPR_P (op0)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
	      || POINTER_TYPE_P (TREE_TYPE (op0)))
	  && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
	      || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
	  && (TYPE_PRECISION (TREE_TYPE (op0))
	      == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
	return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
				type, TREE_OPERAND (op0, 0));

      return fold_view_convert_expr (type, op0);
    case NEGATE_EXPR:
      tem = fold_negate_expr (loc, arg0);
      if (tem)
	return fold_convert_loc (loc, type, tem);
      return NULL_TREE;
    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
	       && TREE_CODE (type) == REAL_TYPE)
	{
	  tree targ0 = strip_float_extensions (arg0);
	  if (targ0 != arg0)
	    return fold_convert_loc (loc, type,
				     fold_build1_loc (loc, ABS_EXPR,
						      TREE_TYPE (targ0),
						      targ0));
	}
      /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
      else if (TREE_CODE (arg0) == ABS_EXPR)
	return arg0;
      else if (tree_expr_nonnegative_p (arg0))
	return arg0;

      /* Strip sign ops from argument.  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = fold_strip_sign_ops (arg0);
	  if (tem)
	    return fold_build1_loc (loc, ABS_EXPR, type,
				    fold_convert_loc (loc, type, tem));
	}
      return NULL_TREE;
    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	{
	  tree itype = TREE_TYPE (type);
	  tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
	  tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
				  negate_expr (ipart));
	}
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree itype = TREE_TYPE (type);
	  tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
	  tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
	  return build_complex (type, rpart, negate_expr (ipart));
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      return NULL_TREE;
    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      /* Convert ~ (-A) to A - 1.  */
      else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build2_loc (loc, MINUS_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)),
				build_int_cst (type, 1));
      /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
      else if (INTEGRAL_TYPE_P (type)
	       && ((TREE_CODE (arg0) == MINUS_EXPR
		    && integer_onep (TREE_OPERAND (arg0, 1)))
		   || (TREE_CODE (arg0) == PLUS_EXPR
		       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
	return fold_build1_loc (loc, NEGATE_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));
      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
					 fold_convert_loc (loc, type,
							   TREE_OPERAND (arg0, 0)))))
	return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
					 fold_convert_loc (loc, type,
							   TREE_OPERAND (arg0, 1)))))
	return fold_build2_loc (loc, BIT_XOR_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)), tem);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree *elements;
	  tree elem;
	  unsigned count = VECTOR_CST_NELTS (arg0), i;

	  elements = XALLOCAVEC (tree, count);
	  for (i = 0; i < count; i++)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
		return NULL_TREE;
	      elements[i] = elem;
	    }
	  return build_vector (type, elements);
	}
      return NULL_TREE;
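      /* Worked examples (illustrative) for the two's-complement folds
	 above: with A = 5, ~(-5) = 4 = 5 - 1, so ~(-A) becomes A - 1,
	 and ~(5 - 1) = -5, so ~(A - 1) becomes -A; both follow from the
	 identity ~X == -X - 1.  */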
    case TRUTH_NOT_EXPR:
      /* The argument to invert_truthvalue must have Boolean type.  */
      if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
	arg0 = fold_convert_loc (loc, boolean_type_node, arg0);

      /* Note that the operand of this must be an int
	 and its values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language,
	 but we don't handle values other than 1 correctly yet.)  */
      tem = fold_truth_not_expr (loc, arg0);
      if (!tem)
	return NULL_TREE;
      return fold_convert_loc (loc, type, tem);
    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
				     TREE_OPERAND (arg0, 1));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert_loc (loc, type, TREE_REALPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
				 fold_build1_loc (loc, REALPART_EXPR, itype,
						  TREE_OPERAND (arg0, 0)),
				 fold_build1_loc (loc, REALPART_EXPR, itype,
						  TREE_OPERAND (arg0, 1)));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1_loc (loc, REALPART_EXPR, itype,
				 TREE_OPERAND (arg0, 0));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
		fn = mathfn_built_in (type, BUILT_IN_COS);
		if (fn)
		  return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      return NULL_TREE;

    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return build_zero_cst (type);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
				     TREE_OPERAND (arg0, 0));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
				 fold_build1_loc (loc, IMAGPART_EXPR, itype,
						  TREE_OPERAND (arg0, 0)),
				 fold_build1_loc (loc, IMAGPART_EXPR, itype,
						  TREE_OPERAND (arg0, 1)));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
	  return fold_convert_loc (loc, type, negate_expr (tem));
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
		fn = mathfn_built_in (type, BUILT_IN_SIN);
		if (fn)
		  return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      return NULL_TREE;
    case INDIRECT_REF:
      /* Fold *&X to X if X is an lvalue.  */
      if (TREE_CODE (op0) == ADDR_EXPR)
	{
	  tree op00 = TREE_OPERAND (op0, 0);
	  if ((TREE_CODE (op00) == VAR_DECL
	       || TREE_CODE (op00) == PARM_DECL
	       || TREE_CODE (op00) == RESULT_DECL)
	      && !TREE_READONLY (op00))
	    return op00;
	}
      return NULL_TREE;
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;
	enum tree_code subcode;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 2);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR))
	  elts += nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else
	  subcode = FLOAT_EXPR;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }
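    /* Worked example (illustrative): unpacking the constant V4HI vector
       { 1, 2, 3, 4 } with VEC_UNPACK_LO_EXPR to a V2SI result widens one
       half of the elements, yielding { 1, 2 } on a little-endian target
       (a big-endian target selects the other half).  */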
    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;
	enum tree_code subcode;

	if (TREE_CODE (op0) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (op0, elts))
	  return NULL_TREE;

	switch (code)
	  {
	  case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
	  case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
	  case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
	  default: gcc_unreachable ();
	  }

	for (i = 1; i < nelts; i++)
	  {
	    elts[0] = const_binop (subcode, elts[0], elts[i]);
	    if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
	      return NULL_TREE;
	    elts[i] = build_zero_cst (TREE_TYPE (type));
	  }

	return build_vector (type, elts);
      }

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* If the operation was a conversion do _not_ mark a resulting constant
   with TREE_OVERFLOW if the original constant was not.  These conversions
   have implementation defined behavior and retaining the TREE_OVERFLOW
   flag here would confuse later passes such as VRP.  */

tree
fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
				tree type, tree op0)
{
  tree res = fold_unary_loc (loc, code, type, op0);
  if (res
      && TREE_CODE (res) == INTEGER_CST
      && TREE_CODE (op0) == INTEGER_CST
      && CONVERT_EXPR_CODE_P (code))
    TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);

  return res;
}
/* Fold a binary bitwise/truth expression of code CODE and type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting expression.
   ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_truth_andor (location_t loc, enum tree_code code, tree type,
		  tree arg0, tree arg1, tree op0, tree op1)
{
  tree tem;

  /* We only do these simplifications if we are optimizing.  */
  if (!optimize)
    return NULL_TREE;

  /* Check for things like (A || B) && (A || C).  We can convert this
     to A || (B && C).  Note that either operator can be any of the four
     truth and/or operations and the transformation will still be
     valid.  Also note that we only care about order for the
     ANDIF and ORIF operators.  If B contains side effects, this
     might change the truth-value of A.  */
  if (TREE_CODE (arg0) == TREE_CODE (arg1)
      && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
	  || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
	  || TREE_CODE (arg0) == TRUTH_AND_EXPR
	  || TREE_CODE (arg0) == TRUTH_OR_EXPR)
      && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
    {
      tree a00 = TREE_OPERAND (arg0, 0);
      tree a01 = TREE_OPERAND (arg0, 1);
      tree a10 = TREE_OPERAND (arg1, 0);
      tree a11 = TREE_OPERAND (arg1, 1);
      int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
			  || TREE_CODE (arg0) == TRUTH_AND_EXPR)
			 && (code == TRUTH_AND_EXPR
			     || code == TRUTH_OR_EXPR));

      if (operand_equal_p (a00, a10, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
				fold_build2_loc (loc, code, type, a01, a11));
      else if (commutative && operand_equal_p (a00, a11, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
				fold_build2_loc (loc, code, type, a01, a10));
      else if (commutative && operand_equal_p (a01, a10, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
				fold_build2_loc (loc, code, type, a00, a11));

      /* This case is tricky because we must either have commutative
	 operators or else A10 must not have side-effects.  */

      else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
	       && operand_equal_p (a01, a11, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type,
				fold_build2_loc (loc, code, type, a00, a10),
				a01);
    }
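  /* Worked example (illustrative): (a || b) && (a || c) rewrites to
     a || (b && c): if a is true both forms are true, and if a is false
     both reduce to b && c.  The TREE_SIDE_EFFECTS test above refuses
     the rewrite when it could change how often an inner operand is
     evaluated.  */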
  /* See if we can build a range comparison.  */
  if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
    return tem;

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
      if (tem)
	return fold_build2_loc (loc, code, type, tem, arg1);
    }

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
      if (tem)
	return fold_build2_loc (loc, code, type, arg0, tem);
    }

  /* Check for the possibility of merging component references.  If our
     lhs is another similar operation, try to merge its rhs with our
     rhs.  Then try to merge our lhs and rhs.  */
  if (TREE_CODE (arg0) == code
      && 0 != (tem = fold_truth_andor_1 (loc, code, type,
					 TREE_OPERAND (arg0, 1), arg1)))
    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

  if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
    return tem;

  if (LOGICAL_OP_NON_SHORT_CIRCUIT
      && (code == TRUTH_AND_EXPR
	  || code == TRUTH_ANDIF_EXPR
	  || code == TRUTH_OR_EXPR
	  || code == TRUTH_ORIF_EXPR))
    {
      enum tree_code ncode, icode;

      ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
	      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
      icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;

      /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
	 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
	 We don't want to pack more than two leaves to a non-IF AND/OR
	 expression.
	 If tree-code of left-hand operand isn't an AND/OR-IF code and not
	 equal to IF-CODE, then we don't want to add right-hand operand.
	 If the inner right-hand side of left-hand operand has
	 side-effects, or isn't simple, then we can't add to it,
	 as otherwise we might destroy if-sequence.  */
      if (TREE_CODE (arg0) == icode
	  && simple_operand_p_2 (arg1)
	  /* Needed for sequence points to handle trappings, and
	     side-effects.  */
	  && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
	{
	  tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
				 arg1);
	  return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
				  tem);
	}
      /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
	 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
      else if (TREE_CODE (arg1) == icode
	       && simple_operand_p_2 (arg0)
	       /* Needed for sequence points to handle trappings, and
		  side-effects.  */
	       && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_build2_loc (loc, ncode, type,
				 arg0, TREE_OPERAND (arg1, 0));
	  return fold_build2_loc (loc, icode, type, tem,
				  TREE_OPERAND (arg1, 1));
	}
      /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
	 into (A OR B).
	 For sequence point consistency, we need to check for trapping,
	 and side-effects.  */
      else if (code == icode && simple_operand_p_2 (arg0)
	       && simple_operand_p_2 (arg1))
	return fold_build2_loc (loc, ncode, type, arg0, arg1);
    }

  return NULL_TREE;
}
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code compl_code;

  if (code == MIN_EXPR)
    compl_code = MAX_EXPR;
  else if (code == MAX_EXPR)
    compl_code = MIN_EXPR;
  else
    gcc_unreachable ();

  /* MIN (MAX (a, b), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));

  /* MIN (MAX (b, a), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
      && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));

  /* MIN (a, MAX (a, b)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));

  /* MIN (a, MAX (b, a)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));

  return NULL_TREE;
}
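/* Worked example (illustrative): MIN (MAX (a, b), b) is always b: when
   a > b the MAX picks a and the MIN clamps back down to b, and otherwise
   every step yields b.  The remaining patterns above are the commuted
   forms of the same identity.  */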
/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
   valid if signed overflow is undefined.  */

static tree
maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
				 tree arg0, tree arg1,
				 bool *strict_overflow_p)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
  int sgn0;
  bool swap = false;

  /* Match A +- CST code arg1 and CST code arg1.  We can change the
     first form only if overflow is undefined.  */
  if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
	 /* In principle pointers also have undefined overflow behavior,
	    but that causes problems elsewhere.  */
	 && !POINTER_TYPE_P (TREE_TYPE (arg0))
	 && (code0 == MINUS_EXPR
	     || code0 == PLUS_EXPR)
	 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	|| code0 == INTEGER_CST))
    return NULL_TREE;

  /* Identify the constant in arg0 and its sign.  */
  if (code0 == INTEGER_CST)
    cst0 = arg0;
  else
    cst0 = TREE_OPERAND (arg0, 1);
  sgn0 = tree_int_cst_sgn (cst0);

  /* Overflowed constants and zero will cause problems.  */
  if (integer_zerop (cst0)
      || TREE_OVERFLOW (cst0))
    return NULL_TREE;

  /* See if we can reduce the magnitude of the constant in
     arg0 by changing the comparison code.  */
  if (code0 == INTEGER_CST)
    {
      /* CST <= arg1  ->  CST-1 < arg1.  */
      if (code == LE_EXPR && sgn0 == 1)
	code = LT_EXPR;
      /* -CST < arg1  ->  -CST-1 <= arg1.  */
      else if (code == LT_EXPR && sgn0 == -1)
	code = LE_EXPR;
      /* CST > arg1  ->  CST-1 >= arg1.  */
      else if (code == GT_EXPR && sgn0 == 1)
	code = GE_EXPR;
      /* -CST >= arg1  ->  -CST-1 > arg1.  */
      else if (code == GE_EXPR && sgn0 == -1)
	code = GT_EXPR;
      else
	return NULL_TREE;
      /* arg1 code' CST' might be more canonical.  */
      swap = true;
    }
  else
    {
      /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
      if (code == LT_EXPR
	  && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = LE_EXPR;
      /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
      else if (code == GT_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = GE_EXPR;
      /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
      else if (code == LE_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = LT_EXPR;
      /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
      else if (code == GE_EXPR
	       && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = GT_EXPR;
      else
	return NULL_TREE;
      *strict_overflow_p = true;
    }

  /* Now build the constant reduced in magnitude.  But not if that
     would produce one outside of its types range.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
      && ((sgn0 == 1
	   && TYPE_MIN_VALUE (TREE_TYPE (cst0))
	   && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
	  || (sgn0 == -1
	      && TYPE_MAX_VALUE (TREE_TYPE (cst0))
	      && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
    /* We cannot swap the comparison here as that would cause us to
       endlessly recurse.  */
    return NULL_TREE;

  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
		       cst0, build_int_cst (TREE_TYPE (cst0), 1));
  if (code0 != INTEGER_CST)
    t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
  t = fold_convert (TREE_TYPE (arg1), t);

  /* If swapping might yield a more canonical form, do so.  */
  if (swap)
    return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
  else
    return fold_build2_loc (loc, code, type, t, arg1);
}
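/* Worked example (illustrative, valid only when signed overflow is
   undefined): x - 2 < y canonicalizes to x - 1 <= y, shrinking the
   constant's magnitude; the constant-first form 5 <= y becomes 4 < y
   and is then swapped into the more canonical y > 4.  */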
/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
   overflow further.  Try to decrease the magnitude of constants involved
   by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
   and put sole constants at the second argument position.
   Returns the canonicalized tree if changed, otherwise NULL_TREE.  */

static tree
maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
			       tree arg0, tree arg1)
{
  tree t;
  bool strict_overflow_p;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when reducing constant in comparison");

  /* Try canonicalization by simplifying arg0.  */
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
				       &strict_overflow_p);
  if (t)
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
      return t;
    }

  /* Try canonicalization by simplifying arg1 using the swapped
     comparison.  */
  code = swap_tree_comparison (code);
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
				       &strict_overflow_p);
  if (t && strict_overflow_p)
    fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
  return t;
}
/* Return whether BASE + OFFSET + BITPOS may wrap around the address
   space.  This is used to avoid issuing overflow warnings for
   expressions like &p->x which can not wrap.  */

static bool
pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
{
  double_int di_offset, total;

  if (!POINTER_TYPE_P (TREE_TYPE (base)))
    return true;

  if (bitpos < 0)
    return true;

  if (offset == NULL_TREE)
    di_offset = double_int_zero;
  else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
    return true;
  else
    di_offset = TREE_INT_CST (offset);

  bool overflow;
  double_int units = double_int::from_uhwi (bitpos / BITS_PER_UNIT);
  total = di_offset.add_with_sign (units, true, &overflow);
  if (overflow)
    return true;

  if (total.high != 0)
    return true;

  HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
  if (size <= 0)
    return true;

  /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
     array.  */
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      HOST_WIDE_INT base_size;

      base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
      if (base_size > 0 && size < base_size)
	size = base_size;
    }

  return total.low > (unsigned HOST_WIDE_INT) size;
}
/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary should call fold_binary.  Fold a comparison with
   tree code CODE and type TYPE with operands OP0 and OP1.  Return
   the folded comparison or NULL_TREE.  */

static tree
fold_comparison (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  tree arg0, arg1, tem;

  arg0 = op0;
  arg1 = op1;

  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  tem = fold_relational_const (code, type, arg0, arg1);
  if (tem != NULL_TREE)
    return tem;

  /* If one arg is a real or integer constant, put it last.  */
  if (tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
  /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1.  */
  if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
      && (TREE_CODE (arg1) == INTEGER_CST
	  && !TREE_OVERFLOW (arg1)))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;
      tree variable = TREE_OPERAND (arg0, 0);
      tree lhs;
      int lhs_add;
      lhs_add = TREE_CODE (arg0) != PLUS_EXPR;

      lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
			     TREE_TYPE (arg1), const2, const1);

      /* If the constant operation overflowed this can be
	 simplified as a comparison against INT_MAX/INT_MIN.  */
      if (TREE_CODE (lhs) == INTEGER_CST
	  && TREE_OVERFLOW (lhs))
	{
	  int const1_sgn = tree_int_cst_sgn (const1);
	  enum tree_code code2 = code;

	  /* Get the sign of the constant on the lhs if the
	     operation were VARIABLE + CONST1.  */
	  if (TREE_CODE (arg0) == MINUS_EXPR)
	    const1_sgn = -const1_sgn;

	  /* The sign of the constant determines if we overflowed
	     INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
	     Canonicalize to the INT_MIN overflow by swapping the comparison
	     if necessary.  */
	  if (const1_sgn == -1)
	    code2 = swap_tree_comparison (code);

	  /* We now can look at the canonicalized case
	       VARIABLE + 1  CODE2  INT_MIN
	     and decide on the result.  */
	  if (code2 == LT_EXPR
	      || code2 == LE_EXPR
	      || code2 == EQ_EXPR)
	    return omit_one_operand_loc (loc, type, boolean_false_node,
					 variable);
	  else if (code2 == NE_EXPR
		   || code2 == GE_EXPR
		   || code2 == GT_EXPR)
	    return omit_one_operand_loc (loc, type, boolean_true_node,
					 variable);
	}

      if (TREE_CODE (lhs) == TREE_CODE (arg1)
	  && (TREE_CODE (lhs) != INTEGER_CST
	      || !TREE_OVERFLOW (lhs)))
	{
	  if (code != EQ_EXPR && code != NE_EXPR)
	    fold_overflow_warning ("assuming signed overflow does not occur "
				   "when changing X +- C1 cmp C2 to "
				   "X cmp C2 -+ C1",
				   WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type, variable, lhs);
	}
    }
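  /* Worked example (illustrative, signed overflow assumed undefined):
     x + 10 < 20 becomes x < 10.  When the adjusted constant overflows,
     the comparison is decided outright: the canonicalized form
     "VARIABLE + 1 CODE2 INT_MIN" is false for EQ/LT/LE and true for
     NE/GE/GT.  */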
  /* For comparisons of pointers we can decompose it to a compile time
     comparison of the base objects and the offsets into the object.
     This requires at least one operand being an ADDR_EXPR or a
     POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == ADDR_EXPR
	  || TREE_CODE (arg1) == ADDR_EXPR
	  || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
    {
      tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
      HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
      enum machine_mode mode;
      int volatilep, unsignedp;
      bool indirect_base0 = false, indirect_base1 = false;

      /* Get base and offset for the access.  Strip ADDR_EXPR for
	 get_inner_reference, but put it back by stripping INDIRECT_REF
	 off the base object if possible.  indirect_baseN will be true
	 if baseN is not an address but refers to the object itself.  */
      base0 = arg0;
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
				       &bitsize, &bitpos0, &offset0, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base0) == INDIRECT_REF)
	    base0 = TREE_OPERAND (base0, 0);
	  else
	    indirect_base0 = true;
	}
      else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	{
	  base0 = TREE_OPERAND (arg0, 0);
	  STRIP_SIGN_NOPS (base0);
	  if (TREE_CODE (base0) == ADDR_EXPR)
	    {
	      base0 = TREE_OPERAND (base0, 0);
	      indirect_base0 = true;
	    }
	  offset0 = TREE_OPERAND (arg0, 1);
	  if (host_integerp (offset0, 0))
	    {
	      HOST_WIDE_INT off = size_low_cst (offset0);
	      if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
				   * BITS_PER_UNIT)
		  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
		{
		  bitpos0 = off * BITS_PER_UNIT;
		  offset0 = NULL_TREE;
		}
	    }
	}

      base1 = arg1;
      if (TREE_CODE (arg1) == ADDR_EXPR)
	{
	  base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
				       &bitsize, &bitpos1, &offset1, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base1) == INDIRECT_REF)
	    base1 = TREE_OPERAND (base1, 0);
	  else
	    indirect_base1 = true;
	}
      else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	{
	  base1 = TREE_OPERAND (arg1, 0);
	  STRIP_SIGN_NOPS (base1);
	  if (TREE_CODE (base1) == ADDR_EXPR)
	    {
	      base1 = TREE_OPERAND (base1, 0);
	      indirect_base1 = true;
	    }
	  offset1 = TREE_OPERAND (arg1, 1);
	  if (host_integerp (offset1, 0))
	    {
	      HOST_WIDE_INT off = size_low_cst (offset1);
	      if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
				   * BITS_PER_UNIT)
		  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
		{
		  bitpos1 = off * BITS_PER_UNIT;
		  offset1 = NULL_TREE;
		}
	    }
	}

      /* A local variable can never be pointed to by
	 the default SSA name of an incoming parameter.  */
      if ((TREE_CODE (arg0) == ADDR_EXPR
	   && indirect_base0
	   && TREE_CODE (base0) == VAR_DECL
	   && auto_var_in_fn_p (base0, current_function_decl)
	   && !indirect_base1
	   && TREE_CODE (base1) == SSA_NAME
	   && SSA_NAME_IS_DEFAULT_DEF (base1)
	   && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
	  || (TREE_CODE (arg1) == ADDR_EXPR
	      && indirect_base1
	      && TREE_CODE (base1) == VAR_DECL
	      && auto_var_in_fn_p (base1, current_function_decl)
	      && !indirect_base0
	      && TREE_CODE (base0) == SSA_NAME
	      && SSA_NAME_IS_DEFAULT_DEF (base0)
	      && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
	{
	  if (code == NE_EXPR)
	    return constant_boolean_node (1, type);
	  else if (code == EQ_EXPR)
	    return constant_boolean_node (0, type);
	}
      /* If we have equivalent bases we might be able to simplify.  */
      else if (indirect_base0 == indirect_base1
	       && operand_equal_p (base0, base1, 0))
	{
	  /* We can fold this expression to a constant if the non-constant
	     offset parts are equal.  */
	  if ((offset0 == offset1
	       || (offset0 && offset1
		   && operand_equal_p (offset0, offset1, 0)))
	      && (code == EQ_EXPR
		  || code == NE_EXPR
		  || (indirect_base0 && DECL_P (base0))
		  || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      if (code != EQ_EXPR
		  && code != NE_EXPR
		  && bitpos0 != bitpos1
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_CONDITIONAL);

	      switch (code)
		{
		case EQ_EXPR:
		  return constant_boolean_node (bitpos0 == bitpos1, type);
		case NE_EXPR:
		  return constant_boolean_node (bitpos0 != bitpos1, type);
		case LT_EXPR:
		  return constant_boolean_node (bitpos0 < bitpos1, type);
		case LE_EXPR:
		  return constant_boolean_node (bitpos0 <= bitpos1, type);
		case GE_EXPR:
		  return constant_boolean_node (bitpos0 >= bitpos1, type);
		case GT_EXPR:
		  return constant_boolean_node (bitpos0 > bitpos1, type);
		default:;
		}
	    }
	  /* We can simplify the comparison to a comparison of the variable
	     offset parts if the constant offset parts are equal.
	     Be careful to use signed sizetype here because otherwise we
	     mess with array offsets in the wrong way.  This is possible
	     because pointer arithmetic is restricted to remain within an
	     object and overflow on pointer differences is undefined as of
	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
	  else if (bitpos0 == bitpos1
		   && ((code == EQ_EXPR || code == NE_EXPR)
		       || (indirect_base0 && DECL_P (base0))
		       || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      /* By converting to signed sizetype we cover middle-end pointer
		 arithmetic which operates on unsigned pointer types of size
		 type size and ARRAY_REF offsets which are properly sign or
		 zero extended from their type in case it is narrower than
		 sizetype.  */
	      if (offset0 == NULL_TREE)
		offset0 = build_int_cst (ssizetype, 0);
	      else
		offset0 = fold_convert_loc (loc, ssizetype, offset0);
	      if (offset1 == NULL_TREE)
		offset1 = build_int_cst (ssizetype, 0);
	      else
		offset1 = fold_convert_loc (loc, ssizetype, offset1);

	      if (code != EQ_EXPR
		  && code != NE_EXPR
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_COMPARISON);

	      return fold_build2_loc (loc, code, type, offset0, offset1);
	    }
	}
      /* For non-equal bases we can simplify if they are addresses
	 of local binding decls or constants.  */
      else if (indirect_base0 && indirect_base1
	       /* We know that !operand_equal_p (base0, base1, 0)
		  because the if condition was false.  But make
		  sure two decls are not the same.  */
	       && base0 != base1
	       && TREE_CODE (arg0) == ADDR_EXPR
	       && TREE_CODE (arg1) == ADDR_EXPR
	       && (((TREE_CODE (base0) == VAR_DECL
		     || TREE_CODE (base0) == PARM_DECL)
		    && (targetm.binds_local_p (base0)
			|| CONSTANT_CLASS_P (base1)))
		   || CONSTANT_CLASS_P (base0))
	       && (((TREE_CODE (base1) == VAR_DECL
		     || TREE_CODE (base1) == PARM_DECL)
		    && (targetm.binds_local_p (base1)
			|| CONSTANT_CLASS_P (base0)))
		   || CONSTANT_CLASS_P (base1)))
	{
	  if (code == EQ_EXPR)
	    return omit_two_operands_loc (loc, type, boolean_false_node,
					  arg0, arg1);
	  else if (code == NE_EXPR)
	    return omit_two_operands_loc (loc, type, boolean_true_node,
					  arg0, arg1);
	}
      /* For equal offsets we can simplify to a comparison of the
	 base addresses.  */
      else if (bitpos0 == bitpos1
	       && (indirect_base0
		   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
	       && (indirect_base1
		   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
	       && ((offset0 == offset1)
		   || (offset0 && offset1
		       && operand_equal_p (offset0, offset1, 0))))
	{
	  if (indirect_base0)
	    base0 = build_fold_addr_expr_loc (loc, base0);
	  if (indirect_base1)
	    base1 = build_fold_addr_expr_loc (loc, base1);
	  return fold_build2_loc (loc, code, type, base0, base1);
	}
    }
  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = TREE_OPERAND (arg1, 1);
      tree variable1 = TREE_OPERAND (arg0, 0);
      tree variable2 = TREE_OPERAND (arg1, 0);
      tree cst;
      const char * const warnmsg = G_("assuming signed overflow does not "
				      "occur when combining constants around "
				      "a comparison");

      /* Put the constant on the side where it doesn't overflow and is
	 of lower absolute value than before.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const2, const1);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  variable1,
				  fold_build2_loc (loc,
						   TREE_CODE (arg1),
						   TREE_TYPE (arg1),
						   variable2, cst));
	}

      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const1, const2);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  fold_build2_loc (loc, TREE_CODE (arg0),
						   TREE_TYPE (arg0),
						   variable1, cst),
				  variable2);
	}
    }
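  /* Worked example (illustrative, signed overflow assumed undefined):
     x + 10 < y + 12 combines the constants as 12 - 10 = 2 and becomes
     x < y + 2; the surviving constant is smaller in magnitude and sits
     on the side where it cannot overflow.  */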
  /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
     signed arithmetic case.  That form is created by the compiler
     often enough for folding it to be of value.  One example is in
     computing loop trip counts after Operator Strength Reduction.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && TREE_CODE (arg0) == MULT_EXPR
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && integer_zerop (arg1))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;			/* zero */
      tree variable1 = TREE_OPERAND (arg0, 0);
      enum tree_code cmp_code = code;

      /* Handle unfolded multiplication by zero.  */
      if (integer_zerop (const1))
	return fold_build2_loc (loc, cmp_code, type, const1, const2);

      fold_overflow_warning (("assuming signed overflow does not occur when "
			      "eliminating multiplication in comparison "
			      "with zero"),
			     WARN_STRICT_OVERFLOW_COMPARISON);

      /* If const1 is negative we swap the sense of the comparison.  */
      if (tree_int_cst_sgn (const1) < 0)
	cmp_code = swap_tree_comparison (cmp_code);

      return fold_build2_loc (loc, cmp_code, type, variable1, const2);
    }
  tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
  if (tem)
    return tem;

  if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
    {
      tree targ0 = strip_float_extensions (arg0);
      tree targ1 = strip_float_extensions (arg1);
      tree newtype = TREE_TYPE (targ0);

      if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	newtype = TREE_TYPE (targ1);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, newtype, targ0),
				fold_convert_loc (loc, newtype, targ1));

      /* (-a) CMP (-b) -> b CMP a  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
				TREE_OPERAND (arg0, 0));

      if (TREE_CODE (arg1) == REAL_CST)
	{
	  REAL_VALUE_TYPE cst;
	  cst = TREE_REAL_CST (arg1);

	  /* (-a) CMP CST -> a swap(CMP) (-CST)  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR)
	    return fold_build2_loc (loc, swap_tree_comparison (code), type,
				    TREE_OPERAND (arg0, 0),
				    build_real (TREE_TYPE (arg1),
						real_value_negate (&cst)));

	  /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
	  /* a CMP (-0) -> a CMP 0  */
	  if (REAL_VALUE_MINUS_ZERO (cst))
	    return fold_build2_loc (loc, code, type, arg0,
				    build_real (TREE_TYPE (arg1), dconst0));

	  /* x != NaN is always true, other ops are always false.  */
	  if (REAL_VALUE_ISNAN (cst)
	      && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
	      return omit_one_operand_loc (loc, type, tem, arg0);
	    }

	  /* Fold comparisons against infinity.  */
	  if (REAL_VALUE_ISINF (cst)
	      && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = fold_inf_compare (loc, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}

      /* If this is a comparison of a real constant with a PLUS_EXPR
	 or a MINUS_EXPR of a real constant, we can convert it into a
	 comparison with a revised real constant as long as no overflow
	 occurs when unsafe_math_optimizations are enabled.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == REAL_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      arg1, TREE_OPERAND (arg0, 1)))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Likewise, we can simplify a comparison of a real constant with
	 a MINUS_EXPR whose first operand is also a real constant, i.e.
	 (c1 - x) < c2 becomes x > c1-c2.  Reordering is allowed on
	 floating-point types only if -fassociative-math is set.  */
      if (flag_associative_math
	  && TREE_CODE (arg1) == REAL_CST
	  && TREE_CODE (arg0) == MINUS_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
	  && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
				      arg1))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, swap_tree_comparison (code), type,
				TREE_OPERAND (arg0, 1), tem);

      /* Fold comparisons against built-in math functions.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && flag_unsafe_math_optimizations
	  && ! flag_errno_math)
	{
	  enum built_in_function fcode = builtin_mathfn_code (arg0);

	  if (fcode != END_BUILTINS)
	    {
	      tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}
    }
  if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
      && CONVERT_EXPR_P (arg0))
    {
      /* If we are widening one operand of an integer comparison,
	 see if the other operand is similarly being widened.  Perhaps we
	 can do the comparison in the narrower type.  */
      tem = fold_widened_comparison (loc, code, type, arg0, arg1);
      if (tem)
	return tem;

      /* Or if we are changing signedness.  */
      tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
      if (tem)
	return tem;
    }

  /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
     constant, we can simplify it.  */
  if (TREE_CODE (arg1) == INTEGER_CST
      && (TREE_CODE (arg0) == MIN_EXPR
	  || TREE_CODE (arg0) == MAX_EXPR)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
    {
      tem = optimize_minmax_comparison (loc, code, type, op0, op1);
      if (tem)
	return tem;
    }
  /* Simplify comparison of something with itself.  (For IEEE
     floating-point, we can only do some of these simplifications.)  */
  if (operand_equal_p (arg0, arg1, 0))
    {
      switch (code)
	{
	case EQ_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  break;

	case GE_EXPR:
	case LE_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);

	case NE_EXPR:
	  /* For NE, we can only do this simplification if integer
	     or we don't honor IEEE floating point NaNs.  */
	  if (FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    break;
	  /* ... fall through ...  */
	case GT_EXPR:
	case LT_EXPR:
	  return constant_boolean_node (0, type);
	default:
	  gcc_unreachable ();
	}
    }
  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;
      int save_p = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
	  /* Don't handle degenerate cases here; they should already
	     have been handled anyway.  */
	  && cval1 != 0 && cval2 != 0
	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
	{
	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

	  /* We can't just pass T to eval_subst in case cval1 or cval2
	     was the same as ARG1.  */

	  tree high_result
	    = fold_build2_loc (loc, code, type,
			       eval_subst (loc, arg0, cval1, maxval,
					   cval2, minval),
			       arg1);
	  tree equal_result
	    = fold_build2_loc (loc, code, type,
			       eval_subst (loc, arg0, cval1, maxval,
					   cval2, maxval),
			       arg1);
	  tree low_result
	    = fold_build2_loc (loc, code, type,
			       eval_subst (loc, arg0, cval1, minval,
					   cval2, maxval),
			       arg1);

	  /* All three of these results should be 0 or 1.  Confirm they are.
	     Then use those values to select the proper code to use.  */

	  if (TREE_CODE (high_result) == INTEGER_CST
	      && TREE_CODE (equal_result) == INTEGER_CST
	      && TREE_CODE (low_result) == INTEGER_CST)
	    {
	      /* Make a 3-bit mask with the high-order bit being the
		 value for `>', the next for '=', and the low for '<'.  */
	      switch ((integer_onep (high_result) * 4)
		      + (integer_onep (equal_result) * 2)
		      + integer_onep (low_result))
		{
		case 0:
		  /* Always false.  */
		  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
		case 1:
		  code = LT_EXPR;
		  break;
		case 2:
		  code = EQ_EXPR;
		  break;
		case 3:
		  code = LE_EXPR;
		  break;
		case 4:
		  code = GT_EXPR;
		  break;
		case 5:
		  code = NE_EXPR;
		  break;
		case 6:
		  code = GE_EXPR;
		  break;
		case 7:
		  /* Always true.  */
		  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
		}

	      if (save_p)
		{
		  tem = save_expr (build2 (code, type, cval1, cval2));
		  SET_EXPR_LOCATION (tem, loc);
		  return tem;
		}
	      return fold_build2_loc (loc, code, type, cval1, cval2);
	    }
	}
    }
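  /* Worked example (illustrative): folding (a > b) == 0 substitutes the
     extreme values for a and b, giving high_result 0, equal_result 1 and
     low_result 1; the 3-bit mask is 0*4 + 1*2 + 1 = 3, which selects
     LE_EXPR, so the whole expression folds to a <= b.  */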
  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
     into a single range test.  */
  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
      && TREE_CODE (arg1) == INTEGER_CST
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !integer_zerop (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (arg1))
    {
      tem = fold_div_compare (loc, code, type, arg0, arg1);
      if (tem != NULL_TREE)
	return tem;
    }

  /* Fold ~X op ~Y as Y op X.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == BIT_NOT_EXPR)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, code, type,
			      fold_convert_loc (loc, cmp_type,
						TREE_OPERAND (arg1, 0)),
			      TREE_OPERAND (arg0, 0));
    }

  /* Fold ~X op C as X op' ~C, where op' is the swapped comparison.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == INTEGER_CST)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, swap_tree_comparison (code), type,
			      TREE_OPERAND (arg0, 0),
			      fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
					       fold_convert_loc (loc, cmp_type,
								 arg1)));
    }

  return NULL_TREE;
}
/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */

static tree
fold_mult_zconjz (location_t loc, tree type, tree expr)
{
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      expr = save_expr (expr);
      rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
      ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
    }

  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2_loc (loc, PLUS_EXPR, itype,
			 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
			 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
  return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
			  build_zero_cst (itype));
}
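/* Worked example (illustrative): for z = x + y*I, the product
   z * conj(z) equals (x*x + y*y) + 0*I, so the fold emits the real
   expression x*x + y*y paired with a zero imaginary part and no cross
   terms.  */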
/* Subroutine of fold_binary.  If P is the value of EXPR, computes
   power-of-two M and (arbitrary) N such that M divides (P-N).  This condition
   guarantees that P and N have the same least significant log2(M) bits.
   N is not otherwise constrained.  In particular, N is not normalized to
   0 <= N < M as is common.  In general, the precise value of P is unknown.
   M is chosen as large as possible such that constant N can be determined.

   Returns M and sets *RESIDUE to N.

   If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
   account.  This is not always possible due to PR 35705.  */

static unsigned HOST_WIDE_INT
get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
				 bool allow_func_align)
{
  enum tree_code code;

  *residue = 0;

  code = TREE_CODE (expr);
  if (code == ADDR_EXPR)
    {
      unsigned int bitalign;
      get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
      *residue /= BITS_PER_UNIT;
      return bitalign / BITS_PER_UNIT;
    }
  else if (code == POINTER_PLUS_EXPR)
    {
      tree op0, op1;
      unsigned HOST_WIDE_INT modulus;
      enum tree_code inner_code;

      op0 = TREE_OPERAND (expr, 0);
      STRIP_NOPS (op0);
      modulus = get_pointer_modulus_and_residue (op0, residue,
						 allow_func_align);

      op1 = TREE_OPERAND (expr, 1);
      STRIP_NOPS (op1);
      inner_code = TREE_CODE (op1);
      if (inner_code == INTEGER_CST)
	{
	  *residue += TREE_INT_CST_LOW (op1);
	  return modulus;
	}
      else if (inner_code == MULT_EXPR)
	{
	  op1 = TREE_OPERAND (op1, 1);
	  if (TREE_CODE (op1) == INTEGER_CST)
	    {
	      unsigned HOST_WIDE_INT align;

	      /* Compute the greatest power-of-2 divisor of op1.  */
	      align = TREE_INT_CST_LOW (op1);
	      align &= -align;

	      /* If align is non-zero and less than *modulus, replace
		 *modulus with align.  If align is 0, then either op1 is 0
		 or the greatest power-of-2 divisor of op1 doesn't fit in an
		 unsigned HOST_WIDE_INT.  In either case, no additional
		 constraint is imposed.  */
	      if (align)
		modulus = MIN (modulus, align);

	      return modulus;
	    }
	}
    }

  /* If we get here, we were unable to determine anything useful about the
     expression.  */
  return 1;
}
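/* Worked example (illustrative): for "int a[8] __attribute__((aligned (16)));",
   the address &a[0] + 4 yields modulus 16 and residue 4: every value the
   pointer can take is congruent to 4 modulo 16, so its low four bits are
   known exactly.  */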
/* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
   CONSTRUCTOR ARG into array ELTS and return true if successful.  */

static bool
vec_cst_ctor_to_array (tree arg, tree *elts)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;

  if (TREE_CODE (arg) == VECTOR_CST)
    {
      for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
	elts[i] = VECTOR_CST_ELT (arg, i);
    }
  else if (TREE_CODE (arg) == CONSTRUCTOR)
    {
      constructor_elt *elt;

      FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
	if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
	  return false;
	else
	  elts[i] = elt->value;
    }
  else
    return false;
  for (; i < nelts; i++)
    elts[i]
      = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
  return true;
}
/* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
   selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
   NULL_TREE otherwise.  */

static tree
fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
  tree *elts;
  bool need_ctor = false;

  gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
	      && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
  if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
      || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
    return NULL_TREE;

  elts = XALLOCAVEC (tree, nelts * 3);
  if (!vec_cst_ctor_to_array (arg0, elts)
      || !vec_cst_ctor_to_array (arg1, elts + nelts))
    return NULL_TREE;

  for (i = 0; i < nelts; i++)
    {
      if (!CONSTANT_CLASS_P (elts[sel[i]]))
	need_ctor = true;
      elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
    }

  if (need_ctor)
    {
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, nelts);
      for (i = 0; i < nelts; i++)
	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
      return build_constructor (type, v);
    }
  else
    return build_vector (type, &elts[2 * nelts]);
}
/* Try to fold a pointer difference of type TYPE between two address
   expressions of array references AREF0 and AREF1 using location LOC.
   Return a simplified expression for the difference or NULL_TREE.  */

static tree
fold_addr_of_array_ref_difference (location_t loc, tree type,
				   tree aref0, tree aref1)
{
  tree base0 = TREE_OPERAND (aref0, 0);
  tree base1 = TREE_OPERAND (aref1, 0);
  tree base_offset = build_int_cst (type, 0);

  /* If the bases are array references as well, recurse.  If the bases
     are pointer indirections compute the difference of the pointers.
     If the bases are equal, we are set.  */
  if ((TREE_CODE (base0) == ARRAY_REF
       && TREE_CODE (base1) == ARRAY_REF
       && (base_offset
	   = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
      || (INDIRECT_REF_P (base0)
	  && INDIRECT_REF_P (base1)
	  && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
					     TREE_OPERAND (base0, 0),
					     TREE_OPERAND (base1, 0))))
      || operand_equal_p (base0, base1, 0))
    {
      tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
      tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
      tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
      tree diff = build2 (MINUS_EXPR, type, op0, op1);
      return fold_build2_loc (loc, PLUS_EXPR, type,
			      base_offset,
			      fold_build2_loc (loc, MULT_EXPR, type,
					       diff, esz));
    }
  return NULL_TREE;
}
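/* Worked example (illustrative): for "double a[16];", the difference
   &a[i] - &a[j] has equal bases, so it folds to 0 + (i - j) * 8, i.e.
   the index difference scaled by the element size.  */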
/* If the real or vector real constant CST of type TYPE has an exact
   inverse, return it, else return NULL.  */

static tree
exact_inverse (tree type, tree cst)
{
  REAL_VALUE_TYPE r;
  tree unit_type, *elts;
  enum machine_mode mode;
  unsigned vec_nelts, i;

  switch (TREE_CODE (cst))
    {
    case REAL_CST:
      r = TREE_REAL_CST (cst);

      if (exact_real_inverse (TYPE_MODE (type), &r))
        return build_real (type, r);

      return NULL_TREE;

    case VECTOR_CST:
      vec_nelts = VECTOR_CST_NELTS (cst);
      elts = XALLOCAVEC (tree, vec_nelts);
      unit_type = TREE_TYPE (type);
      mode = TYPE_MODE (unit_type);

      for (i = 0; i < vec_nelts; i++)
        {
          r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
          if (!exact_real_inverse (mode, &r))
            return NULL_TREE;
          elts[i] = build_real (unit_type, r);
        }

      return build_vector (type, elts);

    default:
      return NULL_TREE;
    }
}
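/* For example, 2.0 has the exact inverse 0.5 (a power of two), so x / 2.0
   can safely become x * 0.5; 3.0 has no exactly representable inverse, so
   exact_inverse returns NULL and the division is left alone.  */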
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  LOC is the location of the resulting expression.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

tree
fold_binary_loc (location_t loc,
                 enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree arg0, arg1, tem;
  tree t1 = NULL_TREE;
  bool strict_overflow_p;

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 2
              && op0 != NULL_TREE
              && op1 != NULL_TREE);

  arg0 = op0;
  arg1 = op1;

  /* Strip any conversions that don't change the mode.  This is
     safe for every expression, except for a comparison expression
     because its signedness is derived from its operands.  So, in
     the latter case, only strip conversions that don't change the
     signedness.  MIN_EXPR/MAX_EXPR also need the signedness of the
     arguments preserved.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation
     of the arguments so that their form can be studied.  In any
     case, the appropriate type conversions should be put back in
     the tree that will get out of the constant folder.  */

  if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
    {
      STRIP_SIGN_NOPS (arg0);
      STRIP_SIGN_NOPS (arg1);
    }
  else
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
    }

  /* Note that TREE_CONSTANT isn't enough: static var addresses are
     constant but we can't do arithmetic on them.  */
  if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
    {
      if (kind == tcc_binary)
        {
          /* Make sure type and arg0 have the same saturating flag.  */
          gcc_assert (TYPE_SATURATING (type)
                      == TYPE_SATURATING (TREE_TYPE (arg0)));
          tem = const_binop (code, arg0, arg1);
        }
      else if (kind == tcc_comparison)
        tem = fold_relational_const (code, type, arg0, arg1);
      else
        tem = NULL_TREE;

      if (tem != NULL_TREE)
        {
          if (TREE_TYPE (tem) != type)
            tem = fold_convert_loc (loc, type, tem);
          return tem;
        }
    }

  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, code, type, op1, op0);
  /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     one of the operands is a comparison and the other is a comparison, a
     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
     code below would make the expression more complex.  Change it to a
     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && TREE_CODE (type) != VECTOR_TYPE
      && ((truth_value_p (TREE_CODE (arg0))
           && (truth_value_p (TREE_CODE (arg1))
               || (TREE_CODE (arg1) == BIT_AND_EXPR
                   && integer_onep (TREE_OPERAND (arg1, 1)))))
          || (truth_value_p (TREE_CODE (arg1))
              && (truth_value_p (TREE_CODE (arg0))
                  || (TREE_CODE (arg0) == BIT_AND_EXPR
                      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
                             : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
                             : TRUTH_XOR_EXPR,
                             boolean_type_node,
                             fold_convert_loc (loc, boolean_type_node, arg0),
                             fold_convert_loc (loc, boolean_type_node, arg1));

      if (code == EQ_EXPR)
        tem = invert_truthvalue_loc (loc, tem);

      return fold_convert_loc (loc, type, tem);
    }
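  /* For example, (a < b) & (c < d) on two comparison results becomes
     TRUTH_AND_EXPR (a < b, c < d), (a < b) != (c < d) becomes a
     TRUTH_XOR_EXPR, and (a < b) == (c < d) becomes the inversion of the
     TRUTH_XOR_EXPR, exposing the usual truth-value simplifications.  */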
  if (TREE_CODE_CLASS (code) == tcc_binary
      || TREE_CODE_CLASS (code) == tcc_comparison)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
        {
          tem = fold_build2_loc (loc, code, type,
                                 fold_convert_loc (loc, TREE_TYPE (op0),
                                                   TREE_OPERAND (arg0, 1)), op1);
          return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
                             tem);
        }

      if (TREE_CODE (arg1) == COMPOUND_EXPR
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_build2_loc (loc, code, type, op0,
                                 fold_convert_loc (loc, TREE_TYPE (op1),
                                                   TREE_OPERAND (arg1, 1)));
          return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
                             tem);
        }

      if (TREE_CODE (arg0) == COND_EXPR
          || TREE_CODE (arg0) == VEC_COND_EXPR
          || COMPARISON_CLASS_P (arg0))
        {
          tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
                                                     arg0, arg1,
                                                     /*cond_first_p=*/1);
          if (tem != NULL_TREE)
            return tem;
        }

      if (TREE_CODE (arg1) == COND_EXPR
          || TREE_CODE (arg1) == VEC_COND_EXPR
          || COMPARISON_CLASS_P (arg1))
        {
          tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
                                                     arg1, arg0,
                                                     /*cond_first_p=*/0);
          if (tem != NULL_TREE)
            return tem;
        }
    }

  switch (code)
    {
    case MEM_REF:
      /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2].  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
        {
          tree iref = TREE_OPERAND (arg0, 0);
          return fold_build2 (MEM_REF, type,
                              TREE_OPERAND (iref, 0),
                              int_const_binop (PLUS_EXPR, arg1,
                                               TREE_OPERAND (iref, 1)));
        }

      /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2].  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && handled_component_p (TREE_OPERAND (arg0, 0)))
        {
          tree base;
          HOST_WIDE_INT coffset;
          base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
                                                &coffset);
          if (!base)
            return NULL_TREE;
          return fold_build2 (MEM_REF, type,
                              build_fold_addr_expr (base),
                              int_const_binop (PLUS_EXPR, arg1,
                                               size_int (coffset)));
        }

      return NULL_TREE;
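      /* For example, MEM[&MEM[p, 4], 8] collapses to MEM[p, 12], and for
         struct s { int a; int b; } x, MEM[&x.b, 4] becomes
         MEM[&x, offsetof (struct s, b) + 4].  */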
    case POINTER_PLUS_EXPR:
      /* 0 +p index -> (type)index */
      if (integer_zerop (arg0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

      /* PTR +p 0 -> PTR */
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
          && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
        return fold_convert_loc (loc, type,
                                 fold_build2_loc (loc, PLUS_EXPR, sizetype,
                                                  fold_convert_loc (loc, sizetype,
                                                                    arg1),
                                                  fold_convert_loc (loc, sizetype,
                                                                    arg0)));

      /* (PTR +p B) +p A -> PTR +p (B + A) */
      if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
        {
          tree inner;
          tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
          tree arg00 = TREE_OPERAND (arg0, 0);
          inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
                                   arg01, fold_convert_loc (loc, sizetype, arg1));
          return fold_convert_loc (loc, type,
                                   fold_build_pointer_plus_loc (loc,
                                                                arg00, inner));
        }

      /* PTR_CST +p CST -> CST1 */
      if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
        return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
                                fold_convert_loc (loc, type, arg1));

      /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
         of the array.  The loop optimizer sometimes produces this kind of
         expression.  */
      if (TREE_CODE (arg0) == ADDR_EXPR)
        {
          tem = try_move_mult_to_index (loc, arg0,
                                        fold_convert_loc (loc,
                                                          ssizetype, arg1));
          if (tem)
            return fold_convert_loc (loc, type, tem);
        }

      return NULL_TREE;

    case PLUS_EXPR:
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                fold_convert_loc (loc, type, arg0),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                fold_convert_loc (loc, type, arg1),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)));

      if (INTEGRAL_TYPE_P (type))
        {
          /* Convert ~A + 1 to -A.  */
          if (TREE_CODE (arg0) == BIT_NOT_EXPR
              && integer_onep (arg1))
            return fold_build1_loc (loc, NEGATE_EXPR, type,
                                    fold_convert_loc (loc, type,
                                                      TREE_OPERAND (arg0, 0)));

          /* ~X + X is -1.  */
          if (TREE_CODE (arg0) == BIT_NOT_EXPR
              && !TYPE_OVERFLOW_TRAPS (type))
            {
              tree tem = TREE_OPERAND (arg0, 0);

              STRIP_NOPS (tem);
              if (operand_equal_p (tem, arg1, 0))
                {
                  t1 = build_int_cst_type (type, -1);
                  return omit_one_operand_loc (loc, type, t1, arg1);
                }
            }

          /* X + ~X is -1.  */
          if (TREE_CODE (arg1) == BIT_NOT_EXPR
              && !TYPE_OVERFLOW_TRAPS (type))
            {
              tree tem = TREE_OPERAND (arg1, 0);

              STRIP_NOPS (tem);
              if (operand_equal_p (arg0, tem, 0))
                {
                  t1 = build_int_cst_type (type, -1);
                  return omit_one_operand_loc (loc, type, t1, arg0);
                }
            }

          /* X + (X / CST) * -CST is X % CST.  */
          if (TREE_CODE (arg1) == MULT_EXPR
              && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
              && operand_equal_p (arg0,
                                  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
            {
              tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
              tree cst1 = TREE_OPERAND (arg1, 1);
              tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
                                          cst1, cst0);
              if (sum && integer_zerop (sum))
                return fold_convert_loc (loc, type,
                                         fold_build2_loc (loc, TRUNC_MOD_EXPR,
                                                          TREE_TYPE (arg0), arg0,
                                                          cst0));
            }
        }
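      /* For example, on two's-complement integers ~x + 1 is exactly -x,
         and x + (x / 8) * -8 is the remainder x % 8; both identities are
         applied verbatim above.  */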
      /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
         one.  Make sure the type is not saturating and has the signedness of
         the stripped operands, as fold_plusminus_mult_expr will re-associate.
         ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
           || TREE_CODE (arg1) == MULT_EXPR)
          && !TYPE_SATURATING (type)
          && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
          && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
          && (!FLOAT_TYPE_P (type) || flag_associative_math))
        {
          tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
          if (tem)
            return tem;
        }

      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
             with a constant, and the two constants have no bits in common,
             we should treat this as a BIT_IOR_EXPR since this may produce more
             simplifications.  */
          if (TREE_CODE (arg0) == BIT_AND_EXPR
              && TREE_CODE (arg1) == BIT_AND_EXPR
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
              && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
              && integer_zerop (const_binop (BIT_AND_EXPR,
                                             TREE_OPERAND (arg0, 1),
                                             TREE_OPERAND (arg1, 1))))
            {
              code = BIT_IOR_EXPR;
              goto bit_ior;
            }
          /* Reassociate (plus (plus (mult) (foo)) (mult)) as
             (plus (plus (mult) (mult)) (foo)) so that we can
             take advantage of the factoring cases below.  */
          if (TYPE_OVERFLOW_WRAPS (type)
              && (((TREE_CODE (arg0) == PLUS_EXPR
                    || TREE_CODE (arg0) == MINUS_EXPR)
                   && TREE_CODE (arg1) == MULT_EXPR)
                  || ((TREE_CODE (arg1) == PLUS_EXPR
                       || TREE_CODE (arg1) == MINUS_EXPR)
                      && TREE_CODE (arg0) == MULT_EXPR)))
            {
              tree parg0, parg1, parg, marg;
              enum tree_code pcode;

              if (TREE_CODE (arg1) == MULT_EXPR)
                parg = arg0, marg = arg1;
              else
                parg = arg1, marg = arg0;
              pcode = TREE_CODE (parg);
              parg0 = TREE_OPERAND (parg, 0);
              parg1 = TREE_OPERAND (parg, 1);
              STRIP_NOPS (parg0);
              STRIP_NOPS (parg1);

              if (TREE_CODE (parg0) == MULT_EXPR
                  && TREE_CODE (parg1) != MULT_EXPR)
                return fold_build2_loc (loc, pcode, type,
                                        fold_build2_loc (loc, PLUS_EXPR, type,
                                                         fold_convert_loc (loc, type,
                                                                           parg0),
                                                         fold_convert_loc (loc, type,
                                                                           marg)),
                                        fold_convert_loc (loc, type, parg1));
              if (TREE_CODE (parg0) != MULT_EXPR
                  && TREE_CODE (parg1) == MULT_EXPR)
                return
                  fold_build2_loc (loc, PLUS_EXPR, type,
                                   fold_convert_loc (loc, type, parg0),
                                   fold_build2_loc (loc, pcode, type,
                                                    fold_convert_loc (loc, type, marg),
                                                    fold_convert_loc (loc, type,
                                                                      parg1)));
            }
        }
      else
        {
          /* See if ARG1 is zero and X + ARG1 reduces to X.  */
          if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* Likewise if the operands are reversed.  */
          if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

          /* Convert X + -C into X - C.  */
          if (TREE_CODE (arg1) == REAL_CST
              && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
            {
              tem = fold_negate_const (arg1, type);
              if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
                return fold_build2_loc (loc, MINUS_EXPR, type,
                                        fold_convert_loc (loc, type, arg0),
                                        fold_convert_loc (loc, type, tem));
            }

          /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
             to __complex__ ( x, y ).  This is not the same for SNaNs or
             if signed zeros are involved.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
            {
              tree rtype = TREE_TYPE (TREE_TYPE (arg0));
              tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
              tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
              bool arg0rz = false, arg0iz = false;
              if ((arg0r && (arg0rz = real_zerop (arg0r)))
                  || (arg0i && (arg0iz = real_zerop (arg0i))))
                {
                  tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
                  tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
                  if (arg0rz && arg1i && real_zerop (arg1i))
                    {
                      tree rp = arg1r ? arg1r
                        : build1 (REALPART_EXPR, rtype, arg1);
                      tree ip = arg0i ? arg0i
                        : build1 (IMAGPART_EXPR, rtype, arg0);
                      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                    }
                  else if (arg0iz && arg1r && real_zerop (arg1r))
                    {
                      tree rp = arg0r ? arg0r
                        : build1 (REALPART_EXPR, rtype, arg0);
                      tree ip = arg1i ? arg1i
                        : build1 (IMAGPART_EXPR, rtype, arg1);
                      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                    }
                }
            }
          if (flag_unsafe_math_optimizations
              && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
              && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
              && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
            return tem;

          /* Convert x+x into x*2.0.  */
          if (operand_equal_p (arg0, arg1, 0)
              && SCALAR_FLOAT_TYPE_P (type))
            return fold_build2_loc (loc, MULT_EXPR, type, arg0,
                                    build_real (type, dconst2));

          /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
             We associate floats only if the user has specified
             -fassociative-math.  */
          if (flag_associative_math
              && TREE_CODE (arg1) == PLUS_EXPR
              && TREE_CODE (arg0) != MULT_EXPR)
            {
              tree tree10 = TREE_OPERAND (arg1, 0);
              tree tree11 = TREE_OPERAND (arg1, 1);
              if (TREE_CODE (tree11) == MULT_EXPR
                  && TREE_CODE (tree10) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
                  return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
                }
            }
          /* Convert (b*c + d*e) + a into b*c + (d*e + a).
             We associate floats only if the user has specified
             -fassociative-math.  */
          if (flag_associative_math
              && TREE_CODE (arg0) == PLUS_EXPR
              && TREE_CODE (arg1) != MULT_EXPR)
            {
              tree tree00 = TREE_OPERAND (arg0, 0);
              tree tree01 = TREE_OPERAND (arg0, 1);
              if (TREE_CODE (tree01) == MULT_EXPR
                  && TREE_CODE (tree00) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
                  return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
                }
            }
        }
    bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
         is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
         is a rotate of A by B bits.  */
      {
        enum tree_code code0, code1;
        tree rtype;
        code0 = TREE_CODE (arg0);
        code1 = TREE_CODE (arg1);
        if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
             || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
            && operand_equal_p (TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg1, 0), 0)
            && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
                TYPE_UNSIGNED (rtype))
            /* Only create rotates in complete modes.  Other cases are not
               expanded properly.  */
            && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
          {
            tree tree01, tree11;
            enum tree_code code01, code11;

            tree01 = TREE_OPERAND (arg0, 1);
            tree11 = TREE_OPERAND (arg1, 1);
            STRIP_NOPS (tree01);
            STRIP_NOPS (tree11);
            code01 = TREE_CODE (tree01);
            code11 = TREE_CODE (tree11);
            if (code01 == INTEGER_CST
                && code11 == INTEGER_CST
                && TREE_INT_CST_HIGH (tree01) == 0
                && TREE_INT_CST_HIGH (tree11) == 0
                && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
                    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
              {
                tem = build2_loc (loc, LROTATE_EXPR,
                                  TREE_TYPE (TREE_OPERAND (arg0, 0)),
                                  TREE_OPERAND (arg0, 0),
                                  code0 == LSHIFT_EXPR ? tree01 : tree11);
                return fold_convert_loc (loc, type, tem);
              }
            else if (code11 == MINUS_EXPR)
              {
                tree tree110, tree111;
                tree110 = TREE_OPERAND (tree11, 0);
                tree111 = TREE_OPERAND (tree11, 1);
                STRIP_NOPS (tree110);
                STRIP_NOPS (tree111);
                if (TREE_CODE (tree110) == INTEGER_CST
                    && 0 == compare_tree_int (tree110,
                                              TYPE_PRECISION
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree01, tree111, 0))
                  return
                    fold_convert_loc (loc, type,
                                      build2 ((code0 == LSHIFT_EXPR
                                               ? LROTATE_EXPR
                                               : RROTATE_EXPR),
                                              TREE_TYPE (TREE_OPERAND (arg0, 0)),
                                              TREE_OPERAND (arg0, 0), tree01));
              }
            else if (code01 == MINUS_EXPR)
              {
                tree tree010, tree011;
                tree010 = TREE_OPERAND (tree01, 0);
                tree011 = TREE_OPERAND (tree01, 1);
                STRIP_NOPS (tree010);
                STRIP_NOPS (tree011);
                if (TREE_CODE (tree010) == INTEGER_CST
                    && 0 == compare_tree_int (tree010,
                                              TYPE_PRECISION
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree11, tree011, 0))
                  return fold_convert_loc
                    (loc, type,
                     build2 ((code0 != LSHIFT_EXPR
                              ? LROTATE_EXPR
                              : RROTATE_EXPR),
                             TREE_TYPE (TREE_OPERAND (arg0, 0)),
                             TREE_OPERAND (arg0, 0), tree11));
              }
          }
      }
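      /* For example, with a 32-bit unsigned x, (x << 3) + (x >> 29)
         matches the constant pattern (3 + 29 == 32) and becomes a left
         rotate of x by 3, and (x << n) + (x >> (32 - n)) matches the
         variable pattern handled by the MINUS_EXPR branches above.  */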
    associate:
      /* In most languages, can't associate operations on floats through
         parentheses.  Rather than remember where the parentheses were, we
         don't associate floats at all, unless the user has specified
         -fassociative-math.
         And, we need to make sure type is not saturating.  */

      if ((! FLOAT_TYPE_P (type) || flag_associative_math)
          && !TYPE_SATURATING (type))
        {
          tree var0, con0, lit0, minus_lit0;
          tree var1, con1, lit1, minus_lit1;
          tree atype = type;
          bool ok = true;

          /* Split both trees into variables, constants, and literals.  Then
             associate each group together, the constants with literals,
             then the result with variables.  This increases the chances of
             literals being recombined later and of generating relocatable
             expressions for the sum of a constant and literal.  */
          var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
          var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
                             code == MINUS_EXPR);

          /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
          if (code == MINUS_EXPR)
            code = PLUS_EXPR;

          /* With undefined overflow prefer doing association in a type
             which wraps on overflow, if that is one of the operand types.  */
          if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
              || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
            {
              if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
                  && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
                atype = TREE_TYPE (arg0);
              else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
                       && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
                atype = TREE_TYPE (arg1);
              gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
            }

          /* With undefined overflow we can only associate constants with one
             variable, and constants whose association doesn't overflow.  */
          if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
              || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
            {
              if (var0 && var1)
                {
                  tree tmp0 = var0;
                  tree tmp1 = var1;

                  if (TREE_CODE (tmp0) == NEGATE_EXPR)
                    tmp0 = TREE_OPERAND (tmp0, 0);
                  if (CONVERT_EXPR_P (tmp0)
                      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
                      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
                          <= TYPE_PRECISION (atype)))
                    tmp0 = TREE_OPERAND (tmp0, 0);
                  if (TREE_CODE (tmp1) == NEGATE_EXPR)
                    tmp1 = TREE_OPERAND (tmp1, 0);
                  if (CONVERT_EXPR_P (tmp1)
                      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
                      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
                          <= TYPE_PRECISION (atype)))
                    tmp1 = TREE_OPERAND (tmp1, 0);
                  /* The only case we can still associate with two variables
                     is if they are the same, modulo negation and bit-pattern
                     preserving conversions.  */
                  if (!operand_equal_p (tmp0, tmp1, 0))
                    ok = false;
                }
            }

          /* Only do something if we found more than two objects.  Otherwise,
             nothing has changed and we risk infinite recursion.  */
          if (ok
              && (2 < ((var0 != 0) + (var1 != 0)
                       + (con0 != 0) + (con1 != 0)
                       + (lit0 != 0) + (lit1 != 0)
                       + (minus_lit0 != 0) + (minus_lit1 != 0))))
            {
              bool any_overflows = false;
              if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
              if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
              if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
              if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
              var0 = associate_trees (loc, var0, var1, code, atype);
              con0 = associate_trees (loc, con0, con1, code, atype);
              lit0 = associate_trees (loc, lit0, lit1, code, atype);
              minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
                                            code, atype);

              /* Preserve the MINUS_EXPR if the negative part of the literal is
                 greater than the positive part.  Otherwise, the multiplicative
                 folding code (i.e. extract_muldiv) may be fooled in case
                 unsigned constants are subtracted, like in the following
                 example: ((X*2 + 4) - 8U)/2.  */
              if (minus_lit0 && lit0)
                {
                  if (TREE_CODE (lit0) == INTEGER_CST
                      && TREE_CODE (minus_lit0) == INTEGER_CST
                      && tree_int_cst_lt (lit0, minus_lit0))
                    {
                      minus_lit0 = associate_trees (loc, minus_lit0, lit0,
                                                    MINUS_EXPR, atype);
                      lit0 = 0;
                    }
                  else
                    {
                      lit0 = associate_trees (loc, lit0, minus_lit0,
                                              MINUS_EXPR, atype);
                      minus_lit0 = 0;
                    }
                }

              /* Don't introduce overflows through reassociation.  */
              if (!any_overflows
                  && ((lit0 && TREE_OVERFLOW (lit0))
                      || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
                return NULL_TREE;

              if (minus_lit0)
                {
                  if (con0 == 0)
                    return
                      fold_convert_loc (loc, type,
                                        associate_trees (loc, var0, minus_lit0,
                                                         MINUS_EXPR, atype));
                  else
                    {
                      con0 = associate_trees (loc, con0, minus_lit0,
                                              MINUS_EXPR, atype);
                      return
                        fold_convert_loc (loc, type,
                                          associate_trees (loc, var0, con0,
                                                           PLUS_EXPR, atype));
                    }
                }

              con0 = associate_trees (loc, con0, lit0, code, atype);
              return
                fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
                                                              code, atype));
            }
        }

      return NULL_TREE;
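      /* For example, for (x + 1) + (y + 2) in a wrapping type, split_tree
         yields variables x and y and literals 1 and 2; the literals are
         combined first, so the whole sum becomes (x + y) + 3 in a single
         pass rather than by repeated recursion.  */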
    case MINUS_EXPR:
      /* Pointer simplifications for subtraction, simple reassociations.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
        {
          /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
          if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
              && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
            {
              tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
              tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
              tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
              tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
              return fold_build2_loc (loc, PLUS_EXPR, type,
                                      fold_build2_loc (loc, MINUS_EXPR, type,
                                                       arg00, arg10),
                                      fold_build2_loc (loc, MINUS_EXPR, type,
                                                       arg01, arg11));
            }
          /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1
             simplifies.  */
          else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
            {
              tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
              tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
              tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
                                          fold_convert_loc (loc, type, arg1));
              if (tmp)
                return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
            }
        }
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2_loc (loc, PLUS_EXPR, type, op0,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && (FLOAT_TYPE_P (type)
              || INTEGRAL_TYPE_P (type))
          && negate_expr_p (arg1)
          && reorder_operands_p (arg0, arg1))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg1)),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)));
      /* Convert -A - 1 to ~A.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (arg0) == NEGATE_EXPR
          && integer_onep (arg1)
          && !TYPE_OVERFLOW_TRAPS (type))
        return fold_build1_loc (loc, BIT_NOT_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)));

      /* Convert -1 - A to ~A.  */
      if (INTEGRAL_TYPE_P (type)
          && integer_all_onesp (arg0))
        return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);

      /* X - (X / CST) * CST is X % CST.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (arg1) == MULT_EXPR
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
          && operand_equal_p (arg0,
                              TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
                              TREE_OPERAND (arg1, 1), 0))
        return
          fold_convert_loc (loc, type,
                            fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
                                             arg0, TREE_OPERAND (arg1, 1)));
))
10683 if (integer_zerop (arg0
))
10684 return negate_expr (fold_convert_loc (loc
, type
, arg1
));
10685 if (integer_zerop (arg1
))
10686 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10688 /* Fold A - (A & B) into ~B & A. */
10689 if (!TREE_SIDE_EFFECTS (arg0
)
10690 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
10692 if (operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0))
10694 tree arg10
= fold_convert_loc (loc
, type
,
10695 TREE_OPERAND (arg1
, 0));
10696 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10697 fold_build1_loc (loc
, BIT_NOT_EXPR
,
10699 fold_convert_loc (loc
, type
, arg0
));
10701 if (operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10703 tree arg11
= fold_convert_loc (loc
,
10704 type
, TREE_OPERAND (arg1
, 1));
10705 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10706 fold_build1_loc (loc
, BIT_NOT_EXPR
,
10708 fold_convert_loc (loc
, type
, arg0
));
10712 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10713 any power of 2 minus 1. */
10714 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10715 && TREE_CODE (arg1
) == BIT_AND_EXPR
10716 && operand_equal_p (TREE_OPERAND (arg0
, 0),
10717 TREE_OPERAND (arg1
, 0), 0))
10719 tree mask0
= TREE_OPERAND (arg0
, 1);
10720 tree mask1
= TREE_OPERAND (arg1
, 1);
10721 tree tem
= fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, mask0
);
10723 if (operand_equal_p (tem
, mask1
, 0))
10725 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, type
,
10726 TREE_OPERAND (arg0
, 0), mask1
);
10727 return fold_build2_loc (loc
, MINUS_EXPR
, type
, tem
, mask1
);
      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
         ARG0 is zero and X + ARG0 reduces to X, since that would mean
         (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
        return negate_expr (fold_convert_loc (loc, type, arg1));

      /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
         __complex__ ( x, -y ).  This is not the same for SNaNs or if
         signed zeros are involved.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
          && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
        {
          tree rtype = TREE_TYPE (TREE_TYPE (arg0));
          tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
          tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
          bool arg0rz = false, arg0iz = false;
          if ((arg0r && (arg0rz = real_zerop (arg0r)))
              || (arg0i && (arg0iz = real_zerop (arg0i))))
            {
              tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
              tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
              if (arg0rz && arg1i && real_zerop (arg1i))
                {
                  tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
                                             arg1r ? arg1r
                                             : build1 (REALPART_EXPR, rtype, arg1));
                  tree ip = arg0i ? arg0i
                    : build1 (IMAGPART_EXPR, rtype, arg0);
                  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                }
              else if (arg0iz && arg1r && real_zerop (arg1r))
                {
                  tree rp = arg0r ? arg0r
                    : build1 (REALPART_EXPR, rtype, arg0);
                  tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
                                             arg1i ? arg1i
                                             : build1 (IMAGPART_EXPR, rtype, arg1));
                  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                }
            }
        }
      /* Fold &x - &x.  This can happen from &x.foo - &x.
         This is unsafe for certain floats even in non-IEEE formats.
         In IEEE, it is unsafe because it does wrong for NaNs.
         Also note that operand_equal_p is always false if an operand
         is volatile.  */

      if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
          && operand_equal_p (arg0, arg1, 0))
        return build_zero_cst (type);

      /* A - B -> A + (-B) if B is easily negatable.  */
      if (negate_expr_p (arg1)
          && ((FLOAT_TYPE_P (type)
               /* Avoid this transformation if B is a positive REAL_CST.  */
               && (TREE_CODE (arg1) != REAL_CST
                   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
              || INTEGRAL_TYPE_P (type)))
        return fold_build2_loc (loc, PLUS_EXPR, type,
                                fold_convert_loc (loc, type, arg0),
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg1)));

      /* Try folding difference of addresses.  */
      {
        HOST_WIDE_INT diff;

        if ((TREE_CODE (arg0) == ADDR_EXPR
             || TREE_CODE (arg1) == ADDR_EXPR)
            && ptr_difference_const (arg0, arg1, &diff))
          return build_int_cst_type (type, diff);
      }

      /* Fold &a[i] - &a[j] to i-j.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
          && TREE_CODE (arg1) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
        {
          tree tem = fold_addr_of_array_ref_difference (loc, type,
                                                        TREE_OPERAND (arg0, 0),
                                                        TREE_OPERAND (arg1, 0));
          if (tem)
            return tem;
        }

      if (FLOAT_TYPE_P (type)
          && flag_unsafe_math_optimizations
          && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
          && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
          && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
        return tem;

      /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
         one.  Make sure the type is not saturating and has the signedness of
         the stripped operands, as fold_plusminus_mult_expr will re-associate.
         ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
           || TREE_CODE (arg1) == MULT_EXPR)
          && !TYPE_SATURATING (type)
          && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
          && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
          && (!FLOAT_TYPE_P (type) || flag_associative_math))
        {
          tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
          if (tem)
            return tem;
        }

      goto associate;

    case MULT_EXPR:
      /* (-A) * (-B) -> A * B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold_build2_loc (loc, MULT_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)),
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold_build2_loc (loc, MULT_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg0)),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));

      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg1))
            return omit_one_operand_loc (loc, type, arg1, arg0);
          if (integer_onep (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
          /* Transform x * -1 into -x.  Make sure to do the negation
             on the original operand with conversions not stripped
             because we can only strip non-sign-changing conversions.  */
          if (integer_all_onesp (arg1))
            return fold_convert_loc (loc, type, negate_expr (op0));
          /* Transform x * -C into -x * C if x is easily negatable.  */
          if (TREE_CODE (arg1) == INTEGER_CST
              && tree_int_cst_sgn (arg1) == -1
              && negate_expr_p (arg0)
              && (tem = negate_expr (arg1)) != arg1
              && !TREE_OVERFLOW (tem))
            return fold_build2_loc (loc, MULT_EXPR, type,
                                    fold_convert_loc (loc, type,
                                                      negate_expr (arg0)),
                                    tem);

          /* (a * (1 << b)) is (a << b)  */
          if (TREE_CODE (arg1) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg1, 0)))
            return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
                                    TREE_OPERAND (arg1, 1));
          if (TREE_CODE (arg0) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg0, 0)))
            return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
                                    TREE_OPERAND (arg0, 1));

          /* (A + A) * C -> A * 2 * C  */
          if (TREE_CODE (arg0) == PLUS_EXPR
              && TREE_CODE (arg1) == INTEGER_CST
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg0, 1), 0))
            return fold_build2_loc (loc, MULT_EXPR, type,
                                    omit_one_operand_loc (loc, type,
                                                          TREE_OPERAND (arg0, 0),
                                                          TREE_OPERAND (arg0, 1)),
                                    fold_build2_loc (loc, MULT_EXPR, type,
                                                     build_int_cst (type, 2), arg1));

          strict_overflow_p = false;
          if (TREE_CODE (arg1) == INTEGER_CST
              && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                             &strict_overflow_p)))
            {
              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying "
                                        "multiplication"),
                                       WARN_STRICT_OVERFLOW_MISC);
              return fold_convert_loc (loc, type, tem);
            }
10922 if (TREE_CODE (arg0
) == CONJ_EXPR
10923 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10924 return fold_mult_zconjz (loc
, type
, arg1
);
10925 if (TREE_CODE (arg1
) == CONJ_EXPR
10926 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10927 return fold_mult_zconjz (loc
, type
, arg0
);
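          /* For example, with z = x + yi, z * conj(z) equals
             (x*x + y*y) + 0i, so fold_mult_zconjz can build the real part
             x*x + y*y directly with a zero imaginary part.  */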
        }
      else
        {
          /* Maybe fold x * 0 to 0.  The expressions aren't the same
             when x is NaN, since x * 0 is also NaN.  Nor are they the
             same in modes with signed zeros, since multiplying a
             negative value by 0 gives -0, not +0.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && real_zerop (arg1))
            return omit_one_operand_loc (loc, type, arg1, arg0);
          /* In IEEE floating point, x*1 is not equivalent to x for snans.
             Likewise for complex arithmetic with signed zeros.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
                  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
              && real_onep (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* Transform x * -1.0 into -x.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
                  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
              && real_minus_onep (arg1))
            return fold_convert_loc (loc, type, negate_expr (arg0));

          /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may change
             the result for floating point types due to rounding so it is
             applied only if -fassociative-math was specified.  */
          if (flag_associative_math
              && TREE_CODE (arg0) == RDIV_EXPR
              && TREE_CODE (arg1) == REAL_CST
              && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
            {
              tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
                                      arg1);
              if (tem)
                return fold_build2_loc (loc, RDIV_EXPR, type, tem,
                                        TREE_OPERAND (arg0, 1));
            }

          /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
          if (operand_equal_p (arg0, arg1, 0))
            {
              tree tem = fold_strip_sign_ops (arg0);
              if (tem != NULL_TREE)
                {
                  tem = fold_convert_loc (loc, type, tem);
                  return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
                }
            }

          /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
             This is not the same for NaNs or if signed zeros are
             involved.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
              && TREE_CODE (arg1) == COMPLEX_CST
              && real_zerop (TREE_REALPART (arg1)))
            {
              tree rtype = TREE_TYPE (TREE_TYPE (arg0));
              if (real_onep (TREE_IMAGPART (arg1)))
                return
                  fold_build2_loc (loc, COMPLEX_EXPR, type,
                                   negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
                                                                 rtype, arg0)),
                                   fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
              else if (real_minus_onep (TREE_IMAGPART (arg1)))
                return
                  fold_build2_loc (loc, COMPLEX_EXPR, type,
                                   fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
                                   negate_expr (fold_build1_loc (loc, REALPART_EXPR,
                                                                 rtype, arg0)));
            }

          /* Optimize z * conj(z) for floating point complex numbers.
             Guarded by flag_unsafe_math_optimizations as non-finite
             imaginary components don't produce scalar results.  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg0) == CONJ_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
            return fold_mult_zconjz (loc, type, arg1);
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg1) == CONJ_EXPR
              && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
            return fold_mult_zconjz (loc, type, arg0);
)
11018 enum built_in_function fcode0
= builtin_mathfn_code (arg0
);
11019 enum built_in_function fcode1
= builtin_mathfn_code (arg1
);
11021 /* Optimizations of root(...)*root(...). */
11022 if (fcode0
== fcode1
&& BUILTIN_ROOT_P (fcode0
))
11025 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
11026 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
11028 /* Optimize sqrt(x)*sqrt(x) as x. */
11029 if (BUILTIN_SQRT_P (fcode0
)
11030 && operand_equal_p (arg00
, arg10
, 0)
11031 && ! HONOR_SNANS (TYPE_MODE (type
)))
11034 /* Optimize root(x)*root(y) as root(x*y). */
11035 rootfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
11036 arg
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg00
, arg10
);
11037 return build_call_expr_loc (loc
, rootfn
, 1, arg
);
11040 /* Optimize expN(x)*expN(y) as expN(x+y). */
11041 if (fcode0
== fcode1
&& BUILTIN_EXPONENT_P (fcode0
))
11043 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
11044 tree arg
= fold_build2_loc (loc
, PLUS_EXPR
, type
,
11045 CALL_EXPR_ARG (arg0
, 0),
11046 CALL_EXPR_ARG (arg1
, 0));
11047 return build_call_expr_loc (loc
, expfn
, 1, arg
);
11050 /* Optimizations of pow(...)*pow(...). */
11051 if ((fcode0
== BUILT_IN_POW
&& fcode1
== BUILT_IN_POW
)
11052 || (fcode0
== BUILT_IN_POWF
&& fcode1
== BUILT_IN_POWF
)
11053 || (fcode0
== BUILT_IN_POWL
&& fcode1
== BUILT_IN_POWL
))
11055 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
11056 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
11057 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
11058 tree arg11
= CALL_EXPR_ARG (arg1
, 1);
11060 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11061 if (operand_equal_p (arg01
, arg11
, 0))
11063 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
11064 tree arg
= fold_build2_loc (loc
, MULT_EXPR
, type
,
11066 return build_call_expr_loc (loc
, powfn
, 2, arg
, arg01
);
11069 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11070 if (operand_equal_p (arg00
, arg10
, 0))
11072 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
11073 tree arg
= fold_build2_loc (loc
, PLUS_EXPR
, type
,
11075 return build_call_expr_loc (loc
, powfn
, 2, arg00
, arg
);
11079 /* Optimize tan(x)*cos(x) as sin(x). */
11080 if (((fcode0
== BUILT_IN_TAN
&& fcode1
== BUILT_IN_COS
)
11081 || (fcode0
== BUILT_IN_TANF
&& fcode1
== BUILT_IN_COSF
)
11082 || (fcode0
== BUILT_IN_TANL
&& fcode1
== BUILT_IN_COSL
)
11083 || (fcode0
== BUILT_IN_COS
&& fcode1
== BUILT_IN_TAN
)
11084 || (fcode0
== BUILT_IN_COSF
&& fcode1
== BUILT_IN_TANF
)
11085 || (fcode0
== BUILT_IN_COSL
&& fcode1
== BUILT_IN_TANL
))
11086 && operand_equal_p (CALL_EXPR_ARG (arg0
, 0),
11087 CALL_EXPR_ARG (arg1
, 0), 0))
11089 tree sinfn
= mathfn_built_in (type
, BUILT_IN_SIN
);
11091 if (sinfn
!= NULL_TREE
)
11092 return build_call_expr_loc (loc
, sinfn
, 1,
11093 CALL_EXPR_ARG (arg0
, 0));
11096 /* Optimize x*pow(x,c) as pow(x,c+1). */
11097 if (fcode1
== BUILT_IN_POW
11098 || fcode1
== BUILT_IN_POWF
11099 || fcode1
== BUILT_IN_POWL
)
11101 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
11102 tree arg11
= CALL_EXPR_ARG (arg1
, 1);
11103 if (TREE_CODE (arg11
) == REAL_CST
11104 && !TREE_OVERFLOW (arg11
)
11105 && operand_equal_p (arg0
, arg10
, 0))
11107 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
11111 c
= TREE_REAL_CST (arg11
);
11112 real_arithmetic (&c
, PLUS_EXPR
, &c
, &dconst1
);
11113 arg
= build_real (type
, c
);
11114 return build_call_expr_loc (loc
, powfn
, 2, arg0
, arg
);
11118 /* Optimize pow(x,c)*x as pow(x,c+1). */
11119 if (fcode0
== BUILT_IN_POW
11120 || fcode0
== BUILT_IN_POWF
11121 || fcode0
== BUILT_IN_POWL
)
11123 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
11124 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
11125 if (TREE_CODE (arg01
) == REAL_CST
11126 && !TREE_OVERFLOW (arg01
)
11127 && operand_equal_p (arg1
, arg00
, 0))
11129 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
11133 c
= TREE_REAL_CST (arg01
);
11134 real_arithmetic (&c
, PLUS_EXPR
, &c
, &dconst1
);
11135 arg
= build_real (type
, c
);
11136 return build_call_expr_loc (loc
, powfn
, 2, arg1
, arg
);
11140 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11141 if (!in_gimple_form
11143 && operand_equal_p (arg0
, arg1
, 0))
11145 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
11149 tree arg
= build_real (type
, dconst2
);
11150 return build_call_expr_loc (loc
, powfn
, 2, arg0
, arg
);
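                  /* For example, under -funsafe-math-optimizations the folds
                     above rewrite sqrt(x)*sqrt(y) as sqrt(x*y), exp(x)*exp(y)
                     as exp(x+y), tan(x)*cos(x) as sin(x) and pow(x,c)*x as
                     pow(x,c+1); none of these is exact in IEEE arithmetic,
                     which is why they are guarded by the flag.  */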
                }
            }
        }
      goto associate;

    case BIT_IOR_EXPR:
    bit_ior:
      if (integer_all_onesp (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* ~X | X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          t1 = build_zero_cst (type);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg1);
        }

      /* X | ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          t1 = build_zero_cst (type);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg0);
        }

      /* Canonicalize (X & C1) | C2.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          double_int c1, c2, c3, msk;
          int width = TYPE_PRECISION (type), w;
          c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
          c2 = tree_to_double_int (arg1);

          /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
          if ((c1 & c2) == c1)
            return omit_one_operand_loc (loc, type, arg1,
                                         TREE_OPERAND (arg0, 0));

          msk = double_int::mask (width);

          /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
          if (msk.and_not (c1 | c2).is_zero ())
            return fold_build2_loc (loc, BIT_IOR_EXPR, type,
                                    TREE_OPERAND (arg0, 0), arg1);

          /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
             unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
             mode which allows further optimizations.  */
          c1 &= msk;
          c2 &= msk;
          c3 = c1.and_not (c2);
          for (w = BITS_PER_UNIT;
               w <= width && w <= HOST_BITS_PER_WIDE_INT;
               w <<= 1)
            {
              unsigned HOST_WIDE_INT mask
                = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
              if (((c1.low | c2.low) & mask) == mask
                  && (c1.low & ~mask) == 0 && c1.high == 0)
                {
                  c3 = double_int::from_uhwi (mask);
                  break;
                }
            }
          if (c3 != c1)
            return fold_build2_loc (loc, BIT_IOR_EXPR, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, type,
                                                     TREE_OPERAND (arg0, 0),
                                                     double_int_to_tree (type,
                                                                         c3)),
                                    arg1);
        }
      /* (X & Y) | Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
      /* (X & Y) | X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
      /* X | (X & Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
      /* X | (Y & X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));

      /* (X & ~Y) | (~X & Y) is X ^ Y */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == BIT_AND_EXPR)
        {
          tree a0, a1, l0, l1, n0, n1;

          a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
          a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));

          l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));

          n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
          n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);

          if ((operand_equal_p (n0, a0, 0)
               && operand_equal_p (n1, a1, 0))
              || (operand_equal_p (n0, a1, 0)
                  && operand_equal_p (n1, a0, 0)))
            return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
        }

      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
        return t1;
      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

         This results in more efficient code for machines without a NAND
         instruction.  Combine will canonicalize to the first form
         which will allow use of NAND instructions provided by the
         backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        {
          return
            fold_build1_loc (loc, BIT_NOT_EXPR, type,
                             build2 (BIT_AND_EXPR, type,
                                     fold_convert_loc (loc, type,
                                                       TREE_OPERAND (arg0, 0)),
                                     fold_convert_loc (loc, type,
                                                       TREE_OPERAND (arg1, 0))));
        }
      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_all_onesp (arg1))
        return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      /* ~X ^ X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          t1 = build_zero_cst (type);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg1);
        }

      /* X ^ ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          t1 = build_zero_cst (type);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg0);
        }

      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
         with a constant, and the two constants have no bits in common,
         we should treat this as a BIT_IOR_EXPR since this may produce more
         simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
          && integer_zerop (const_binop (BIT_AND_EXPR,
                                         TREE_OPERAND (arg0, 1),
                                         TREE_OPERAND (arg1, 1))))
        {
          code = BIT_IOR_EXPR;
          goto bit_ior;
        }
      /* (X | Y) ^ X -> Y & ~X.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          tree t2 = TREE_OPERAND (arg0, 1);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
                                arg1);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                                fold_convert_loc (loc, type, t2),
                                fold_convert_loc (loc, type, t1));
          return t1;
        }

      /* (Y | X) ^ X -> Y & ~X.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tree t2 = TREE_OPERAND (arg0, 0);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
                                arg1);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                                fold_convert_loc (loc, type, t2),
                                fold_convert_loc (loc, type, t1));
          return t1;
        }

      /* X ^ (X | Y) -> Y & ~X.  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
        {
          tree t2 = TREE_OPERAND (arg1, 1);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
                                arg0);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                                fold_convert_loc (loc, type, t2),
                                fold_convert_loc (loc, type, t1));
          return t1;
        }

      /* X ^ (Y | X) -> Y & ~X.  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
        {
          tree t2 = TREE_OPERAND (arg1, 0);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
                                arg0);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                                fold_convert_loc (loc, type, t2),
                                fold_convert_loc (loc, type, t1));
          return t1;
        }
      /* Convert ~X ^ ~Y to X ^ Y.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        return fold_build2_loc (loc, code, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));

      /* Convert ~X ^ C to X ^ ~C.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == INTEGER_CST)
        return fold_build2_loc (loc, code, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)),
                                fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));

      /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_onep (arg1))
        return fold_build2_loc (loc, EQ_EXPR, type, arg0,
                                build_zero_cst (TREE_TYPE (arg0)));

      /* Fold (X & Y) ^ Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg1));
        }
      /* Fold (X & Y) ^ X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg1));
        }
      /* Fold X ^ (X & Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_convert_loc (loc, type, arg0),
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
        }
      /* Fold X ^ (Y & X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg0));
        }
      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_AND_EXPR:
      if (integer_all_onesp (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      if (operand_equal_p (arg0, arg1, 0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* ~X & X, (X == 0) & X, and !X & X are always zero.  */
      if ((TREE_CODE (arg0) == BIT_NOT_EXPR
           || TREE_CODE (arg0) == TRUTH_NOT_EXPR
           || (TREE_CODE (arg0) == EQ_EXPR
               && integer_zerop (TREE_OPERAND (arg0, 1))))
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

      /* X & ~X, X & (X == 0), and X & !X are always zero.  */
      if ((TREE_CODE (arg1) == BIT_NOT_EXPR
           || TREE_CODE (arg1) == TRUTH_NOT_EXPR
           || (TREE_CODE (arg1) == EQ_EXPR
               && integer_zerop (TREE_OPERAND (arg1, 1))))
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree tmp1 = fold_convert_loc (loc, type, arg1);
          tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
          tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
          tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
          return
            fold_convert_loc (loc, type,
                              fold_build2_loc (loc, BIT_IOR_EXPR,
                                               type, tmp2, tmp3));
        }
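      /* E.g. the canonicalization above turns (X | 0x0f) & 0xf0 into
         (X & 0xf0) | (0x0f & 0xf0), whose right operand folds to 0,
         leaving just X & 0xf0.  */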
      /* (X | Y) & Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
      /* (X | Y) & X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
      /* X & (X | Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
      /* X & (Y | X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
      /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_onep (arg1))
        {
          tree tem2;
          tem = TREE_OPERAND (arg0, 0);
          tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
          tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
                                  tem, tem2);
          return fold_build2_loc (loc, EQ_EXPR, type, tem2,
                                  build_zero_cst (TREE_TYPE (tem)));
        }
      /* Fold ~X & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && integer_onep (arg1))
        {
          tree tem2;
          tem = TREE_OPERAND (arg0, 0);
          tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
          tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
                                  tem, tem2);
          return fold_build2_loc (loc, EQ_EXPR, type, tem2,
                                  build_zero_cst (TREE_TYPE (tem)));
        }
      /* Fold !X & 1 as X == 0.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && integer_onep (arg1))
        {
          tem = TREE_OPERAND (arg0, 0);
          return fold_build2_loc (loc, EQ_EXPR, type, tem,
                                  build_zero_cst (TREE_TYPE (tem)));
        }
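      /* E.g. (X ^ 1) & 1 and ~X & 1 are 1 exactly when the low bit of X
         is 0, hence the (X & 1) == 0 form above; !X & 1 tests the whole
         value, hence X == 0.  */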
      /* Fold (X ^ Y) & Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg1));
        }
      /* Fold (X ^ Y) & X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg1));
        }
      /* Fold X & (X ^ Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_convert_loc (loc, type, arg0),
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
        }
      /* Fold X & (Y ^ X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg0));
        }
      /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
         multiple of 1 << CST.  */
      if (TREE_CODE (arg1) == INTEGER_CST)
        {
          double_int cst1 = tree_to_double_int (arg1);
          double_int ncst1 = (-cst1).ext (TYPE_PRECISION (TREE_TYPE (arg1)),
                                          TYPE_UNSIGNED (TREE_TYPE (arg1)));
          if ((cst1 & ncst1) == ncst1
              && multiple_of_p (type, arg0,
                                double_int_to_tree (TREE_TYPE (arg1), ncst1)))
            return fold_convert_loc (loc, type, arg0);
        }
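      /* E.g. (X * 24) & -8 folds to X * 24: -8 masks off the low three
         bits, and 24 is a multiple of 8, so those bits of the product
         are already zero.  */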
      /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
         bits from CST2.  */
      if (TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) == MULT_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          int arg1tz
            = tree_to_double_int (TREE_OPERAND (arg0, 1)).trailing_zeros ();
          if (arg1tz > 0)
            {
              double_int arg1mask, masked;
              arg1mask = ~double_int::mask (arg1tz);
              arg1mask = arg1mask.ext (TYPE_PRECISION (type),
                                       TYPE_UNSIGNED (type));
              masked = arg1mask & tree_to_double_int (arg1);
              if (masked.is_zero ())
                return omit_two_operands_loc (loc, type, build_zero_cst (type),
                                              arg0, arg1);
              else if (masked != tree_to_double_int (arg1))
                return fold_build2_loc (loc, code, type, op0,
                                        double_int_to_tree (type, masked));
            }
        }
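      /* E.g. (X * 4) & 3 folds to 0, since the product has two trailing
         zero bits; (X * 4) & 7 becomes (X * 4) & 4 once the known-zero
         bits are dropped from the mask.  */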
      /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
         ((A & N) + B) & M -> (A + B) & M
         Similarly if (N & M) == 0,
         ((A | N) + B) & M -> (A + B) & M
         and for - instead of + (or unary - instead of +)
         and/or ^ instead of |.
         If B is constant and (B & M) == 0, fold into A & M.  */
      if (host_integerp (arg1, 1))
        {
          unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
          if (~cst1 && (cst1 & (cst1 + 1)) == 0
              && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
              && (TREE_CODE (arg0) == PLUS_EXPR
                  || TREE_CODE (arg0) == MINUS_EXPR
                  || TREE_CODE (arg0) == NEGATE_EXPR)
              && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
                  || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
            {
              tree pmop[2];
              int which = 0;
              unsigned HOST_WIDE_INT cst0;

              /* Now we know that arg0 is (C + D) or (C - D) or
                 -C and arg1 (M) is == (1LL << cst) - 1.
                 Store C into PMOP[0] and D into PMOP[1].  */
              pmop[0] = TREE_OPERAND (arg0, 0);
              pmop[1] = NULL;
              if (TREE_CODE (arg0) != NEGATE_EXPR)
                {
                  pmop[1] = TREE_OPERAND (arg0, 1);
                  which = 1;
                }

              if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
                  || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
                      & cst1) != cst1)
                which = -1;

              for (; which >= 0; which--)
                switch (TREE_CODE (pmop[which]))
                  {
                  case BIT_AND_EXPR:
                  case BIT_IOR_EXPR:
                  case BIT_XOR_EXPR:
                    if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
                        != INTEGER_CST)
                      break;
                    /* tree_low_cst not used, because we don't care about
                       the upper bits.  */
                    cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
                    cst0 &= cst1;
                    if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
                      {
                        if (cst0 != cst1)
                          break;
                      }
                    else if (cst0 != 0)
                      break;
                    /* If C or D is of the form (A & N) where
                       (N & M) == M, or of the form (A | N) or
                       (A ^ N) where (N & M) == 0, replace it with A.  */
                    pmop[which] = TREE_OPERAND (pmop[which], 0);
                    break;
                  case INTEGER_CST:
                    /* If C or D is a N where (N & M) == 0, it can be
                       omitted (assumed 0).  */
                    if ((TREE_CODE (arg0) == PLUS_EXPR
                         || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
                        && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
                      pmop[which] = NULL;
                    break;
                  default:
                    break;
                  }

              /* Only build anything new if we optimized one or both
                 arguments above.  */
              if (pmop[0] != TREE_OPERAND (arg0, 0)
                  || (TREE_CODE (arg0) != NEGATE_EXPR
                      && pmop[1] != TREE_OPERAND (arg0, 1)))
                {
                  tree utype = TREE_TYPE (arg0);
                  if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
                    {
                      /* Perform the operations in a type that has defined
                         overflow behavior.  */
                      utype = unsigned_type_for (TREE_TYPE (arg0));
                      if (pmop[0] != NULL)
                        pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
                      if (pmop[1] != NULL)
                        pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
                    }

                  if (TREE_CODE (arg0) == NEGATE_EXPR)
                    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
                  else if (TREE_CODE (arg0) == PLUS_EXPR)
                    {
                      if (pmop[0] != NULL && pmop[1] != NULL)
                        tem = fold_build2_loc (loc, PLUS_EXPR, utype,
                                               pmop[0], pmop[1]);
                      else if (pmop[0] != NULL)
                        tem = pmop[0];
                      else if (pmop[1] != NULL)
                        tem = pmop[1];
                      else
                        return build_int_cst (type, 0);
                    }
                  else if (pmop[0] == NULL)
                    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
                  else
                    tem = fold_build2_loc (loc, MINUS_EXPR, utype,
                                           pmop[0], pmop[1]);
                  /* TEM is now the new binary +, - or unary - replacement.  */
                  tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
                                         fold_convert_loc (loc, utype, arg1));
                  return fold_convert_loc (loc, type, tem);
                }
            }
        }
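      /* E.g. with M == 0xff, ((A & 0xff) + B) & 0xff folds to
         (A + B) & 0xff: the low eight bits of a sum depend only on the
         low eight bits of the addends, so the inner mask is redundant.  */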
      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
        return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
          && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
        {
          unsigned int prec
            = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

          if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
              && (~TREE_INT_CST_LOW (arg1)
                  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
            return
              fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
        }
      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

         This results in more efficient code for machines without a NOR
         instruction.  Combine will canonicalize to the first form
         which will allow use of NOR instructions provided by the
         backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        {
          return fold_build1_loc (loc, BIT_NOT_EXPR, type,
                                  build2 (BIT_IOR_EXPR, type,
                                          fold_convert_loc (loc, type,
                                                            TREE_OPERAND (arg0, 0)),
                                          fold_convert_loc (loc, type,
                                                            TREE_OPERAND (arg1, 0))));
        }
      /* If arg0 is derived from the address of an object or function, we may
         be able to fold this expression using the object or function's
         alignment.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
        {
          unsigned HOST_WIDE_INT modulus, residue;
          unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);

          modulus = get_pointer_modulus_and_residue (arg0, &residue,
                                                     integer_onep (arg1));

          /* This works because modulus is a power of 2.  If this weren't the
             case, we'd have to replace it by its greatest power-of-2
             divisor: modulus & -modulus.  */
          if (low < modulus)
            return build_int_cst (type, residue & low);
        }
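      /* For instance, if ARG0 is the address of an object known to be
         8-byte aligned, the modulus is 8 and the residue 0, so ARG0 & 7
         folds to the constant 0.  */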
      /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
              (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
         if the new mask might be further optimized.  */
      if ((TREE_CODE (arg0) == LSHIFT_EXPR
           || TREE_CODE (arg0) == RSHIFT_EXPR)
          && host_integerp (TREE_OPERAND (arg0, 1), 1)
          && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
          && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
             < TYPE_PRECISION (TREE_TYPE (arg0))
          && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
          && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
        {
          unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
          unsigned HOST_WIDE_INT mask
            = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
          unsigned HOST_WIDE_INT newmask, zerobits = 0;
          tree shift_type = TREE_TYPE (arg0);

          if (TREE_CODE (arg0) == LSHIFT_EXPR)
            zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
          else if (TREE_CODE (arg0) == RSHIFT_EXPR
                   && TYPE_PRECISION (TREE_TYPE (arg0))
                      == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
            {
              unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
              tree arg00 = TREE_OPERAND (arg0, 0);
              /* See if more bits can be proven as zero because of
                 zero extension.  */
              if (TREE_CODE (arg00) == NOP_EXPR
                  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
                {
                  tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
                  if (TYPE_PRECISION (inner_type)
                      == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
                      && TYPE_PRECISION (inner_type) < prec)
                    {
                      prec = TYPE_PRECISION (inner_type);
                      /* See if we can shorten the right shift.  */
                      if (shiftc < prec)
                        shift_type = inner_type;
                    }
                }
              zerobits = ~(unsigned HOST_WIDE_INT) 0;
              zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
              zerobits <<= prec - shiftc;
              /* For arithmetic shift if sign bit could be set, zerobits
                 can contain actually sign bits, so no transformation is
                 possible, unless MASK masks them all away.  In that
                 case the shift needs to be converted into logical shift.  */
              if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
                  && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
                {
                  if ((mask & zerobits) == 0)
                    shift_type = unsigned_type_for (TREE_TYPE (arg0));
                  else
                    zerobits = 0;
                }
            }

          /* ((X << 16) & 0xff00) is (X, 0).  */
          if ((mask & zerobits) == mask)
            return omit_one_operand_loc (loc, type,
                                         build_int_cst (type, 0), arg0);

          newmask = mask | zerobits;
          if (newmask != mask && (newmask & (newmask + 1)) == 0)
            {
              unsigned int prec;

              /* Only do the transformation if NEWMASK is some integer
                 mode's mask.  */
              for (prec = BITS_PER_UNIT;
                   prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
                if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
                  break;
              if (prec < HOST_BITS_PER_WIDE_INT
                  || newmask == ~(unsigned HOST_WIDE_INT) 0)
                {
                  tree newmaskt;

                  if (shift_type != TREE_TYPE (arg0))
                    {
                      tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
                                             fold_convert_loc (loc, shift_type,
                                                               TREE_OPERAND (arg0, 0)),
                                             TREE_OPERAND (arg0, 1));
                      tem = fold_convert_loc (loc, type, tem);
                    }
                  else
                    tem = op0;
                  newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
                  if (!tree_int_cst_equal (newmaskt, arg1))
                    return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                            tem, newmaskt);
                }
            }
        }

      goto associate;
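      /* E.g. for a 16-bit unsigned X, (X << 8) & 0xff0f becomes
         (X << 8) & 0xffff above: the low eight bits of the shift are
         known to be zero, and the widened mask is a mode mask, which may
         then be dropped entirely by later folding.  */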
    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
         of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
          && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
          && real_zerop (arg1))
        return NULL_TREE;
      /* Optimize A / A to 1.0 if we don't care about
         NaNs or Infinities.  Skip the transformation
         for non-real operands.  */
      if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
          && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
          && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
          && operand_equal_p (arg0, arg1, 0))
        {
          tree r = build_real (TREE_TYPE (arg0), dconst1);

          return omit_two_operands_loc (loc, type, r, arg0, arg1);
        }

      /* The complex version of the above A / A optimization.  */
      if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
          && operand_equal_p (arg0, arg1, 0))
        {
          tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
          if (! HONOR_NANS (TYPE_MODE (elem_type))
              && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
            {
              tree r = build_real (elem_type, dconst1);
              /* omit_two_operands will call fold_convert for us.  */
              return omit_two_operands_loc (loc, type, r, arg0, arg1);
            }
        }
      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold_build2_loc (loc, RDIV_EXPR, type,
                                TREE_OPERAND (arg0, 0),
                                negate_expr (arg1));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold_build2_loc (loc, RDIV_EXPR, type,
                                negate_expr (arg0),
                                TREE_OPERAND (arg1, 0));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && real_onep (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && real_minus_onep (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type,
                                                      negate_expr (arg0)));
      /* If ARG1 is a constant, we can convert this to a multiply by the
         reciprocal.  This does not have the same rounding properties,
         so only do this if -freciprocal-math.  We can actually
         always safely do it if ARG1 is a power of two, but it's hard to
         tell if it is or not in a portable manner.  */
      if (optimize
          && (TREE_CODE (arg1) == REAL_CST
              || (TREE_CODE (arg1) == COMPLEX_CST
                  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
              || (TREE_CODE (arg1) == VECTOR_CST
                  && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
        {
          if (flag_reciprocal_math
              && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
            return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
          /* Find the reciprocal if optimizing and the result is exact.
             TODO: Complex reciprocal not implemented.  */
          if (TREE_CODE (arg1) != COMPLEX_CST)
            {
              tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);

              if (inverse)
                return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
            }
        }
      /* Convert A/B/C to A/(B*C).  */
      if (flag_reciprocal_math
          && TREE_CODE (arg0) == RDIV_EXPR)
        return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
                                fold_build2_loc (loc, MULT_EXPR, type,
                                                 TREE_OPERAND (arg0, 1), arg1));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_reciprocal_math
          && TREE_CODE (arg1) == RDIV_EXPR)
        return fold_build2_loc (loc, MULT_EXPR, type,
                                fold_build2_loc (loc, RDIV_EXPR, type, arg0,
                                                 TREE_OPERAND (arg1, 0)),
                                TREE_OPERAND (arg1, 1));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_reciprocal_math
          && TREE_CODE (arg1) == MULT_EXPR
          && TREE_CODE (arg0) == REAL_CST
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
        {
          tree tem = const_binop (RDIV_EXPR, arg0,
                                  TREE_OPERAND (arg1, 1));
          if (tem)
            return fold_build2_loc (loc, RDIV_EXPR, type, tem,
                                    TREE_OPERAND (arg1, 0));
        }
      if (flag_unsafe_math_optimizations)
        {
          enum built_in_function fcode0 = builtin_mathfn_code (arg0);
          enum built_in_function fcode1 = builtin_mathfn_code (arg1);

          /* Optimize sin(x)/cos(x) as tan(x).  */
          if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
               || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
               || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
              && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                  CALL_EXPR_ARG (arg1, 0), 0))
            {
              tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

              if (tanfn != NULL_TREE)
                return build_call_expr_loc (loc, tanfn, 1,
                                            CALL_EXPR_ARG (arg0, 0));
            }

          /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
          if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
               || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
               || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
              && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                  CALL_EXPR_ARG (arg1, 0), 0))
            {
              tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

              if (tanfn != NULL_TREE)
                {
                  tree tmp = build_call_expr_loc (loc, tanfn, 1,
                                                  CALL_EXPR_ARG (arg0, 0));
                  return fold_build2_loc (loc, RDIV_EXPR, type,
                                          build_real (type, dconst1), tmp);
                }
            }

          /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
             NaNs or Infinities.  */
          if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
               || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
               || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
            {
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
              tree arg01 = CALL_EXPR_ARG (arg1, 0);

              if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
                  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
                  && operand_equal_p (arg00, arg01, 0))
                {
                  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

                  if (cosfn != NULL_TREE)
                    return build_call_expr_loc (loc, cosfn, 1, arg00);
                }
            }
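          /* The NaN guards above matter: at x = 0, sin(0)/tan(0) is 0/0,
             i.e. a NaN, while cos(0) is 1.0, so the rewrite is only valid
             when NaNs and infinities need not be honored.  */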
          /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
             NaNs or Infinities.  */
          if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
               || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
               || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
            {
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
              tree arg01 = CALL_EXPR_ARG (arg1, 0);

              if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
                  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
                  && operand_equal_p (arg00, arg01, 0))
                {
                  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

                  if (cosfn != NULL_TREE)
                    {
                      tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
                      return fold_build2_loc (loc, RDIV_EXPR, type,
                                              build_real (type, dconst1),
                                              tmp);
                    }
                }
            }

          /* Optimize pow(x,c)/x as pow(x,c-1).  */
          if (fcode0 == BUILT_IN_POW
              || fcode0 == BUILT_IN_POWF
              || fcode0 == BUILT_IN_POWL)
            {
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
              tree arg01 = CALL_EXPR_ARG (arg0, 1);
              if (TREE_CODE (arg01) == REAL_CST
                  && !TREE_OVERFLOW (arg01)
                  && operand_equal_p (arg1, arg00, 0))
                {
                  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  REAL_VALUE_TYPE c;
                  tree arg;

                  c = TREE_REAL_CST (arg01);
                  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
                  arg = build_real (type, c);
                  return build_call_expr_loc (loc, powfn, 2, arg1, arg);
                }
            }

          /* Optimize a/root(b/c) into a*root(c/b).  */
          if (BUILTIN_ROOT_P (fcode1))
            {
              tree rootarg = CALL_EXPR_ARG (arg1, 0);

              if (TREE_CODE (rootarg) == RDIV_EXPR)
                {
                  tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
                  tree b = TREE_OPERAND (rootarg, 0);
                  tree c = TREE_OPERAND (rootarg, 1);

                  tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);

                  tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
                  return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
                }
            }

          /* Optimize x/expN(y) into x*expN(-y).  */
          if (BUILTIN_EXPONENT_P (fcode1))
            {
              tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
              tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
              arg1 = build_call_expr_loc (loc,
                                          expfn, 1,
                                          fold_convert_loc (loc, type, arg));
              return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
            }

          /* Optimize x/pow(y,z) into x*pow(y,-z).  */
          if (fcode1 == BUILT_IN_POW
              || fcode1 == BUILT_IN_POWF
              || fcode1 == BUILT_IN_POWL)
            {
              tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
              tree arg10 = CALL_EXPR_ARG (arg1, 0);
              tree arg11 = CALL_EXPR_ARG (arg1, 1);
              tree neg11 = fold_convert_loc (loc, type,
                                             negate_expr (arg11));
              arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
              return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
            }
        }
      return NULL_TREE;
    case TRUNC_DIV_EXPR:
      /* Optimize (X & (-A)) / A where A is a power of 2,
         to X >> log2(A).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
          && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
        {
          tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
                                      arg1, TREE_OPERAND (arg0, 1));
          if (sum && integer_zerop (sum))
            {
              unsigned long pow2;

              if (TREE_INT_CST_LOW (arg1))
                pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
              else
                pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
                       + HOST_BITS_PER_WIDE_INT;

              return fold_build2_loc (loc, RSHIFT_EXPR, type,
                                      TREE_OPERAND (arg0, 0),
                                      build_int_cst (integer_type_node, pow2));
            }
        }
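      /* E.g. (X & -4) / 4 becomes X >> 2: the AND clears the two low
         bits, so the dividend is an exact multiple of 4 and truncating
         division coincides with the arithmetic right shift, even for
         negative X.  */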
      /* Fall through */

    case FLOOR_DIV_EXPR:
      /* Simplify A / (B << N) where A and B are positive and B is
         a power of 2, to A >> (N + log2(B)).  */
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == LSHIFT_EXPR
          && (TYPE_UNSIGNED (type)
              || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
        {
          tree sval = TREE_OPERAND (arg1, 0);
          if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
            {
              tree sh_cnt = TREE_OPERAND (arg1, 1);
              unsigned long pow2;

              if (TREE_INT_CST_LOW (sval))
                pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
              else
                pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
                       + HOST_BITS_PER_WIDE_INT;

              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying A / (B << N)"),
                                       WARN_STRICT_OVERFLOW_MISC);

              sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
                                        sh_cnt,
                                        build_int_cst (TREE_TYPE (sh_cnt),
                                                       pow2));
              return fold_build2_loc (loc, RSHIFT_EXPR, type,
                                      fold_convert_loc (loc, type, arg0),
                                      sh_cnt);
            }
        }

      /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
         TRUNC_DIV_EXPR.  Rewrite into the latter in this case.  */
      if (INTEGRAL_TYPE_P (type)
          && TYPE_UNSIGNED (type)
          && code == FLOOR_DIV_EXPR)
        return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);

      /* Fall through */
    case ROUND_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg1))
        return NULL_TREE;
      /* X / -1 is -X.  */
      if (!TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
          && TREE_INT_CST_HIGH (arg1) == -1)
        return fold_convert_loc (loc, type, negate_expr (arg0));

      /* Convert -A / -B to A / B when the type is signed and overflow is
         undefined.  */
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
          && TREE_CODE (arg0) == NEGATE_EXPR
          && negate_expr_p (arg1))
        {
          if (INTEGRAL_TYPE_P (type))
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when distributing negation across "
                                    "division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_build2_loc (loc, code, type,
                                  fold_convert_loc (loc, type,
                                                    TREE_OPERAND (arg0, 0)),
                                  fold_convert_loc (loc, type,
                                                    negate_expr (arg1)));
        }
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
          && TREE_CODE (arg1) == NEGATE_EXPR
          && negate_expr_p (arg0))
        {
          if (INTEGRAL_TYPE_P (type))
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when distributing negation across "
                                    "division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_build2_loc (loc, code, type,
                                  fold_convert_loc (loc, type,
                                                    negate_expr (arg0)),
                                  fold_convert_loc (loc, type,
                                                    TREE_OPERAND (arg1, 0)));
        }
      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
         operation, EXACT_DIV_EXPR.

         Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
         At one time others generated faster code, it's not clear if they do
         after the last round to changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
          && multiple_of_p (type, arg0, arg1))
        return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                         &strict_overflow_p)))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_convert_loc (loc, type, tem);
        }

      return NULL_TREE;
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* X % 1 is always zero, but be sure to preserve any side
         effects in X.  */
      if (integer_onep (arg1))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* X % 0, return X % 0 unchanged so that we can get the
         proper warnings and errors.  */
      if (integer_zerop (arg1))
        return NULL_TREE;

      /* 0 % X is always zero, but be sure to preserve any side
         effects in X.  Place this after checking for X == 0.  */
      if (integer_zerop (arg0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

      /* X % -1 is zero.  */
      if (!TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
          && TREE_INT_CST_HIGH (arg1) == -1)
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* X % -C is the same as X % C.  */
      if (code == TRUNC_MOD_EXPR
          && !TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && !TREE_OVERFLOW (arg1)
          && TREE_INT_CST_HIGH (arg1) < 0
          && !TYPE_OVERFLOW_TRAPS (type)
          /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
          && !sign_bit_p (arg1, arg1))
        return fold_build2_loc (loc, code, type,
                                fold_convert_loc (loc, type, arg0),
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg1)));

      /* X % -Y is the same as X % Y.  */
      if (code == TRUNC_MOD_EXPR
          && !TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == NEGATE_EXPR
          && !TYPE_OVERFLOW_TRAPS (type))
        return fold_build2_loc (loc, code, type,
                                fold_convert_loc (loc, type, arg0),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                         &strict_overflow_p)))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying modulus"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_convert_loc (loc, type, tem);
        }
      /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
         i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
      if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
          && (TYPE_UNSIGNED (type)
              || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
        {
          tree c = arg1;
          /* Also optimize A % (C << N) where C is a power of 2,
             to A & ((C << N) - 1).  */
          if (TREE_CODE (arg1) == LSHIFT_EXPR)
            c = TREE_OPERAND (arg1, 0);

          if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
            {
              tree mask
                = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
                                   build_int_cst (TREE_TYPE (arg1), 1));
              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying "
                                        "X % (power of two)"),
                                       WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                      fold_convert_loc (loc, type, arg0),
                                      fold_convert_loc (loc, type, mask));
            }
        }
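      /* E.g. unsigned X % 8 becomes X & 7, and A % (4 << N) becomes
         A & ((4 << N) - 1).  */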
      return NULL_TREE;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_all_onesp (arg0))
        return omit_one_operand_loc (loc, type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
          && tree_expr_nonnegative_p (arg1))
        return omit_one_operand_loc (loc, type, arg0, arg1);
      /* ... fall through ...  */

    case LSHIFT_EXPR:
    shift:
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg0))
        return omit_one_operand_loc (loc, type, arg0, arg1);
      /* Since negative shift count is not well-defined,
         don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
        return NULL_TREE;

      /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
      if (TREE_CODE (op0) == code && host_integerp (arg1, false)
          && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
          && host_integerp (TREE_OPERAND (arg0, 1), false)
          && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
        {
          HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
                               + TREE_INT_CST_LOW (arg1));

          /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
             being well defined.  */
          if (low >= TYPE_PRECISION (type))
            {
              if (code == LROTATE_EXPR || code == RROTATE_EXPR)
                low = low % TYPE_PRECISION (type);
              else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
                return omit_one_operand_loc (loc, type,
                                             build_int_cst (type, 0),
                                             TREE_OPERAND (arg0, 0));
              else
                low = TYPE_PRECISION (type) - 1;
            }

          return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                                  build_int_cst (type, low));
        }
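      /* E.g. (X << 3) << 5 becomes X << 8.  If the combined count reaches
         the precision, the result is 0 for logical shifts, while rotate
         counts simply wrap around modulo the precision.  */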
      /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
         into x & ((unsigned)-1 >> c) for unsigned types.  */
      if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
           || (TYPE_UNSIGNED (type)
               && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
          && host_integerp (arg1, false)
          && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
          && host_integerp (TREE_OPERAND (arg0, 1), false)
          && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
        {
          HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
          HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
          tree lshift;
          tree arg00;

          if (low0 == low1)
            {
              arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));

              lshift = build_int_cst (type, -1);
              lshift = int_const_binop (code, lshift, arg1);

              return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
            }
        }
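      /* E.g. for unsigned 32-bit X, (X >> 4) << 4 becomes
         X & 0xfffffff0, i.e. X & (-1 << 4).  */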
      /* Rewrite an LROTATE_EXPR by a constant into an
         RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
        {
          tree tem = build_int_cst (TREE_TYPE (arg1),
                                    TYPE_PRECISION (type));
          tem = const_binop (MINUS_EXPR, tem, arg1);
          return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
        }
      /* If we have a rotate of a bit operation with the rotate count and
         the second operand of the bit operation both constant,
         permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
          && (TREE_CODE (arg0) == BIT_AND_EXPR
              || TREE_CODE (arg0) == BIT_IOR_EXPR
              || TREE_CODE (arg0) == BIT_XOR_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return fold_build2_loc (loc, TREE_CODE (arg0), type,
                                fold_build2_loc (loc, code, type,
                                                 TREE_OPERAND (arg0, 0), arg1),
                                fold_build2_loc (loc, code, type,
                                                 TREE_OPERAND (arg0, 1), arg1));

      /* Two consecutive rotates adding up to the precision of the
         type can be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) == RROTATE_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (arg1) == 0
          && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
          && ((TREE_INT_CST_LOW (arg1)
               + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
              == (unsigned int) TYPE_PRECISION (type)))
        return TREE_OPERAND (arg0, 0);
      /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
              (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
         if the latter can be further optimized.  */
      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
          && TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree mask = fold_build2_loc (loc, code, type,
                                       fold_convert_loc (loc, type,
                                                         TREE_OPERAND (arg0, 1)),
                                       arg1);
          tree shift = fold_build2_loc (loc, code, type,
                                        fold_convert_loc (loc, type,
                                                          TREE_OPERAND (arg0, 0)),
                                        arg1);
          tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
          if (tem)
            return tem;
        }

      return NULL_TREE;
    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand_loc (loc, type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
          && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
      if (tem)
        return tem;
      goto associate;
    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand_loc (loc, type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
          && TYPE_MAX_VALUE (type)
          && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
      if (tem)
        return tem;
      goto associate;
    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
        return fold_convert_loc (loc, type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
          /* Preserve sequence points.  */
          && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
         must be evaluated.  */
      if (integer_zerop (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
         case will be handled here.  */
      if (integer_zerop (arg0))
        return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
         means A >= Y && A != MAX, but in this case we know that
         A < X <= MAX.  */

      if (!TREE_SIDE_EFFECTS (arg0)
          && !TREE_SIDE_EFFECTS (arg1))
        {
          tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
          if (tem && !operand_equal_p (tem, arg0, 0))
            return fold_build2_loc (loc, code, type, tem, arg1);

          tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
          if (tem && !operand_equal_p (tem, arg1, 0))
            return fold_build2_loc (loc, code, type, arg0, tem);
        }

      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
          != NULL_TREE)
        return tem;

      return NULL_TREE;
    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or true.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return fold_convert_loc (loc, type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
          /* Preserve sequence points.  */
          && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant true, result is true, but we must
         evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
         TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X || X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg1);
      /* X || !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* (X && !Y) || (!X && Y) is X ^ Y */
      if (TREE_CODE (arg0) == TRUTH_AND_EXPR
          && TREE_CODE (arg1) == TRUTH_AND_EXPR)
        {
          tree a0, a1, l0, l1, n0, n1;

          a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
          a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));

          l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));

          n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
          n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);

          if ((operand_equal_p (n0, a0, 0)
               && operand_equal_p (n1, a1, 0))
              || (operand_equal_p (n0, a1, 0)
                  && operand_equal_p (n1, a0, 0)))
            return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
        }

      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
          != NULL_TREE)
        return tem;

      return NULL_TREE;
    case TRUTH_XOR_EXPR:
      /* If the second arg is constant zero, drop it.  */
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If the second arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg1))
        {
          /* Only call invert_truthvalue if operand is a truth value.  */
          if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
            tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
          else
            tem = invert_truthvalue_loc (loc, arg0);
          return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
        }
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* !X ^ X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg1);

      /* X ^ !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      return NULL_TREE;

    case EQ_EXPR:
    case NE_EXPR:
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
        return tem;
      /* bool_var != 0 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
          && code == NE_EXPR)
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var == 1 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
          && code == EQ_EXPR)
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var != 1 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
          && code == NE_EXPR)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, TRUTH_NOT_EXPR,
                                                  TREE_TYPE (arg0), arg0));

      /* bool_var == 0 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
          && code == EQ_EXPR)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, TRUTH_NOT_EXPR,
                                                  TREE_TYPE (arg0), arg0));

      /* !exp != 0 becomes !exp.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
          && code == NE_EXPR)
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If this is an equality comparison of the address of two non-weak,
         unaliased symbols neither of which are extern (since we do not
         have access to attributes for externs), then we know the result.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
          && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
          && ! lookup_attribute ("alias",
                                 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
          && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
          && TREE_CODE (arg1) == ADDR_EXPR
          && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
          && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
          && ! lookup_attribute ("alias",
                                 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
          && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
        {
          /* We know that we're looking at the address of two
             non-weak, unaliased, static _DECL nodes.

             It is both wasteful and incorrect to call operand_equal_p
             to compare the two ADDR_EXPR nodes.  It is wasteful in that
             all we need to do is test pointer equality for the arguments
             to the two ADDR_EXPR nodes.  It is incorrect to use
             operand_equal_p as that function is NOT equivalent to a
             C equality test.  It can in fact return false for two
             objects which would test as equal using the C equality
             operator.  */
          bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
          return constant_boolean_node (equal
                                        ? code == EQ_EXPR : code != EQ_EXPR,
                                        type);
        }
12769 a MINUS_EXPR of a constant, we can convert it into a comparison with
12770 a revised constant as long as no overflow occurs. */
12771 if (TREE_CODE (arg1
) == INTEGER_CST
12772 && (TREE_CODE (arg0
) == PLUS_EXPR
12773 || TREE_CODE (arg0
) == MINUS_EXPR
)
12774 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
12775 && 0 != (tem
= const_binop (TREE_CODE (arg0
) == PLUS_EXPR
12776 ? MINUS_EXPR
: PLUS_EXPR
,
12777 fold_convert_loc (loc
, TREE_TYPE (arg0
),
12779 TREE_OPERAND (arg0
, 1)))
12780 && !TREE_OVERFLOW (tem
))
12781 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
12783 /* Similarly for a NEGATE_EXPR. */
12784 if (TREE_CODE (arg0
) == NEGATE_EXPR
12785 && TREE_CODE (arg1
) == INTEGER_CST
12786 && 0 != (tem
= negate_expr (fold_convert_loc (loc
, TREE_TYPE (arg0
),
12788 && TREE_CODE (tem
) == INTEGER_CST
12789 && !TREE_OVERFLOW (tem
))
12790 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
12792 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12793 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12794 && TREE_CODE (arg1
) == INTEGER_CST
12795 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12796 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
12797 fold_build2_loc (loc
, BIT_XOR_EXPR
, TREE_TYPE (arg0
),
12798 fold_convert_loc (loc
,
12801 TREE_OPERAND (arg0
, 1)));
      /* Transform comparisons of the form X +- Y CMP X to Y CMP 0.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR
           || TREE_CODE (arg0) == POINTER_PLUS_EXPR
           || TREE_CODE (arg0) == MINUS_EXPR)
          && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
                                                                        0)),
                              arg1, 0)
          && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
              || POINTER_TYPE_P (TREE_TYPE (arg0))))
        {
          tree val = TREE_OPERAND (arg0, 1);
          return omit_two_operands_loc (loc, type,
                                        fold_build2_loc (loc, code, type,
                                                         val,
                                                         build_int_cst (TREE_TYPE (val),
                                                                        0)),
                                        TREE_OPERAND (arg0, 0), arg1);
        }

      /* Transform comparisons of the form C - X CMP X if C % 2 == 1.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
          && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
                                                                        1)),
                              arg1, 0)
          && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
        return omit_two_operands_loc (loc, type,
                                      code == NE_EXPR
                                      ? boolean_true_node : boolean_false_node,
                                      TREE_OPERAND (arg0, 1), arg1);
      /* If we have X - Y == 0, we can convert that to X == Y and similarly
         for !=.  Don't do this for ordered comparisons due to overflow.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
          && integer_zerop (arg1))
        return fold_build2_loc (loc, code, type,
                                TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg0, 1));

      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
      if (TREE_CODE (arg0) == ABS_EXPR
          && (integer_zerop (arg1) || real_zerop (arg1)))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
      /* If this is an EQ or NE comparison with zero and ARG0 is
         (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
         two operations, but the latter can be done in one less insn
         on machines that have only two-operand insns or on which a
         constant cannot be the first operand.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_zerop (arg1))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          if (TREE_CODE (arg00) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg00, 0)))
            {
              tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
                                          arg01, TREE_OPERAND (arg00, 1));
              tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
                                     build_int_cst (TREE_TYPE (arg0), 1));
              return fold_build2_loc (loc, code, type,
                                      fold_convert_loc (loc, TREE_TYPE (arg1),
                                                        tem),
                                      arg1);
            }
          else if (TREE_CODE (arg01) == LSHIFT_EXPR
                   && integer_onep (TREE_OPERAND (arg01, 0)))
            {
              tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
                                          arg00, TREE_OPERAND (arg01, 1));
              tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
                                     build_int_cst (TREE_TYPE (arg0), 1));
              return fold_build2_loc (loc, code, type,
                                      fold_convert_loc (loc, TREE_TYPE (arg1),
                                                        tem),
                                      arg1);
            }
        }
      /* If this is an NE or EQ comparison of zero against the result of a
         signed MOD operation whose second operand is a power of 2, make
         the MOD operation unsigned since it is simpler and equivalent.  */
      if (integer_zerop (arg1)
          && !TYPE_UNSIGNED (TREE_TYPE (arg0))
          && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
              || TREE_CODE (arg0) == CEIL_MOD_EXPR
              || TREE_CODE (arg0) == FLOOR_MOD_EXPR
              || TREE_CODE (arg0) == ROUND_MOD_EXPR)
          && integer_pow2p (TREE_OPERAND (arg0, 1)))
        {
          tree newtype = unsigned_type_for (TREE_TYPE (arg0));
          tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
                                         fold_convert_loc (loc, newtype,
                                                           TREE_OPERAND (arg0, 0)),
                                         fold_convert_loc (loc, newtype,
                                                           TREE_OPERAND (arg0, 1)));

          return fold_build2_loc (loc, code, type, newmod,
                                  fold_convert_loc (loc, newtype, arg1));
        }
      /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
         C1 is a valid shift constant, and C2 is a power of two, i.e.
         a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
             == INTEGER_CST
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && integer_zerop (arg1))
        {
          tree itype = TREE_TYPE (arg0);
          unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
          tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);

          /* Check for a valid shift count.  */
          if (TREE_INT_CST_HIGH (arg001) == 0
              && TREE_INT_CST_LOW (arg001) < prec)
            {
              tree arg01 = TREE_OPERAND (arg0, 1);
              tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
              unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
              /* If (C2 << C1) doesn't overflow, then
                 ((X >> C1) & C2) != 0 can be rewritten as
                 (X & (C2 << C1)) != 0.  */
              if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
                {
                  tem = fold_build2_loc (loc, LSHIFT_EXPR, itype,
                                         arg01, arg001);
                  tem = fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                         arg000, tem);
                  return fold_build2_loc (loc, code, type, tem,
                                          fold_convert_loc (loc, itype, arg1));
                }
              /* Otherwise, for signed (arithmetic) shifts,
                 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
                 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
              else if (!TYPE_UNSIGNED (itype))
                return fold_build2_loc (loc,
                                        code == EQ_EXPR ? GE_EXPR : LT_EXPR,
                                        type, arg000,
                                        build_int_cst (itype, 0));
              /* Otherwise, for unsigned (logical) shifts,
                 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
                 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
              else
                return omit_one_operand_loc (loc, type,
                                             code == EQ_EXPR
                                             ? integer_one_node
                                             : integer_zero_node,
                                             arg000);
            }
        }
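      /* E.g. for unsigned char X, ((X >> 2) & 4) != 0 becomes
         (X & 16) != 0, since 4 << 2 == 16 still fits in the precision.  */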
      /* If we have (A & C) == C where C is a power of 2, convert this into
         (A & C) != 0.  Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
                                type, arg0,
                                fold_convert_loc (loc, TREE_TYPE (arg0),
                                                  integer_zero_node));

      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
         bit, then fold the expression into A < 0 or A >= 0.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
      if (tem)
        return tem;
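      /* E.g. (A & 8) == 8 becomes (A & 8) != 0 above, and with C the
         sign bit, (A & 0x80000000) != 0 becomes A < 0 for 32-bit int A.  */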
      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
         Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
                                       TREE_TYPE (TREE_OPERAND (arg0, 1)),
                                       TREE_OPERAND (arg0, 1));
          tree dandnotc
            = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
                               fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
                               notc);
          tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
          if (integer_nonzerop (dandnotc))
            return omit_one_operand_loc (loc, type, rslt, arg0);
        }

      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
         Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree notd = fold_build1_loc (loc, BIT_NOT_EXPR,
                                       TREE_TYPE (arg1), arg1);
          tree candnotd
            = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
                               TREE_OPERAND (arg0, 1),
                               fold_convert_loc (loc, TREE_TYPE (arg0), notd));
          tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
          if (integer_nonzerop (candnotd))
            return omit_one_operand_loc (loc, type, rslt, arg0);
        }
      /* If this is a comparison of a field, we may be able to simplify it.  */
      if ((TREE_CODE (arg0) == COMPONENT_REF
           || TREE_CODE (arg0) == BIT_FIELD_REF)
          /* Handle the constant case even without -O
             to make sure the warnings are given.  */
          && (optimize || TREE_CODE (arg1) == INTEGER_CST))
        {
          t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
          if (t1)
            return t1;
        }
      /* Optimize comparisons of strlen vs zero to a compare of the
         first character of the string vs zero.  To wit,
             strlen(ptr) == 0   =>  *ptr == 0
             strlen(ptr) != 0   =>  *ptr != 0
         Other cases should reduce to one of these two (or a constant)
         due to the return value of strlen being unsigned.  */
      if (TREE_CODE (arg0) == CALL_EXPR
          && integer_zerop (arg1))
        {
          tree fndecl = get_callee_fndecl (arg0);

          if (fndecl
              && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
              && call_expr_nargs (arg0) == 1
              && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
                 == POINTER_TYPE)
            {
              tree iref = build_fold_indirect_ref_loc (loc,
                                                       CALL_EXPR_ARG (arg0, 0));
              return fold_build2_loc (loc, code, type, iref,
                                      build_int_cst (TREE_TYPE (iref), 0));
            }
        }
      /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
         of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
      if (TREE_CODE (arg0) == RSHIFT_EXPR
          && integer_zerop (arg1)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree itype = TREE_TYPE (arg00);
          if (TREE_INT_CST_HIGH (arg01) == 0
              && TREE_INT_CST_LOW (arg01)
                 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
            {
              if (TYPE_UNSIGNED (itype))
                {
                  itype = signed_type_for (itype);
                  arg00 = fold_convert_loc (loc, itype, arg00);
                }
              return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
                                      type, arg00, build_zero_cst (itype));
            }
        }
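      /* E.g. for 32-bit int X, (X >> 31) != 0 becomes X < 0 and
         (X >> 31) == 0 becomes X >= 0.  */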
13059 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
13060 if (integer_zerop (arg1
)
13061 && TREE_CODE (arg0
) == BIT_XOR_EXPR
)
13062 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
13063 TREE_OPERAND (arg0
, 1));
      /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                                build_zero_cst (TREE_TYPE (arg0)));

      /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
                                build_zero_cst (TREE_TYPE (arg0)));
      /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                                fold_build2_loc (loc, BIT_XOR_EXPR,
                                                 TREE_TYPE (arg1),
                                                 TREE_OPERAND (arg0, 1), arg1));
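
      /* A worked instance: "(x ^ 5) == 3" folds to "x == (5 ^ 3)", and
         the constant XOR then folds further to "x == 6".  */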
      /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
         (X & C) == 0 when C is a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
          && integer_zerop (arg1)
          && integer_pow2p (TREE_OPERAND (arg0, 1)))
        {
          tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
                                 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
                                 TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
                                  type, tem,
                                  fold_convert_loc (loc, TREE_TYPE (arg0),
                                                    arg1));
        }
      /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
         constant C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
          && integer_zerop (arg1)
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
                                  type, arg00,
                                  build_int_cst (TREE_TYPE (arg00), 0));
        }
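
      /* E.g. with the single bit C == 4: "((x & 4) ^ 4) == 0" holds
         exactly when bit 2 of x is set, so it folds to "(x & 4) != 0";
         the != 0 form folds the opposite way.  */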
      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
         when C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
          && integer_zerop (arg1)
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
        {
          tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
          tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
                                 arg000, TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
                                  type, tem,
                                  build_int_cst (TREE_TYPE (tem), 0));
        }
      if (integer_zerop (arg1)
          && tree_expr_nonzero_p (arg0))
        {
          tree res = constant_boolean_node (code == NE_EXPR, type);
          return omit_one_operand_loc (loc, type, res, arg0);
        }
      /* Fold -X op -Y as X op Y, where op is eq/ne.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2_loc (loc, code, type,
                                TREE_OPERAND (arg0, 0),
                                fold_convert_loc (loc, TREE_TYPE (arg0),
                                                  TREE_OPERAND (arg1, 0)));
      /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0, and symmetries.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == BIT_AND_EXPR)
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg10 = TREE_OPERAND (arg1, 0);
          tree arg11 = TREE_OPERAND (arg1, 1);
          tree itype = TREE_TYPE (arg0);

          if (operand_equal_p (arg01, arg11, 0))
            return fold_build2_loc (loc, code, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                                     fold_build2_loc (loc,
                                                       BIT_XOR_EXPR, itype,
                                                       arg00, arg10),
                                                     arg01),
                                    build_zero_cst (itype));

          if (operand_equal_p (arg01, arg10, 0))
            return fold_build2_loc (loc, code, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                                     fold_build2_loc (loc,
                                                       BIT_XOR_EXPR, itype,
                                                       arg00, arg11),
                                                     arg01),
                                    build_zero_cst (itype));

          if (operand_equal_p (arg00, arg11, 0))
            return fold_build2_loc (loc, code, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                                     fold_build2_loc (loc,
                                                       BIT_XOR_EXPR, itype,
                                                       arg01, arg10),
                                                     arg00),
                                    build_zero_cst (itype));

          if (operand_equal_p (arg00, arg10, 0))
            return fold_build2_loc (loc, code, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                                     fold_build2_loc (loc,
                                                       BIT_XOR_EXPR, itype,
                                                       arg01, arg11),
                                                     arg00),
                                    build_zero_cst (itype));
        }
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (arg1) == BIT_XOR_EXPR)
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg10 = TREE_OPERAND (arg1, 0);
          tree arg11 = TREE_OPERAND (arg1, 1);
          tree itype = TREE_TYPE (arg0);

          /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
             operand_equal_p guarantees no side-effects so we don't need
             to use omit_one_operand on Z.  */
          if (operand_equal_p (arg01, arg11, 0))
            return fold_build2_loc (loc, code, type, arg00,
                                    fold_convert_loc (loc, TREE_TYPE (arg00),
                                                      arg10));
          if (operand_equal_p (arg01, arg10, 0))
            return fold_build2_loc (loc, code, type, arg00,
                                    fold_convert_loc (loc, TREE_TYPE (arg00),
                                                      arg11));
          if (operand_equal_p (arg00, arg11, 0))
            return fold_build2_loc (loc, code, type, arg01,
                                    fold_convert_loc (loc, TREE_TYPE (arg01),
                                                      arg10));
          if (operand_equal_p (arg00, arg10, 0))
            return fold_build2_loc (loc, code, type, arg01,
                                    fold_convert_loc (loc, TREE_TYPE (arg01),
                                                      arg11));

          /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
          if (TREE_CODE (arg01) == INTEGER_CST
              && TREE_CODE (arg11) == INTEGER_CST)
            {
              tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
                                     fold_convert_loc (loc, itype, arg11));
              tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
              return fold_build2_loc (loc, code, type, tem,
                                      fold_convert_loc (loc, itype, arg10));
            }
        }
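
      /* For example, "(x ^ 1) == (y ^ 2)" becomes "(x ^ (1 ^ 2)) == y",
         i.e. "(x ^ 3) == y", leaving a single XOR against a folded
         constant.  */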
      /* Attempt to simplify equality/inequality comparisons of complex
         values.  Only lower the comparison if the result is known or
         can be simplified to a single scalar comparison.  */
      if ((TREE_CODE (arg0) == COMPLEX_EXPR
           || TREE_CODE (arg0) == COMPLEX_CST)
          && (TREE_CODE (arg1) == COMPLEX_EXPR
              || TREE_CODE (arg1) == COMPLEX_CST))
        {
          tree real0, imag0, real1, imag1;
          tree rcond, icond;

          if (TREE_CODE (arg0) == COMPLEX_EXPR)
            {
              real0 = TREE_OPERAND (arg0, 0);
              imag0 = TREE_OPERAND (arg0, 1);
            }
          else
            {
              real0 = TREE_REALPART (arg0);
              imag0 = TREE_IMAGPART (arg0);
            }

          if (TREE_CODE (arg1) == COMPLEX_EXPR)
            {
              real1 = TREE_OPERAND (arg1, 0);
              imag1 = TREE_OPERAND (arg1, 1);
            }
          else
            {
              real1 = TREE_REALPART (arg1);
              imag1 = TREE_IMAGPART (arg1);
            }

          rcond = fold_binary_loc (loc, code, type, real0, real1);
          if (rcond && TREE_CODE (rcond) == INTEGER_CST)
            {
              if (integer_zerop (rcond))
                {
                  if (code == EQ_EXPR)
                    return omit_two_operands_loc (loc, type,
                                                  boolean_false_node,
                                                  imag0, imag1);
                  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
                }
              else
                {
                  if (code == NE_EXPR)
                    return omit_two_operands_loc (loc, type,
                                                  boolean_true_node,
                                                  imag0, imag1);
                  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
                }
            }

          icond = fold_binary_loc (loc, code, type, imag0, imag1);
          if (icond && TREE_CODE (icond) == INTEGER_CST)
            {
              if (integer_zerop (icond))
                {
                  if (code == EQ_EXPR)
                    return omit_two_operands_loc (loc, type,
                                                  boolean_false_node,
                                                  real0, real1);
                  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
                }
              else
                {
                  if (code == NE_EXPR)
                    return omit_two_operands_loc (loc, type,
                                                  boolean_true_node,
                                                  real0, real1);
                  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
                }
            }
        }

      return NULL_TREE;
    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
        return tem;

      /* Transform comparisons of the form X +- C CMP X.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
               && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
              || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
        {
          tree arg01 = TREE_OPERAND (arg0, 1);
          enum tree_code code0 = TREE_CODE (arg0);
          int is_positive;

          if (TREE_CODE (arg01) == REAL_CST)
            is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
          else
            is_positive = tree_int_cst_sgn (arg01);
          /* (X - c) > X becomes false.  */
          if (code == GT_EXPR
              && ((code0 == MINUS_EXPR && is_positive >= 0)
                  || (code0 == PLUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that (X - c) > X "
                                        "is always false"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (0, type);
            }

          /* Likewise (X + c) < X becomes false.  */
          if (code == LT_EXPR
              && ((code0 == PLUS_EXPR && is_positive >= 0)
                  || (code0 == MINUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that "
                                        "(X + c) < X is always false"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (0, type);
            }
          /* Convert (X - c) <= X to true.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
              && code == LE_EXPR
              && ((code0 == MINUS_EXPR && is_positive >= 0)
                  || (code0 == PLUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that "
                                        "(X - c) <= X is always true"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (1, type);
            }

          /* Convert (X + c) >= X to true.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
              && code == GE_EXPR
              && ((code0 == PLUS_EXPR && is_positive >= 0)
                  || (code0 == MINUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that "
                                        "(X + c) >= X is always true"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (1, type);
            }
          if (TREE_CODE (arg01) == INTEGER_CST)
            {
              /* Convert X + c > X and X - c < X to true for integers.  */
              if (code == GT_EXPR
                  && ((code0 == PLUS_EXPR && is_positive > 0)
                      || (code0 == MINUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X + c) > X is always true"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (1, type);
                }

              if (code == LT_EXPR
                  && ((code0 == MINUS_EXPR && is_positive > 0)
                      || (code0 == PLUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X - c) < X is always true"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (1, type);
                }

              /* Convert X + c <= X and X - c >= X to false for integers.  */
              if (code == LE_EXPR
                  && ((code0 == PLUS_EXPR && is_positive > 0)
                      || (code0 == MINUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X + c) <= X is always false"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (0, type);
                }

              if (code == GE_EXPR
                  && ((code0 == MINUS_EXPR && is_positive > 0)
                      || (code0 == PLUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X - c) >= X is always false"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (0, type);
                }
            }
        }
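
      /* Concretely, for a signed int x whose type has undefined overflow,
         "x + 1 > x" folds to 1 and "x + 1 <= x" folds to 0 here, with
         fold_overflow_warning recording the strict-overflow assumption
         for -Wstrict-overflow diagnostics.  */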
      /* Comparisons with the highest or lowest possible integer of
         the specified precision will have known values.  */
      {
        tree arg1_type = TREE_TYPE (arg1);
        unsigned int width = TYPE_PRECISION (arg1_type);

        if (TREE_CODE (arg1) == INTEGER_CST
            && width <= HOST_BITS_PER_DOUBLE_INT
            && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
          {
            HOST_WIDE_INT signed_max_hi;
            unsigned HOST_WIDE_INT signed_max_lo;
            unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;

            if (width <= HOST_BITS_PER_WIDE_INT)
              {
                signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
                                - 1;
                signed_max_hi = 0;
                max_hi = 0;

                if (TYPE_UNSIGNED (arg1_type))
                  {
                    max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
                    min_lo = 0;
                    min_hi = 0;
                  }
                else
                  {
                    max_lo = signed_max_lo;
                    min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
                    min_hi = -1;
                  }
              }
            else
              {
                width -= HOST_BITS_PER_WIDE_INT;
                signed_max_lo = -1;
                signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
                                - 1;
                max_lo = -1;
                min_lo = 0;

                if (TYPE_UNSIGNED (arg1_type))
                  {
                    max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
                    min_hi = 0;
                  }
                else
                  {
                    max_hi = signed_max_hi;
                    min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
                  }
              }

            if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
                && TREE_INT_CST_LOW (arg1) == max_lo)
              switch (code)
                {
                case GT_EXPR:
                  return omit_one_operand_loc (loc, type,
                                               integer_zero_node, arg0);

                case GE_EXPR:
                  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

                case LE_EXPR:
                  return omit_one_operand_loc (loc, type,
                                               integer_one_node, arg0);

                case LT_EXPR:
                  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

                /* The GE_EXPR and LT_EXPR cases above are not normally
                   reached because of previous transformations.  */

                default:
                  break;
                }
            else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
                     == max_hi
                     && TREE_INT_CST_LOW (arg1) == max_lo - 1)
              switch (code)
                {
                case GT_EXPR:
                  arg1 = const_binop (PLUS_EXPR, arg1,
                                      build_int_cst (TREE_TYPE (arg1), 1));
                  return fold_build2_loc (loc, EQ_EXPR, type,
                                          fold_convert_loc (loc,
                                                            TREE_TYPE (arg1),
                                                            arg0),
                                          arg1);

                case LE_EXPR:
                  arg1 = const_binop (PLUS_EXPR, arg1,
                                      build_int_cst (TREE_TYPE (arg1), 1));
                  return fold_build2_loc (loc, NE_EXPR, type,
                                          fold_convert_loc (loc,
                                                            TREE_TYPE (arg1),
                                                            arg0),
                                          arg1);

                default:
                  break;
                }

            else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
                     == min_hi
                     && TREE_INT_CST_LOW (arg1) == min_lo)
              switch (code)
                {
                case LT_EXPR:
                  return omit_one_operand_loc (loc, type,
                                               integer_zero_node, arg0);

                case LE_EXPR:
                  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

                case GE_EXPR:
                  return omit_one_operand_loc (loc, type,
                                               integer_one_node, arg0);

                case GT_EXPR:
                  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

                default:
                  break;
                }

            else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
                     == min_hi
                     && TREE_INT_CST_LOW (arg1) == min_lo + 1)
              switch (code)
                {
                case GE_EXPR:
                  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
                  return fold_build2_loc (loc, NE_EXPR, type,
                                          fold_convert_loc (loc,
                                                            TREE_TYPE (arg1),
                                                            arg0),
                                          arg1);

                case LT_EXPR:
                  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
                  return fold_build2_loc (loc, EQ_EXPR, type,
                                          fold_convert_loc (loc,
                                                            TREE_TYPE (arg1),
                                                            arg0),
                                          arg1);

                default:
                  break;
                }

            else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
                     && TREE_INT_CST_LOW (arg1) == signed_max_lo
                     && TYPE_UNSIGNED (arg1_type)
                     /* We will flip the signedness of the comparison operator
                        associated with the mode of arg1, so the sign bit is
                        specified by this mode.  Check that arg1 is the signed
                        max associated with this sign bit.  */
                     && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
                     /* signed_type does not work on pointer types.  */
                     && INTEGRAL_TYPE_P (arg1_type))
              {
                /* The following case also applies to X < signed_max+1
                   and X >= signed_max+1 because of previous
                   transformations.  */
                if (code == LE_EXPR || code == GT_EXPR)
                  {
                    tree st = signed_type_for (TREE_TYPE (arg1));
                    return fold_build2_loc (loc,
                                            code == LE_EXPR
                                            ? GE_EXPR : LT_EXPR,
                                            type,
                                            fold_convert_loc (loc, st, arg0),
                                            build_int_cst (st, 0));
                  }
              }
          }
      }
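
      /* Example of the signedness flip just above, assuming 32-bit int:
         for unsigned int x, "x > 0x7fffffff" tests exactly the sign bit,
         so it folds to "(int) x < 0", and "x <= 0x7fffffff" folds to
         "(int) x >= 0".  */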
      /* If we are comparing an ABS_EXPR with a constant, we can
         convert all the cases into explicit comparisons, but they may
         well not be faster than doing the ABS and one comparison.
         But ABS (X) <= C is a range comparison, which becomes a subtraction
         and a comparison, and is probably faster.  */
      if (code == LE_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) == ABS_EXPR
          && ! TREE_SIDE_EFFECTS (arg0)
          && (0 != (tem = negate_expr (arg1)))
          && TREE_CODE (tem) == INTEGER_CST
          && !TREE_OVERFLOW (tem))
        return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
                                build2 (GE_EXPR, type,
                                        TREE_OPERAND (arg0, 0), tem),
                                build2 (LE_EXPR, type,
                                        TREE_OPERAND (arg0, 0), arg1));
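
      /* The range form in practice: "abs (x) <= 7" becomes
         "x >= -7 && x <= 7", built as a TRUTH_ANDIF_EXPR of a GE_EXPR
         and an LE_EXPR, which later becomes the subtraction and single
         comparison mentioned above.  */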
      /* Convert ABS_EXPR<x> >= 0 to true.  */
      strict_overflow_p = false;
      if (code == GE_EXPR
          && (integer_zerop (arg1)
              || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
                  && real_zerop (arg1)))
          && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying comparison of "
                                    "absolute value and zero"),
                                   WARN_STRICT_OVERFLOW_CONDITIONAL);
          return omit_one_operand_loc (loc, type,
                                       constant_boolean_node (true, type),
                                       arg0);
        }

      /* Convert ABS_EXPR<x> < 0 to false.  */
      strict_overflow_p = false;
      if (code == LT_EXPR
          && (integer_zerop (arg1) || real_zerop (arg1))
          && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying comparison of "
                                    "absolute value and zero"),
                                   WARN_STRICT_OVERFLOW_CONDITIONAL);
          return omit_one_operand_loc (loc, type,
                                       constant_boolean_node (false, type),
                                       arg0);
        }
      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
         and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
          && TYPE_UNSIGNED (TREE_TYPE (arg0))
          && TREE_CODE (arg1) == LSHIFT_EXPR
          && integer_onep (TREE_OPERAND (arg1, 0)))
        return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
                           build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
                                   TREE_OPERAND (arg1, 1)),
                           build_zero_cst (TREE_TYPE (arg0)));
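
      /* E.g. for unsigned x, "x < (1 << y)" folds to "(x >> y) == 0"
         and "x >= (1 << y)" to "(x >> y) != 0"; this is only valid
         because x is unsigned, so the shifted-out bits decide the
         comparison completely.  */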
      /* Similarly for X < (cast) (1 << Y).  But cast can't be narrowing,
         otherwise Y might be >= # of bits in X's type and thus e.g.
         (unsigned char) (1 << Y) for Y == 15 might be 0.
         If the cast is widening, then 1 << Y should have unsigned type,
         otherwise if Y is number of bits in the signed shift type minus 1,
         we can't optimize this.  E.g. (unsigned long long) (1 << Y) for Y
         == 31 might be 0xffffffff80000000.  */
      if ((code == LT_EXPR || code == GE_EXPR)
          && TYPE_UNSIGNED (TREE_TYPE (arg0))
          && CONVERT_EXPR_P (arg1)
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
          && (TYPE_PRECISION (TREE_TYPE (arg1))
              >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
          && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
              || (TYPE_PRECISION (TREE_TYPE (arg1))
                  == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
          && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
        {
          tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
                        TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
          return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
                             fold_convert_loc (loc, TREE_TYPE (arg0), tem),
                             build_zero_cst (TREE_TYPE (arg0)));
        }

      return NULL_TREE;
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
        {
          t1 = fold_relational_const (code, type, arg0, arg1);
          if (t1 != NULL_TREE)
            return t1;
        }

      /* If the first operand is NaN, the result is constant.  */
      if (TREE_CODE (arg0) == REAL_CST
          && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
          && (code != LTGT_EXPR || ! flag_trapping_math))
        {
          t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
               ? integer_zero_node
               : integer_one_node;
          return omit_one_operand_loc (loc, type, t1, arg1);
        }

      /* If the second operand is NaN, the result is constant.  */
      if (TREE_CODE (arg1) == REAL_CST
          && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
          && (code != LTGT_EXPR || ! flag_trapping_math))
        {
          t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
               ? integer_zero_node
               : integer_one_node;
          return omit_one_operand_loc (loc, type, t1, arg0);
        }

      /* Simplify unordered comparison of something with itself.  */
      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
          && operand_equal_p (arg0, arg1, 0))
        return constant_boolean_node (1, type);

      if (code == LTGT_EXPR
          && !flag_trapping_math
          && operand_equal_p (arg0, arg1, 0))
        return constant_boolean_node (0, type);
      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
        tree targ0 = strip_float_extensions (arg0);
        tree targ1 = strip_float_extensions (arg1);
        tree newtype = TREE_TYPE (targ0);

        if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
          newtype = TREE_TYPE (targ1);

        if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
          return fold_build2_loc (loc, code, type,
                                  fold_convert_loc (loc, newtype, targ0),
                                  fold_convert_loc (loc, newtype, targ1));
      }

      return NULL_TREE;
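
      /* For instance, with floats f and g, "(double) f < (double) g"
         folds back to "f < g": strip_float_extensions finds the common
         narrower type, and the comparison is exact in either width.  */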
    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
         nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
        return NULL_TREE;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
                                 : fold_convert_loc (loc, type, arg1);
      return pedantic_non_lvalue_loc (loc, tem);
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg0) == REAL_CST
           && TREE_CODE (arg1) == REAL_CST)
          || (TREE_CODE (arg0) == INTEGER_CST
              && TREE_CODE (arg1) == INTEGER_CST))
        return build_complex (type, arg0, arg1);
      if (TREE_CODE (arg0) == REALPART_EXPR
          && TREE_CODE (arg1) == IMAGPART_EXPR
          && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
          && operand_equal_p (TREE_OPERAND (arg0, 0),
                              TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
                                     TREE_OPERAND (arg1, 0));
      return NULL_TREE;

    case ASSERT_EXPR:
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
      gcc_unreachable ();
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
                    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
        if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts);
        if (!vec_cst_ctor_to_array (arg0, elts)
            || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
          return NULL_TREE;

        for (i = 0; i < nelts; i++)
          {
            elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
                                          ? NOP_EXPR : FIX_TRUNC_EXPR,
                                          TREE_TYPE (type), elts[i]);
            if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
        unsigned int out, ofs, scale;
        tree *elts;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
                    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
        if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts * 4);
        if (!vec_cst_ctor_to_array (arg0, elts)
            || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
          return NULL_TREE;

        if (code == VEC_WIDEN_MULT_LO_EXPR)
          scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
        else if (code == VEC_WIDEN_MULT_HI_EXPR)
          scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
        else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
          scale = 1, ofs = 0;
        else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
          scale = 1, ofs = 1;

        for (out = 0; out < nelts; out++)
          {
            unsigned int in1 = (out << scale) + ofs;
            unsigned int in2 = in1 + nelts * 2;
            tree t1, t2;

            t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
            t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

            if (t1 == NULL_TREE || t2 == NULL_TREE)
              return NULL_TREE;
            elts[out] = const_binop (MULT_EXPR, t1, t2);
            if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
   a LABEL_EXPR; otherwise return NULL_TREE.  Do not check the subtrees
   of GOTO_EXPR.  */

static tree
contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      return *tp;

    case GOTO_EXPR:
      *walk_subtrees = 0;

      /* ... fall through ...  */

    default:
      return NULL_TREE;
    }
}

/* Return whether the sub-tree ST contains a label which is accessible from
   outside the sub-tree.  */

static bool
contains_label_p (tree st)
{
  return
   (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
}
/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_ternary_loc (location_t loc, enum tree_code code, tree type,
                  tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 3);

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any case,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */

  switch (code)
    {
    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
          && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
        {
          unsigned HOST_WIDE_INT idx;
          tree field, value;
          FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx,
                                    field, value)
            if (field == arg1)
              return value;
        }
      return NULL_TREE;
:
13965 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13966 so all simple results must be passed through pedantic_non_lvalue. */
13967 if (TREE_CODE (arg0
) == INTEGER_CST
)
13969 tree unused_op
= integer_zerop (arg0
) ? op1
: op2
;
13970 tem
= integer_zerop (arg0
) ? op2
: op1
;
13971 /* Only optimize constant conditions when the selected branch
13972 has the same type as the COND_EXPR. This avoids optimizing
13973 away "c ? x : throw", where the throw has a void type.
13974 Avoid throwing away that operand which contains label. */
13975 if ((!TREE_SIDE_EFFECTS (unused_op
)
13976 || !contains_label_p (unused_op
))
13977 && (! VOID_TYPE_P (TREE_TYPE (tem
))
13978 || VOID_TYPE_P (type
)))
13979 return pedantic_non_lvalue_loc (loc
, tem
);
13982 else if (TREE_CODE (arg0
) == VECTOR_CST
)
13984 if (integer_all_onesp (arg0
))
13985 return pedantic_omit_one_operand_loc (loc
, type
, arg1
, arg2
);
13986 if (integer_zerop (arg0
))
13987 return pedantic_omit_one_operand_loc (loc
, type
, arg2
, arg1
);
13989 if ((TREE_CODE (arg1
) == VECTOR_CST
13990 || TREE_CODE (arg1
) == CONSTRUCTOR
)
13991 && (TREE_CODE (arg2
) == VECTOR_CST
13992 || TREE_CODE (arg2
) == CONSTRUCTOR
))
13994 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
13995 unsigned char *sel
= XALLOCAVEC (unsigned char, nelts
);
13996 gcc_assert (nelts
== VECTOR_CST_NELTS (arg0
));
13997 for (i
= 0; i
< nelts
; i
++)
13999 tree val
= VECTOR_CST_ELT (arg0
, i
);
14000 if (integer_all_onesp (val
))
14002 else if (integer_zerop (val
))
14003 sel
[i
] = nelts
+ i
;
14004 else /* Currently unreachable. */
14007 tree t
= fold_vec_perm (type
, arg1
, arg2
, sel
);
14008 if (t
!= NULL_TREE
)
      if (operand_equal_p (arg1, op2, 0))
        return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);

      /* If we have A op B ? A : C, we may be able to convert this to a
         simpler expression, depending on the operation and the values
         of B and C.  Signed zeros prevent all of these transformations,
         for reasons given above each one.

         Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
          && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
                                             arg1, TREE_OPERAND (arg0, 1))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
        {
          tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
          if (tem)
            return tem;
        }

      if (COMPARISON_CLASS_P (arg0)
          && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
                                             op2,
                                             TREE_OPERAND (arg0, 1))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
        {
          location_t loc0 = expr_location_or (arg0, loc);
          tem = fold_truth_not_expr (loc0, arg0);
          if (tem && COMPARISON_CLASS_P (tem))
            {
              tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
              if (tem)
                return tem;
            }
        }
      /* ??? Fixup the code below for VEC_COND_EXPR.  */
      if (code == VEC_COND_EXPR)
        return NULL_TREE;

      /* If the second operand is simpler than the third, swap them
         since that produces better jump optimization results.  */
      if (truth_value_p (TREE_CODE (arg0))
          && tree_swap_operands_p (op1, op2, false))
        {
          location_t loc0 = expr_location_or (arg0, loc);
          /* See if this can be inverted.  If it can't, possibly because
             it was a floating-point inequality comparison, don't do
             anything.  */
          tem = fold_truth_not_expr (loc0, arg0);
          if (tem)
            return fold_build3_loc (loc, code, type, tem, op2, op1);
        }

      /* Convert A ? 1 : 0 to simply A.  */
      if (integer_onep (op1)
          && integer_zerop (op2)
          /* If we try to convert OP0 to our type, the
             call to fold will try to move the conversion inside
             a COND, which will recurse.  In that case, the COND_EXPR
             is probably the best choice, so leave it alone.  */
          && type == TREE_TYPE (arg0))
        return pedantic_non_lvalue_loc (loc, arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
         over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
          && integer_onep (op2)
          && truth_value_p (TREE_CODE (arg0)))
        return pedantic_non_lvalue_loc (loc,
                                        fold_convert_loc (loc, type,
                                          invert_truthvalue_loc (loc, arg0)));
      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
      if (TREE_CODE (arg0) == LT_EXPR
          && integer_zerop (TREE_OPERAND (arg0, 1))
          && integer_zerop (op2)
          && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
        {
          /* sign_bit_p only checks ARG1 bits within A's precision.
             If <sign bit of A> has wider type than A, bits outside
             of A's precision in <sign bit of A> need to be checked.
             If they are all 0, this optimization needs to be done
             in unsigned A's type, if they are all 1 in signed A's type,
             otherwise this can't be done.  */
          if (TYPE_PRECISION (TREE_TYPE (tem))
              < TYPE_PRECISION (TREE_TYPE (arg1))
              && TYPE_PRECISION (TREE_TYPE (tem))
                 < TYPE_PRECISION (type))
            {
              unsigned HOST_WIDE_INT mask_lo;
              HOST_WIDE_INT mask_hi;
              int inner_width, outer_width;
              tree tem_type;

              inner_width = TYPE_PRECISION (TREE_TYPE (tem));
              outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
              if (outer_width > TYPE_PRECISION (type))
                outer_width = TYPE_PRECISION (type);

              if (outer_width > HOST_BITS_PER_WIDE_INT)
                {
                  mask_hi = ((unsigned HOST_WIDE_INT) -1
                             >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
                  mask_lo = -1;
                }
              else
                {
                  mask_hi = 0;
                  mask_lo = ((unsigned HOST_WIDE_INT) -1
                             >> (HOST_BITS_PER_WIDE_INT - outer_width));
                }
              if (inner_width > HOST_BITS_PER_WIDE_INT)
                {
                  mask_hi &= ~((unsigned HOST_WIDE_INT) -1
                               >> (HOST_BITS_PER_WIDE_INT - inner_width));
                  mask_lo = 0;
                }
              else
                mask_lo &= ~((unsigned HOST_WIDE_INT) -1
                             >> (HOST_BITS_PER_WIDE_INT - inner_width));

              if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
                  && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
                {
                  tem_type = signed_type_for (TREE_TYPE (tem));
                  tem = fold_convert_loc (loc, tem_type, tem);
                }
              else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
                       && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
                {
                  tem_type = unsigned_type_for (TREE_TYPE (tem));
                  tem = fold_convert_loc (loc, tem_type, tem);
                }
              else
                tem = NULL_TREE;
            }

          if (tem)
            return
              fold_convert_loc (loc, type,
                                fold_build2_loc (loc, BIT_AND_EXPR,
                                                 TREE_TYPE (tem), tem,
                                                 fold_convert_loc (loc,
                                                   TREE_TYPE (tem), arg1)));
        }
      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
         already handled above.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_zerop (op2)
          && integer_pow2p (arg1))
        {
          tree tem = TREE_OPERAND (arg0, 0);
          STRIP_NOPS (tem);
          if (TREE_CODE (tem) == RSHIFT_EXPR
              && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
              && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
                 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
            return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                    TREE_OPERAND (tem, 0), arg1);
        }

      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
         is probably obsolete because the first operand should be a
         truth value (that's why we have the two cases above), but let's
         leave it in until we can confirm this for all front-ends.  */
      if (integer_zerop (op2)
          && TREE_CODE (arg0) == NE_EXPR
          && integer_zerop (TREE_OPERAND (arg0, 1))
          && integer_pow2p (arg1)
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              arg1, OEP_ONLY_CONST))
        return pedantic_non_lvalue_loc (loc,
                                        fold_convert_loc (loc, type,
                                          TREE_OPERAND (arg0, 0)));
      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (op2)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (arg1)))
        return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
                                fold_convert_loc (loc, type, arg0),
                                arg1);

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (integer_onep (op2)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (arg1)))
        {
          location_t loc0 = expr_location_or (arg0, loc);
          /* Only perform transformation if ARG0 is easily inverted.  */
          tem = fold_truth_not_expr (loc0, arg0);
          if (tem)
            return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
                                    fold_convert_loc (loc, type, tem),
                                    arg1);
        }

      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
      if (integer_zerop (arg1)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (op2)))
        {
          location_t loc0 = expr_location_or (arg0, loc);
          /* Only perform transformation if ARG0 is easily inverted.  */
          tem = fold_truth_not_expr (loc0, arg0);
          if (tem)
            return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
                                    fold_convert_loc (loc, type, tem),
                                    op2);
        }

      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if (integer_onep (arg1)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (op2)))
        return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
                                fold_convert_loc (loc, type, arg0),
                                op2);

      return NULL_TREE;
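
      /* Summarizing the four rewrites above for truth values a and b:

             a ? b : 0  ->   a && b        a ? b : 1  ->  !a || b
             a ? 0 : b  ->  !a && b        a ? 1 : b  ->   a || b

         where the !a forms are produced only when fold_truth_not_expr
         can invert a cheaply.  */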
    case CALL_EXPR:
      /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
         of fold_ternary on them.  */
      gcc_unreachable ();
    case BIT_FIELD_REF:
      if ((TREE_CODE (arg0) == VECTOR_CST
           || (TREE_CODE (arg0) == CONSTRUCTOR
               && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
          && (type == TREE_TYPE (TREE_TYPE (arg0))
              || (TREE_CODE (type) == VECTOR_TYPE
                  && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
        {
          tree eltype = TREE_TYPE (TREE_TYPE (arg0));
          unsigned HOST_WIDE_INT width = tree_low_cst (TYPE_SIZE (eltype), 1);
          unsigned HOST_WIDE_INT n = tree_low_cst (arg1, 1);
          unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);

          if (n != 0
              && (idx % width) == 0
              && (n % width) == 0
              && ((idx + n) / width)
                 <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
            {
              idx = idx / width;
              n = n / width;

              if (TREE_CODE (arg0) == VECTOR_CST)
                {
                  if (n == 1)
                    return VECTOR_CST_ELT (arg0, idx);

                  tree *vals = XALLOCAVEC (tree, n);
                  for (unsigned i = 0; i < n; ++i)
                    vals[i] = VECTOR_CST_ELT (arg0, idx + i);
                  return build_vector (type, vals);
                }

              /* Constructor elements can be subvectors.  */
              unsigned HOST_WIDE_INT k = 1;
              if (CONSTRUCTOR_NELTS (arg0) != 0)
                {
                  tree cons_elem
                    = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
                  if (TREE_CODE (cons_elem) == VECTOR_TYPE)
                    k = TYPE_VECTOR_SUBPARTS (cons_elem);
                }

              /* We keep an exact subset of the constructor elements.  */
              if ((idx % k) == 0 && (n % k) == 0)
                {
                  if (CONSTRUCTOR_NELTS (arg0) == 0)
                    return build_constructor (type, NULL);
                  idx /= k;
                  n /= k;
                  if (n == 1)
                    {
                      if (idx < CONSTRUCTOR_NELTS (arg0))
                        return CONSTRUCTOR_ELT (arg0, idx)->value;
                      return build_zero_cst (type);
                    }
                  vec<constructor_elt, va_gc> *vals;
                  vec_alloc (vals, n);
                  for (unsigned i = 0;
                       i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
                       ++i)
                    CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
                                            CONSTRUCTOR_ELT
                                              (arg0, idx + i)->value);
                  return build_constructor (type, vals);
                }
              /* The bitfield references a single constructor element.  */
              else if (idx + n <= (idx / k + 1) * k)
                {
                  if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
                    return build_zero_cst (type);
                  else if (n == k)
                    return CONSTRUCTOR_ELT (arg0, idx / k)->value;
                  else
                    return fold_build3_loc (loc, code, type,
                      CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
                      build_int_cst (TREE_TYPE (op2), (idx % k) * width));
                }
            }
        }
      /* A bit-field-ref that referenced the full argument can be stripped.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
          && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
          && integer_zerop (op2))
        return fold_convert_loc (loc, type, arg0);

      /* On constants we can use native encode/interpret to constant
         fold (nearly) all BIT_FIELD_REFs.  */
      if (CONSTANT_CLASS_P (arg0)
          && can_native_interpret_type_p (type)
          && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1)
          /* This limitation should not be necessary, we just need to
             round this up to mode size.  */
          && tree_low_cst (op1, 1) % BITS_PER_UNIT == 0
          /* Need bit-shifting of the buffer to relax the following.  */
          && tree_low_cst (op2, 1) % BITS_PER_UNIT == 0)
        {
          unsigned HOST_WIDE_INT bitpos = tree_low_cst (op2, 1);
          unsigned HOST_WIDE_INT bitsize = tree_low_cst (op1, 1);
          unsigned HOST_WIDE_INT clen;
          clen = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1);
          /* ??? We cannot tell native_encode_expr to start at
             some random byte only.  So limit us to a reasonable amount
             of work.  */
          if (clen <= 4096)
            {
              unsigned char *b = XALLOCAVEC (unsigned char, clen);
              unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
              if (len > 0
                  && len * BITS_PER_UNIT >= bitpos + bitsize)
                {
                  tree v = native_interpret_expr (type,
                                                  b + bitpos / BITS_PER_UNIT,
                                                  bitsize / BITS_PER_UNIT);
                  if (v)
                    return v;
                }
            }
        }

      return NULL_TREE;
    case FMA_EXPR:
      /* For integers we can decompose the FMA if possible.  */
      if (TREE_CODE (arg0) == INTEGER_CST
          && TREE_CODE (arg1) == INTEGER_CST)
        return fold_build2_loc (loc, PLUS_EXPR, type,
                                const_binop (MULT_EXPR, arg0, arg1), arg2);
      if (integer_zerop (arg2))
        return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);

      return fold_fma (loc, type, arg0, arg1, arg2);
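
      /* E.g. an FMA_EXPR of the integer constants 2 and 3 with addend z
         decomposes to "2 * 3 + z", whose product const_binop folds to 6;
         if instead the addend is literal 0, the whole FMA collapses to
         the plain multiplication.  */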
    case VEC_PERM_EXPR:
      if (TREE_CODE (arg2) == VECTOR_CST)
        {
          unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
          unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
          tree t;
          bool need_mask_canon = false;
          bool all_in_vec0 = true;
          bool all_in_vec1 = true;
          bool maybe_identity = true;
          bool single_arg = (op0 == op1);
          bool changed = false;

          mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
          gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
          for (i = 0; i < nelts; i++)
            {
              tree val = VECTOR_CST_ELT (arg2, i);
              if (TREE_CODE (val) != INTEGER_CST)
                return NULL_TREE;

              sel[i] = TREE_INT_CST_LOW (val) & mask;
              if (TREE_INT_CST_HIGH (val)
                  || ((unsigned HOST_WIDE_INT)
                      TREE_INT_CST_LOW (val) != sel[i]))
                need_mask_canon = true;

              if (sel[i] < nelts)
                all_in_vec1 = false;
              else
                all_in_vec0 = false;

              if ((sel[i] & (nelts - 1)) != i)
                maybe_identity = false;
            }

          if (maybe_identity)
            {
              if (all_in_vec0)
                return op0;
              if (all_in_vec1)
                return op1;
            }

          if (all_in_vec0)
            op1 = op0;
          else if (all_in_vec1)
            {
              op0 = op1;
              for (i = 0; i < nelts; i++)
                sel[i] -= nelts;
              need_mask_canon = true;
            }

          if ((TREE_CODE (op0) == VECTOR_CST
               || TREE_CODE (op0) == CONSTRUCTOR)
              && (TREE_CODE (op1) == VECTOR_CST
                  || TREE_CODE (op1) == CONSTRUCTOR))
            {
              t = fold_vec_perm (type, op0, op1, sel);
              if (t != NULL_TREE)
                return t;
            }

          if (op0 == op1 && !single_arg)
            changed = true;

          if (need_mask_canon && arg2 == op2)
            {
              tree *tsel = XALLOCAVEC (tree, nelts);
              tree eltype = TREE_TYPE (TREE_TYPE (arg2));
              for (i = 0; i < nelts; i++)
                tsel[i] = build_int_cst (eltype, sel[i]);
              op2 = build_vector (TREE_TYPE (arg2), tsel);
              changed = true;
            }

          if (changed)
            return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
        }
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;
  location_t loc = EXPR_LOCATION (expr);
  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  /* CALL_EXPR-like objects with variable numbers of operands are
     treated specially.  */
  if (kind == tcc_vl_exp)
    {
      if (code == CALL_EXPR)
        {
          tem = fold_call_expr (loc, expr, false);
          return tem ? tem : expr;
        }
      return expr;
    }

  if (IS_EXPR_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);
      tree op0, op1, op2;

      switch (TREE_CODE_LENGTH (code))
        {
        case 1:
          op0 = TREE_OPERAND (t, 0);
          tem = fold_unary_loc (loc, code, type, op0);
          return tem ? tem : expr;
        case 2:
          op0 = TREE_OPERAND (t, 0);
          op1 = TREE_OPERAND (t, 1);
          tem = fold_binary_loc (loc, code, type, op0, op1);
          return tem ? tem : expr;
        case 3:
          op0 = TREE_OPERAND (t, 0);
          op1 = TREE_OPERAND (t, 1);
          op2 = TREE_OPERAND (t, 2);
          tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
          return tem ? tem : expr;
        default:
          break;
        }
    }

  switch (code)
    {
    case ARRAY_REF:
      {
        tree op0 = TREE_OPERAND (t, 0);
        tree op1 = TREE_OPERAND (t, 1);
        if (TREE_CODE (op1) == INTEGER_CST
            && TREE_CODE (op0) == CONSTRUCTOR
            && ! type_contains_placeholder_p (TREE_TYPE (op0)))
          {
            vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
            unsigned HOST_WIDE_INT end = vec_safe_length (elts);
            unsigned HOST_WIDE_INT begin = 0;

            /* Find a matching index by means of a binary search.  */
            while (begin != end)
              {
                unsigned HOST_WIDE_INT middle = (begin + end) / 2;
                tree index = (*elts)[middle].index;

                if (TREE_CODE (index) == INTEGER_CST
                    && tree_int_cst_lt (index, op1))
                  begin = middle + 1;
                else if (TREE_CODE (index) == INTEGER_CST
                         && tree_int_cst_lt (op1, index))
                  end = middle;
                else if (TREE_CODE (index) == RANGE_EXPR
                         && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
                  begin = middle + 1;
                else if (TREE_CODE (index) == RANGE_EXPR
                         && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
                  end = middle;
                else
                  return (*elts)[middle].value;
              }
          }

        return t;
      }
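
    /* The binary search relies on CONSTRUCTOR_ELTS being sorted by
       index.  Illustratively, folding a[2] against a hypothetical
       "int a[4] = {10, 20, 30, 40}" probes middle == 2 first, matches
       the index, and returns 30 without a linear scan.  */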
    case CONSTRUCTOR:
      {
        /* Return a VECTOR_CST if possible.  */
        tree type = TREE_TYPE (t);
        if (TREE_CODE (type) != VECTOR_TYPE)
          return t;

        tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
        unsigned HOST_WIDE_INT idx, pos = 0;
        tree value;

        FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
          {
            if (!CONSTANT_CLASS_P (value))
              return t;
            if (TREE_CODE (value) == VECTOR_CST)
              {
                for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
                  vec[pos++] = VECTOR_CST_ELT (value, i);
              }
            else
              vec[pos++] = value;
          }
        for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
          vec[pos] = build_zero_cst (TREE_TYPE (type));

        return build_vector (type, vec);
      }

    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    default:
      return t;
    } /* switch (code) */
}
#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (const_tree, struct md5_ctx *,
                                hash_table <pointer_hash <tree_node> >);
static void fold_check_failed (const_tree, const_tree);
void print_fold_checksum (const_tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */

tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  hash_table <pointer_hash <tree_node> > ht;

  ht.create (32);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  ht.empty ();

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  ht.dispose ();

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}
void
print_fold_checksum (const_tree expr)
{
  struct md5_ctx ctx;
  unsigned char checksum[16], cnt;
  hash_table <pointer_hash <tree_node> > ht;

  ht.create (32);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  ht.dispose ();
  for (cnt = 0; cnt < 16; ++cnt)
    fprintf (stderr, "%02x", checksum[cnt]);
  putc ('\n', stderr);
}
static void
fold_check_failed (const_tree expr ATTRIBUTE_UNUSED,
                   const_tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}
static void
fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
                    hash_table <pointer_hash <tree_node> > ht)
{
  tree_node **slot;
  enum tree_code code;
  union tree_node buf;
  int i, len;

 recursive_label:
  if (expr == NULL)
    return;
  slot = ht.find_slot (expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = CONST_CAST_TREE (expr);
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      SET_DECL_ASSEMBLER_NAME ((tree) &buf, NULL);
      expr = (tree) &buf;
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
           && (TYPE_POINTER_TO (expr)
               || TYPE_REFERENCE_TO (expr)
               || TYPE_CACHED_VALUES_P (expr)
               || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
               || TYPE_NEXT_VARIANT (expr)))
    {
      /* Allow these fields to be modified.  */
      tree tmp;
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = tmp = (tree) &buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
      TYPE_POINTER_TO (tmp) = NULL;
      TYPE_REFERENCE_TO (tmp) = NULL;
      TYPE_NEXT_VARIANT (tmp) = NULL;
      if (TYPE_CACHED_VALUES_P (tmp))
        {
          TYPE_CACHED_VALUES_P (tmp) = 0;
          TYPE_CACHED_VALUES (tmp) = NULL;
        }
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST
      && code != SSA_NAME
      && CODE_CONTAINS_STRUCT (code, TS_COMMON))
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
        {
        case STRING_CST:
          md5_process_bytes (TREE_STRING_POINTER (expr),
                             TREE_STRING_LENGTH (expr), ctx);
          break;
        case COMPLEX_CST:
          fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
          fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
          break;
        case VECTOR_CST:
          for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
            fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
          break;
        default:
          break;
        }
      break;
    case tcc_exceptional:
      switch (code)
        {
        case TREE_LIST:
          fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
          fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
          expr = TREE_CHAIN (expr);
          goto recursive_label;
        case TREE_VEC:
          for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
            fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
          break;
        default:
          break;
        }
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
    case tcc_vl_exp:
      len = TREE_OPERAND_LENGTH (expr);
      for (i = 0; i < len; ++i)
        fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
        {
          fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
          fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
          fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
          fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
          fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
        }
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
        fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);

      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
        {
          fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
          fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
          fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
        }
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
        fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
          || SCALAR_FLOAT_TYPE_P (expr))
        {
          fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
          fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
        }
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
          || TREE_CODE (expr) == UNION_TYPE
          || TREE_CODE (expr) == QUAL_UNION_TYPE)
        fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}
/* Helper function for outputting the checksum of a tree T.  When
   debugging with gdb, you can "define mynext" to be "next" followed
   by "call debug_fold_checksum (op0)", then just trace down till the
   outputs differ.  */

DEBUG_FUNCTION void
debug_fold_checksum (const_tree t)
{
  int i;
  unsigned char checksum[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <tree_node> > ht;
  ht.create (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (t, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  ht.empty ();

  for (i = 0; i < 16; i++)
    fprintf (stderr, "%d ", checksum[i]);

  fprintf (stderr, "\n");
}

#endif
/* Fold a unary tree expression with code CODE of type TYPE with an
   operand OP0.  LOC is the location of the resulting expression.
   Return a folded expression if successful.  Otherwise, return a tree
   expression with code CODE of type TYPE with an operand OP0.  */

tree
fold_build1_stat_loc (location_t loc,
                      enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before[16], checksum_after[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <tree_node> > ht;

  ht.create (32);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  ht.empty ();
#endif

  tem = fold_unary_loc (loc, code, type, op0);
  if (!tem)
    tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  ht.dispose ();

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (op0, tem);
#endif
  return tem;
}
/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting
   expression.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with operands
   OP0 and OP1.  */

tree
fold_build2_stat_loc (location_t loc,
                      enum tree_code code, tree type, tree op0, tree op1
                      MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_after_op0[16],
                checksum_after_op1[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <tree_node> > ht;

  ht.create (32);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  ht.empty ();
#endif

  tem = fold_binary_loc (loc, code, type, op0, op1);
  if (!tem)
    tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  ht.empty ();

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  ht.dispose ();

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
#endif
  return tem;
}
/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */

tree
fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
                      tree op0, tree op1, tree op2 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_before_op2[16],
                checksum_after_op0[16],
                checksum_after_op1[16],
                checksum_after_op2[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <tree_node> > ht;

  ht.create (32);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  ht.empty ();
#endif

  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
  if (!tem)
    tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  ht.empty ();

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  ht.empty ();

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op2);
  ht.dispose ();

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}
/* Fold a CALL_EXPR expression of type TYPE with operand FN and NARGS
   arguments in ARGARRAY, and a null static chain.
   Return a folded expression if successful.  Otherwise, return a CALL_EXPR
   of type TYPE from the given operands as constructed by build_call_array.  */

tree
fold_build_call_array_loc (location_t loc, tree type, tree fn,
                           int nargs, tree *argarray)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_fn[16],
                checksum_before_arglist[16],
                checksum_after_fn[16],
                checksum_after_arglist[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <tree_node> > ht;
  int i;

  ht.create (32);
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_fn);
  ht.empty ();

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_arglist);
  ht.empty ();
#endif

  tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_fn);
  ht.empty ();

  if (memcmp (checksum_before_fn, checksum_after_fn, 16))
    fold_check_failed (fn, tem);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_arglist);
  ht.dispose ();

  if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
    fold_check_failed (NULL_TREE, tem);
#endif
  return tem;
}
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;

tree
fold_build1_initializer_loc (location_t loc, enum tree_code code,
                             tree type, tree op)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build1_loc (loc, code, type, op);

  END_FOLD_INIT;
  return result;
}

tree
fold_build2_initializer_loc (location_t loc, enum tree_code code,
                             tree type, tree op0, tree op1)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build2_loc (loc, code, type, op0, op1);

  END_FOLD_INIT;
  return result;
}

tree
fold_build3_initializer_loc (location_t loc, enum tree_code code,
                             tree type, tree op0, tree op1, tree op2)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build3_loc (loc, code, type, op0, op1, op2);

  END_FOLD_INIT;
  return result;
}

tree
fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
                                       int nargs, tree *argarray)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);

  END_FOLD_INIT;
  return result;
}

#undef START_FOLD_INIT
#undef END_FOLD_INIT
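/* For illustration only (not part of GCC): with the two macros expanded by
   hand, each wrapper above is equivalent to the save/clear/call/restore
   sequence sketched below for the unary case.  */
#if 0
tree
fold_build1_initializer_loc_expanded (location_t loc, enum tree_code code,
                                      tree type, tree op)
{
  int saved_trapping_math = flag_trapping_math;
  int saved_folding_initializer = folding_initializer;
  /* ...likewise for flag_signaling_nans, flag_rounding_math, flag_trapv.  */
  tree result;

  flag_trapping_math = 0;       /* Static initializers cannot trap.  */
  folding_initializer = 1;
  result = fold_build1_loc (loc, code, type, op);
  flag_trapping_math = saved_trapping_math;
  folding_initializer = saved_folding_initializer;
  return result;
}
#endif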
/* Determine if the first argument is a multiple of the second argument.
   Return 0 if it is not, or if we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */

int
multiple_of_p (tree type, const_tree top, const_tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
         a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
        return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
        {
          tree op1, t1;

          op1 = TREE_OPERAND (top, 1);
          /* const_binop may not detect overflow correctly,
             so check for it explicitly here.  */
          if (TYPE_PRECISION (TREE_TYPE (size_one_node))
              > TREE_INT_CST_LOW (op1)
              && TREE_INT_CST_HIGH (op1) == 0
              && 0 != (t1 = fold_convert (type,
                                          const_binop (LSHIFT_EXPR,
                                                       size_one_node, op1)))
              && !TREE_OVERFLOW (t1))
            return multiple_of_p (type, t1, bottom);
        }
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
          || (TYPE_PRECISION (type)
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
        return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case COND_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
              && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
          || integer_zerop (bottom)
          || (TYPE_UNSIGNED (type)
              && (tree_int_cst_sgn (top) < 0
                  || tree_int_cst_sgn (bottom) < 0)))
        return 0;
      return integer_zerop (int_const_binop (TRUNC_MOD_EXPR, top, bottom));

    default:
      return 0;
    }
}
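/* For illustration only (not part of GCC): on plain integers the recursion
   above mirrors ordinary algebra -- a product is a multiple of BOTTOM when
   either factor is, a sum or difference when both operands are.  A toy
   analogue with hypothetical names:  */
#if 0
static int
toy_product_multiple_p (long a, long b, long bottom)
{
  /* (a * b) % bottom == 0 if either factor is a multiple of bottom.  */
  return a % bottom == 0 || b % bottom == 0;
}

static int
toy_sum_multiple_p (long a, long b, long bottom)
{
  /* (a + b) % bottom == 0 if both addends are multiples of bottom.  */
  return a % bottom == 0 && b % bottom == 0;
}
#endif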
/* Return true if CODE or TYPE is known to be non-negative.  */

static bool
tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
{
  if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
      && truth_value_p (code))
    /* Truth values evaluate to 0 or 1, which is nonnegative unless we
       have a signed:1 type (where the value is -1 and 0).  */
    return true;

  return false;
}
/* Return true if (CODE OP0) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
                                bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
         ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!INTEGRAL_TYPE_P (type))
        return true;
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          *strict_overflow_p = true;
          return true;
        }
      break;

    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      return tree_expr_nonnegative_warnv_p (op0, strict_overflow_p);

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (op0);
        tree outer_type = type;

        if (TREE_CODE (outer_type) == REAL_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_warnv_p (op0, strict_overflow_p);
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              {
                if (TYPE_UNSIGNED (inner_type))
                  return true;
                return tree_expr_nonnegative_warnv_p (op0, strict_overflow_p);
              }
          }
        else if (TREE_CODE (outer_type) == INTEGER_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_warnv_p (op0, strict_overflow_p);
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
                     && TYPE_UNSIGNED (inner_type);
          }
      }
      break;

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
                                 tree op1, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type))
        return (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
                && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
         both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
          && TREE_CODE (op0) == NOP_EXPR
          && TREE_CODE (op1) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
            {
              unsigned int prec = MAX (TYPE_PRECISION (inner1),
                                       TYPE_PRECISION (inner2)) + 1;
              return prec < TYPE_PRECISION (type);
            }
        }
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          /* x * x is always non-negative for floating point x
             or without overflow.  */
          if (operand_equal_p (op0, op1, 0)
              || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
                  && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
            {
              if (TYPE_OVERFLOW_UNDEFINED (type))
                *strict_overflow_p = true;
              return true;
            }
        }

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
         both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
          && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
          && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
        {
          tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
            ? TREE_TYPE (TREE_OPERAND (op0, 0))
            : TREE_TYPE (op0);
          tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
            ? TREE_TYPE (TREE_OPERAND (op1, 0))
            : TREE_TYPE (op1);

          bool unsigned0 = TYPE_UNSIGNED (inner0);
          bool unsigned1 = TYPE_UNSIGNED (inner1);

          if (TREE_CODE (op0) == INTEGER_CST)
            unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;

          if (TREE_CODE (op1) == INTEGER_CST)
            unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;

          if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
              && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
            {
              unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
                ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
                : TYPE_PRECISION (inner0);

              unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
                ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
                : TYPE_PRECISION (inner1);

              return precision0 + precision1 < TYPE_PRECISION (type);
            }
        }
      return false;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
              || tree_expr_nonnegative_warnv_p (op1, strict_overflow_p));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
              && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_warnv_p (op0, strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
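/* For illustration only (not part of GCC): the precision tests above say a
   sum of two zero-extended values needs one extra bit, and a product needs
   the sum of the operands' bit-widths, to be representable without reaching
   the sign bit.  E.g. two 8-bit unsigned values multiplied in 32 bits:  */
#if 0
#include <assert.h>

static void
toy_zero_extend_mult (unsigned char x, unsigned char y)
{
  int prod = (int) x * (int) y; /* At most 255 * 255 = 65025 < 2**16.  */
  assert (prod >= 0);           /* 8 + 8 = 16 < 32, so never negative.  */
}
#endif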
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case FIXED_CST:
      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));

    case COND_EXPR:
      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                             strict_overflow_p)
              && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
                                                strict_overflow_p));

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_call_nonnegative_warnv_p (tree type, tree fndecl,
                               tree arg0, tree arg1, bool *strict_overflow_p)
{
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
        CASE_FLT_FN (BUILT_IN_ACOS):
        CASE_FLT_FN (BUILT_IN_ACOSH):
        CASE_FLT_FN (BUILT_IN_CABS):
        CASE_FLT_FN (BUILT_IN_COSH):
        CASE_FLT_FN (BUILT_IN_ERFC):
        CASE_FLT_FN (BUILT_IN_EXP):
        CASE_FLT_FN (BUILT_IN_EXP10):
        CASE_FLT_FN (BUILT_IN_EXP2):
        CASE_FLT_FN (BUILT_IN_FABS):
        CASE_FLT_FN (BUILT_IN_FDIM):
        CASE_FLT_FN (BUILT_IN_HYPOT):
        CASE_FLT_FN (BUILT_IN_POW10):
        CASE_INT_FN (BUILT_IN_FFS):
        CASE_INT_FN (BUILT_IN_PARITY):
        CASE_INT_FN (BUILT_IN_POPCOUNT):
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
        /* Always true.  */
        return true;

        CASE_FLT_FN (BUILT_IN_SQRT):
        /* sqrt(-0.0) is -0.0.  */
        if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
          return true;
        return tree_expr_nonnegative_warnv_p (arg0, strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_ASINH):
        CASE_FLT_FN (BUILT_IN_ATAN):
        CASE_FLT_FN (BUILT_IN_ATANH):
        CASE_FLT_FN (BUILT_IN_CBRT):
        CASE_FLT_FN (BUILT_IN_CEIL):
        CASE_FLT_FN (BUILT_IN_ERF):
        CASE_FLT_FN (BUILT_IN_EXPM1):
        CASE_FLT_FN (BUILT_IN_FLOOR):
        CASE_FLT_FN (BUILT_IN_FMOD):
        CASE_FLT_FN (BUILT_IN_FREXP):
        CASE_FLT_FN (BUILT_IN_ICEIL):
        CASE_FLT_FN (BUILT_IN_IFLOOR):
        CASE_FLT_FN (BUILT_IN_IRINT):
        CASE_FLT_FN (BUILT_IN_IROUND):
        CASE_FLT_FN (BUILT_IN_LCEIL):
        CASE_FLT_FN (BUILT_IN_LDEXP):
        CASE_FLT_FN (BUILT_IN_LFLOOR):
        CASE_FLT_FN (BUILT_IN_LLCEIL):
        CASE_FLT_FN (BUILT_IN_LLFLOOR):
        CASE_FLT_FN (BUILT_IN_LLRINT):
        CASE_FLT_FN (BUILT_IN_LLROUND):
        CASE_FLT_FN (BUILT_IN_LRINT):
        CASE_FLT_FN (BUILT_IN_LROUND):
        CASE_FLT_FN (BUILT_IN_MODF):
        CASE_FLT_FN (BUILT_IN_NEARBYINT):
        CASE_FLT_FN (BUILT_IN_RINT):
        CASE_FLT_FN (BUILT_IN_ROUND):
        CASE_FLT_FN (BUILT_IN_SCALB):
        CASE_FLT_FN (BUILT_IN_SCALBLN):
        CASE_FLT_FN (BUILT_IN_SCALBN):
        CASE_FLT_FN (BUILT_IN_SIGNBIT):
        CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
        CASE_FLT_FN (BUILT_IN_SINH):
        CASE_FLT_FN (BUILT_IN_TANH):
        CASE_FLT_FN (BUILT_IN_TRUNC):
        /* True if the 1st argument is nonnegative.  */
        return tree_expr_nonnegative_warnv_p (arg0, strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_FMAX):
        /* True if the 1st OR 2nd arguments are nonnegative.  */
        return (tree_expr_nonnegative_warnv_p (arg0, strict_overflow_p)
                || tree_expr_nonnegative_warnv_p (arg1, strict_overflow_p));

        CASE_FLT_FN (BUILT_IN_FMIN):
        /* True if the 1st AND 2nd arguments are nonnegative.  */
        return (tree_expr_nonnegative_warnv_p (arg0, strict_overflow_p)
                && tree_expr_nonnegative_warnv_p (arg1, strict_overflow_p));

        CASE_FLT_FN (BUILT_IN_COPYSIGN):
        /* True if the 2nd argument is nonnegative.  */
        return tree_expr_nonnegative_warnv_p (arg1, strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_POWI):
        /* True if the 1st argument is nonnegative or the second
           argument is an even integer.  */
        if (TREE_CODE (arg1) == INTEGER_CST
            && (TREE_INT_CST_LOW (arg1) & 1) == 0)
          return true;
        return tree_expr_nonnegative_warnv_p (arg0, strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_POW):
        /* True if the 1st argument is nonnegative or the second
           argument is an even integer valued real.  */
        if (TREE_CODE (arg1) == REAL_CST)
          {
            REAL_VALUE_TYPE c;
            HOST_WIDE_INT n;

            c = TREE_REAL_CST (arg1);
            n = real_to_integer (&c);
            if ((n & 1) == 0)
              {
                REAL_VALUE_TYPE cint;
                real_from_integer (&cint, VOIDmode, n,
                                   n < 0 ? -1 : 0, 0);
                if (real_identical (&c, &cint))
                  return true;
              }
          }
        return tree_expr_nonnegative_warnv_p (arg0, strict_overflow_p);

      default:
        break;
      }
  return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
}
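/* For illustration only (not part of GCC): the BUILT_IN_POW case accepts
   e.g. pow (x, 4.0) as non-negative for any x, since an even integral
   exponent makes the result a square: pow (x, 4.0) == pow (x * x, 2.0).
   The REAL_CST test above amounts to this hypothetical predicate:  */
#if 0
static int
toy_even_integral_exponent_p (double exponent)
{
  long n = (long) exponent;     /* Analogue of real_to_integer.  */
  /* Analogue of real_identical: EXPONENT must be exactly integral.  */
  return (double) n == exponent && (n & 1) == 0;
}
#endif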
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

static bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code = TREE_CODE (t);
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (code)
    {
    case TARGET_EXPR:
      {
        tree temp = TARGET_EXPR_SLOT (t);
        t = TARGET_EXPR_INITIAL (t);

        /* If the initializer is non-void, then it's a normal expression
           that will be assigned to the slot.  */
        if (!VOID_TYPE_P (t))
          return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);

        /* Otherwise, the initializer sets the slot in some way.  One common
           way is an assignment statement at the end of the initializer.  */
        while (1)
          {
            if (TREE_CODE (t) == BIND_EXPR)
              t = expr_last (BIND_EXPR_BODY (t));
            else if (TREE_CODE (t) == TRY_FINALLY_EXPR
                     || TREE_CODE (t) == TRY_CATCH_EXPR)
              t = expr_last (TREE_OPERAND (t, 0));
            else if (TREE_CODE (t) == STATEMENT_LIST)
              t = expr_last (t);
            else
              break;
          }
        if (TREE_CODE (t) == MODIFY_EXPR
            && TREE_OPERAND (t, 0) == temp)
          return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                                strict_overflow_p);

        return false;
      }

    case CALL_EXPR:
      {
        tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
        tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;

        return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
                                              get_callee_fndecl (t),
                                              arg0, arg1,
                                              strict_overflow_p);
      }
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                            strict_overflow_p);
    case BIND_EXPR:
      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
                                            strict_overflow_p);
    case SAVE_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
                                            strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code;

  if (t == error_mark_node)
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              TREE_OPERAND (t, 1),
                                              strict_overflow_p);

    case tcc_unary:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
                                             TREE_TYPE (t),
                                             TREE_OPERAND (t, 0),
                                             strict_overflow_p);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              TREE_OPERAND (t, 1),
                                              strict_overflow_p);
    case TRUTH_NOT_EXPR:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
                                             TREE_TYPE (t),
                                             TREE_OPERAND (t, 0),
                                             strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
    }
}
/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */

bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
                            "determining that expression is always "
                            "non-negative"),
                           WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
/* Return true when (CODE OP0) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.c.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
                            bool *strict_overflow_p)
{
  switch (code)
    {
    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (op0, strict_overflow_p);

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (op0);
        tree outer_type = type;

        return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
                && tree_expr_nonzero_warnv_p (op0, strict_overflow_p));
      }
      break;

    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (op0, strict_overflow_p);

    default:
      break;
    }

  return false;
}
/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.c.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonzero_warnv_p (enum tree_code code,
                             tree type,
                             tree op0,
                             tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          /* With the presence of negative values it is hard
             to say something.  */
          sub_strict_overflow_p = false;
          if (!tree_expr_nonnegative_warnv_p (op0, &sub_strict_overflow_p)
              || !tree_expr_nonnegative_warnv_p (op1, &sub_strict_overflow_p))
            return false;
          /* One of operands must be positive and the other non-negative.  */
          /* We don't set *STRICT_OVERFLOW_P here: even if this value
             overflows, on a twos-complement machine the sum of two
             nonnegative numbers can never be zero.  */
          return (tree_expr_nonzero_warnv_p (op0, strict_overflow_p)
                  || tree_expr_nonzero_warnv_p (op1, strict_overflow_p));
        }
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          if (tree_expr_nonzero_warnv_p (op0, strict_overflow_p)
              && tree_expr_nonzero_warnv_p (op1, strict_overflow_p))
            {
              *strict_overflow_p = true;
              return true;
            }
        }
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0, &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (op1, &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0, &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;

          /* When both operands are nonzero, then MAX must be too.  */
          if (tree_expr_nonzero_warnv_p (op1, strict_overflow_p))
            return true;

          /* MAX where operand 0 is positive is positive.  */
          return tree_expr_nonnegative_warnv_p (op0, strict_overflow_p);
        }
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1, &sub_strict_overflow_p)
               && tree_expr_nonnegative_warnv_p (op1, &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (op1, strict_overflow_p)
              || tree_expr_nonzero_warnv_p (op0, strict_overflow_p));

    default:
      break;
    }

  return false;
}
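/* For illustration only (not part of GCC): the PLUS_EXPR case above relies
   on the fact that when signed overflow is undefined, two non-negative
   addends of which at least one is non-zero cannot sum to zero, since
   x >= 0 and y > 0 give x + y >= y > 0 in the absence of wrap-around:  */
#if 0
#include <assert.h>

static void
toy_nonzero_sum (long x, long y)
{
  assert (x >= 0 && y > 0);     /* Non-negative plus non-zero...  */
  assert (x + y != 0);          /* ...is non-zero if no overflow occurs.  */
}
#endif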
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.c.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
        tree base = TREE_OPERAND (t, 0);
        if (!DECL_P (base))
          base = get_base_address (base);

        if (!base)
          return false;

        /* Weak declarations may link to NULL.  Other things may also be NULL
           so protect with -fdelete-null-pointer-checks; but not variables
           allocated on the stack.  */
        if (DECL_P (base)
            && (flag_delete_null_pointer_checks
                || (DECL_CONTEXT (base)
                    && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
                    && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
          return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);

        /* Constants are never weak.  */
        if (CONSTANT_CLASS_P (base))
          return true;

        return false;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                     &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
                                        &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    default:
      break;
    }

  return false;
}
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.c.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
                                         strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
                                          TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1),
                                          strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
                                         strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
                                          TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1),
                                          strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                        strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
                                        strict_overflow_p);

    case CALL_EXPR:
      return alloca_call_p (t);

    default:
      break;
    }
  return false;
}
/* Return true when T is an address and is known to be nonzero.
   Handle warnings about undefined signed overflow.  */

bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
                            "determining that expression is always "
                            "non-zero"),
                           WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;
      location_t loc = EXPR_LOCATION (exp);

      if (TREE_CODE (exp) == INDIRECT_REF)
        string = string_constant (exp1, &index);
      else
        {
          tree low_bound = array_ref_low_bound (exp);
          index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));

          /* Optimize the special-case of a zero lower bound.

             We convert the low_bound to sizetype to avoid some problems
             with constant folding.  (E.g. suppose the lower bound is 1,
             and its mode is QI.  Without the conversion, (ARRAY
             +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
             +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
          if (! integer_zerop (low_bound))
            index = size_diffop_loc (loc, index,
                                     fold_convert_loc (loc, sizetype,
                                                       low_bound));

          string = exp1;
        }

      if (string
          && TYPE_MODE (TREE_TYPE (exp))
             == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
          && TREE_CODE (string) == STRING_CST
          && TREE_CODE (index) == INTEGER_CST
          && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
          && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
              == MODE_INT)
          && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
        return build_int_cst_type (TREE_TYPE (exp),
                                   (TREE_STRING_POINTER (string)
                                    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        double_int val = tree_to_double_int (arg0);
        bool overflow;
        val = val.neg_with_overflow (&overflow);
        t = force_fit_type_double (type, val, 1,
                                   (overflow | TREE_OVERFLOW (arg0))
                                   && !TYPE_UNSIGNED (type));
        break;
      }

    case REAL_CST:
      t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
        FIXED_VALUE_TYPE f;
        bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
                                            &(TREE_FIXED_CST (arg0)), NULL,
                                            TYPE_SATURATING (type));
        t = build_fixed (type, f);
        /* Propagate overflow flags.  */
        if (overflow_p | TREE_OVERFLOW (arg0))
          TREE_OVERFLOW (t) = 1;
        break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        double_int val = tree_to_double_int (arg0);

        /* If the value is unsigned or non-negative, then the absolute value
           is the same as the ordinary value.  */
        if (TYPE_UNSIGNED (type)
            || !val.is_negative ())
          t = arg0;

        /* If the value is negative, then the absolute value is
           its negation.  */
        else
          {
            bool overflow;
            val = val.neg_with_overflow (&overflow);
            t = force_fit_type_double (type, val, -1,
                                       overflow | TREE_OVERFLOW (arg0));
          }
      }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
        t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      else
        t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (const_tree arg0, tree type)
{
  double_int val;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  val = ~tree_to_double_int (arg0);
  return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
}
/* Given CODE, a relational operator, the target type, TYPE, and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
        {
          switch (code)
            {
            case EQ_EXPR:
            case ORDERED_EXPR:
              result = 0;
              break;

            case NE_EXPR:
            case UNORDERED_EXPR:
            case UNLT_EXPR:
            case UNLE_EXPR:
            case UNGT_EXPR:
            case UNGE_EXPR:
            case UNEQ_EXPR:
              result = 1;
              break;

            case LT_EXPR:
            case LE_EXPR:
            case GT_EXPR:
            case GE_EXPR:
            case LTGT_EXPR:
              if (flag_trapping_math)
                return NULL_TREE;
              result = 0;
              break;

            default:
              gcc_unreachable ();
            }

          return constant_boolean_node (result, type);
        }

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
                                          TREE_REALPART (op0),
                                          TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
                                          TREE_IMAGPART (op0),
                                          TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
        return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
        return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
        return NULL_TREE;
    }

  if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
    {
      unsigned count = VECTOR_CST_NELTS (op0);
      tree *elts = XALLOCAVEC (tree, count);
      gcc_assert (VECTOR_CST_NELTS (op1) == count
                  && TYPE_VECTOR_SUBPARTS (type) == count);

      for (unsigned i = 0; i < count; i++)
        {
          tree elem_type = TREE_TYPE (type);
          tree elem0 = VECTOR_CST_ELT (op0, i);
          tree elem1 = VECTOR_CST_ELT (op1, i);

          tree tem = fold_relational_const (code, elem_type,
                                            elem0, elem1);

          if (tem == NULL_TREE)
            return NULL_TREE;

          elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
        }

      return build_vector (type, elts);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case where it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
        result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
        result = INT_CST_LT_UNSIGNED (op0, op1);
      else
        result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
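/* For illustration only (not part of GCC): the canonicalization above
   reduces all six orderings to LT and EQ via swaps and inversions.  A toy
   integer-only analogue (codes: 0 LT, 1 LE, 2 GT, 3 GE, 4 EQ, 5 NE):  */
#if 0
static int
toy_compare (int code, long a, long b)
{
  int invert = 0, result;
  if (code == 1 /* LE */ || code == 2 /* GT */)
    {
      long t = a; a = b; b = t; /* LE(a,b) = GE(b,a); GT(a,b) = LT(b,a).  */
      code = (code == 1) ? 3 : 0;
    }
  if (code == 5 /* NE */ || code == 3 /* GE */)
    {
      invert = 1;               /* NE = !EQ; GE = !LT.  */
      code = (code == 5) ? 4 : 0;
    }
  result = (code == 4) ? (a == b) : (a < b);
  return invert ? !result : result;
}
#endif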
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check the expression inside the return
     and the right hand side of the modify expression inside the return.  If
     either has no side effects, we don't need to wrap the expression in a
     cleanup point expression.  Note we don't check the left hand side of the
     modify because it should always be a return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
        return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
        return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

static tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
        return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
        {
          tree fop = fold_read_from_constant_string (op);
          if (fop)
            return fop;
          else
            return op;
        }
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype)
               && (!in_gimple_form
                   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          if (in_gimple_form
              && TREE_CODE (min_val) != INTEGER_CST)
            return NULL_TREE;
          return build4_loc (loc, ARRAY_REF, type, op, min_val,
                             NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && type == TREE_TYPE (optype))
        return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && type == TREE_TYPE (optype))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3_loc (loc, BIT_FIELD_REF, type, op,
                                  part_width, index);
        }
    }

  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
        {
          tree op00type;
          op00 = TREE_OPERAND (op00, 0);
          op00type = TREE_TYPE (op00);

          /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
          if (TREE_CODE (op00type) == VECTOR_TYPE
              && type == TREE_TYPE (op00type))
            {
              HOST_WIDE_INT offset = tree_low_cst (op01, 0);
              tree part_width = TYPE_SIZE (type);
              unsigned HOST_WIDE_INT part_widthi
                = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
              unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
              tree index = bitsize_int (indexi);

              if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
                return fold_build3_loc (loc,
                                        BIT_FIELD_REF, type, op00,
                                        part_width, index);
            }
          /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
          else if (TREE_CODE (op00type) == COMPLEX_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree size = TYPE_SIZE_UNIT (type);
              if (tree_int_cst_equal (size, op01))
                return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
            }
          /* ((foo *)&fooarray)[1] => fooarray[1] */
          else if (TREE_CODE (op00type) == ARRAY_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree type_domain = TYPE_DOMAIN (op00type);
              tree min_val = size_zero_node;
              if (type_domain && TYPE_MIN_VALUE (type_domain))
                min_val = TYPE_MIN_VALUE (type_domain);
              op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
                                     TYPE_SIZE_UNIT (type));
              op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
              return build4_loc (loc, ARRAY_REF, type, op00, op01,
                                 NULL_TREE, NULL_TREE);
            }
        }
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
          || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
          && TREE_CODE (min_val) != INTEGER_CST)
        return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
                         NULL_TREE);
    }

  return NULL_TREE;
}
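/* For illustration only (not part of GCC): at the source level the
   simplifications above correspond to rewrites such as

     double a[4];        *(double *) &a      =>  a[0]
     _Complex double c;  *(double *) &c      =>  __real__ c
                         ((double *) &c)[1]  =>  __imag__ c
                         ((double *) &a)[1]  =>  a[1]  */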
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}
/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
        {
          double_int val = tree_to_double_int (value);
          bool overflow_p;

          if ((val.low & (divisor - 1)) == 0)
            return value;

          overflow_p = TREE_OVERFLOW (value);
          val.low &= ~(divisor - 1);
          val.low += divisor;
          if (val.low == 0)
            {
              val.high++;
              if (val.high == 0)
                overflow_p = true;
            }

          return force_fit_type_double (TREE_TYPE (value), val,
                                        -1, overflow_p);
        }
      else
        {
          tree t;

          t = build_int_cst (TREE_TYPE (value), divisor - 1);
          value = size_binop_loc (loc, PLUS_EXPR, value, t);
          t = build_int_cst (TREE_TYPE (value), -divisor);
          value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
        }
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
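/* For illustration only (not part of GCC): for a power-of-two DIVISOR the
   PLUS/BIT_AND pair built above is the familiar mask trick; note that in
   two's complement -divisor == ~(divisor - 1):  */
#if 0
static unsigned long
toy_round_up_pow2 (unsigned long value, unsigned long divisor)
{
  /* Requires divisor != 0 && (divisor & (divisor - 1)) == 0.  */
  return (value + divisor - 1) & ~(divisor - 1);
}
#endif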
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
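/* For illustration only (not part of GCC): rounding down needs no addition,
   only the mask:  */
#if 0
static unsigned long
toy_round_down_pow2 (unsigned long value, unsigned long divisor)
{
  /* Requires divisor != 0 && (divisor & (divisor - 1)) == 0.  */
  return value & ~(divisor - 1);
}
#endif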
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
                                arg0 ? arg0 : TREE_OPERAND (exp, 0),
                                arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp),
                                arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3_loc (loc,
                                COND_EXPR, TREE_TYPE (exp),
                                TREE_OPERAND (exp, 0),
                                arg0 ? arg0 : TREE_OPERAND (exp, 1),
                                arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_COPYSIGN):
            /* Strip copysign function call, return the 1st argument.  */
            arg0 = CALL_EXPR_ARG (exp, 0);
            arg1 = CALL_EXPR_ARG (exp, 1);
            return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

          default:
            /* Strip sign ops from the argument of "odd" math functions.  */
            if (negate_mathfn_p (fcode))
              {
                arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
                if (arg0)
                  return build_call_expr_loc (loc, get_callee_fndecl (exp),
                                              1, arg0);
              }
            break;
          }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}