/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
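
/* For illustration only (not part of the original file): a hypothetical
   caller elsewhere in the middle end might use these entry points like
   so.  The variable names here are made up.

     tree twelve = size_binop (PLUS_EXPR, size_int (4), size_int (8));
     // Both operands are constants, so size_binop folds them into the
     // sizetype constant 12 instead of building a PLUS_EXPR node.

     tree simplified = fold (expr);
     // fold returns a (possibly) simplified tree; when no
     // simplification applies, the result is equivalent to EXPR.  */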
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "diagnostic-core.h"
#include "langhooks.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "hash-table.h"  /* Required for ENABLE_FOLD_CHECKING.  */
#include "plugin-api.h"
#include "generic-match.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
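
/* For illustration: the encoding gives each elementary outcome its own
   bit -- roughly "less", "equal", "greater" and "unordered" -- so each
   composite operator is the bit-union of the outcomes for which it is
   true.  For example, COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ), and
   ANDing the codes for LE and GE leaves exactly COMPCODE_EQ, which is
   how combine_comparisons below can simplify
   (a <= b) && (a >= b) into a == b.  */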
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    machine_mode *, int *, int *,
                                    tree *, tree *);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
/* Similar to protected_set_expr_location, but never modify x in place,
   if location can and needs to be set, unshare it.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

static tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
                         SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}
/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }

  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}
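
/* For example: in a 32-bit int type, -2147483648 (INT_MIN) is the one
   value whose negation is not representable, so this predicate is false
   exactly when T is the most negative value of a signed type -- i.e.
   when only its sign bit is set (wi::only_sign_bit_p).  */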
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
        if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
          return true;

        int count = TYPE_VECTOR_SUBPARTS (type), i;

        for (i = 0; i < count; i++)
          if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
            return false;

        return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          || HONOR_SIGNED_ZEROS (element_mode (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
             && !HONOR_SIGNED_ZEROS (element_mode (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
        {
          if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
            break;
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
              || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
                  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
            return true;
        }
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
        return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
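
/* For example (illustrative only): with -fwrapv, negate_expr_p accepts
   any INTEGER_CST; without it, it accepts 5 but rejects INT_MIN.  For a
   REAL_CST it accepts only negative constants, which is how folding
   canonicalizes toward positive floating-point literals.  */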
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || (!TYPE_OVERFLOW_TRAPS (type)
              && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
        int count = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts = XALLOCAVEC (tree, count);

        for (i = 0; i < count; i++)
          {
            elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
        return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          && !HONOR_SIGNED_ZEROS (element_mode (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          && !HONOR_SIGNED_ZEROS (element_mode (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          tem = TREE_OPERAND (t, 0);
          if ((INTEGRAL_TYPE_P (type)
               && (TREE_CODE (tem) == NEGATE_EXPR
                   || (TREE_CODE (tem) == INTEGER_CST
                       && may_negate_without_overflow_p (tem))))
              || !INTEGRAL_TYPE_P (type))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
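
/* For example (illustrative only): in a type where the reassociation
   above is allowed, fold_negate_expr rewrites -(a - b) as b - a, and
   -(a + b) becomes (-b) - a when b is cheaply negatable; - (~a)
   becomes a + 1 for integral types.  */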
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
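
/* A worked example (illustrative only): splitting IN = x + 3 with
   CODE == PLUS_EXPR stores the literal 3 in *LITP, leaves *CONP null,
   and returns the variable part x.  For IN = x - 3, the subtracted
   literal goes to *MINUS_LITP instead, so the caller can reassociate
   without eagerly negating the constant.  */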
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
                   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
                                  TYPE_SIGN (TREE_TYPE (parg2)));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RSHIFT_EXPR)
            code = LSHIFT_EXPR;
          else
            code = RSHIFT_EXPR;
        }

      if (code == RSHIFT_EXPR)
        /* It's unclear from the C standard whether shifts can overflow.
           The following code ignores overflow; perhaps a C standard
           interpretation ruling is needed.  */
        res = wi::rshift (arg1, arg2, sign);
      else
        res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RROTATE_EXPR)
            code = LROTATE_EXPR;
          else
            code = RROTATE_EXPR;
        }

      if (code == RROTATE_EXPR)
        res = wi::rrotate (arg1, arg2);
      else
        res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
                      (((sign == SIGNED || overflowable == -1)
                        && overflow)
                       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
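
/* For example (illustrative only):

     tree two = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree five = int_const_binop (PLUS_EXPR, two, three);
     // five is the INTEGER_CST 5; for codes int_const_binop_1 does not
     // handle, or for a division by zero, the result is NULL_TREE.  */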
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may dependent upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          {
            wide_int w2 = arg2;
            f2.data.high = w2.elt (1);
            f2.data.low = w2.elt (0);
            f2.mode = SImode;
          }
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ... */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to minimize
                 overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and return NULL_TREE */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);

          elts[i] = const_binop (code, elem1, arg2);

          /* It is possible that const_binop cannot handle the given
             code and return NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
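
/* For example (illustrative only): size_binop (PLUS_EXPR,
   size_zero_node, arg) short-circuits to ARG without building a new
   node, while two genuine constants go through int_const_binop_1 with
   overflowable == -1 so that even unsigned sizetype overflow is
   recorded via TREE_OVERFLOW.  */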
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
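
/* For example (illustrative only): with sizetype constants 2 and 5,
   size_diffop_loc computes 5 - 2 in the unsigned type (which cannot
   overflow) and negates after converting, yielding the ssizetype
   constant -3, a value the unsigned subtraction could not represent
   directly.  */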
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
                         !POINTER_TYPE_P (TREE_TYPE (arg1)),
                         TREE_OVERFLOW (arg1));
}
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = true;
          val = lt;
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = true;
              val = ut;
            }
        }
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  machine_mode mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
                          HOST_BITS_PER_DOUBLE_INT,
                          SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     By checking if the fractional bits are not zero to add 1 to temp.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type (type, temp, -1,
                      (temp.is_negative ()
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;
  double_int di;

  gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);

  di.low = TREE_INT_CST_ELT (arg1, 0);
  if (TREE_INT_CST_NUNITS (arg1) == 1)
    di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
  else
    di.high = TREE_INT_CST_ELT (arg1, 1);

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}
/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
          && (TYPE_ADDR_SPACE (TREE_TYPE (type))
              != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
        return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, REALPART_EXPR,
                                                  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == FIXED_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1_loc (loc, NOP_EXPR, type, arg);

        case FIXED_POINT_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
          || TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            goto fold_convert_exit;
        }

      switch (TREE_CODE (orig))
        {
        case FIXED_POINT_TYPE:
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case BOOLEAN_TYPE:
        case REAL_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
        case FIXED_POINT_TYPE:
          return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                  fold_convert_loc (loc, TREE_TYPE (type), arg),
                                  fold_convert_loc (loc, TREE_TYPE (type),
                                                    integer_zero_node));
        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 0));
                ipart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 1));
                return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
              }

            arg = save_expr (arg);
            rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
            ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
            rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
            ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
            return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
          }

        default:
          gcc_unreachable ();
        }

    case VECTOR_TYPE:
      if (integer_zerop (arg))
        return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
                  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      gcc_unreachable ();
    }
 fold_convert_exit:
  protected_set_expr_location_unshare (tem, loc);
  return tem;
}
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (const_tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
  {
  case VAR_DECL:
  case PARM_DECL:
  case RESULT_DECL:
  case LABEL_DECL:
  case FUNCTION_DECL:
  case SSA_NAME:

  case COMPONENT_REF:
  case MEM_REF:
  case INDIRECT_REF:
  case ARRAY_REF:
  case ARRAY_RANGE_REF:
  case BIT_FIELD_REF:
  case OBJ_TYPE_REF:

  case REALPART_EXPR:
  case IMAGPART_EXPR:
  case PREINCREMENT_EXPR:
  case PREDECREMENT_EXPR:
  case SAVE_EXPR:
  case TRY_CATCH_EXPR:
  case WITH_CLEANUP_EXPR:
  case COMPOUND_EXPR:
  case MODIFY_EXPR:
  case TARGET_EXPR:
  case COND_EXPR:
  case BIND_EXPR:
    break;

  default:
    /* Assume the worst for front-end tree codes.  */
    if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
      break;
    return false;
  }

  return true;
}
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue_loc (location_t loc, tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;
  return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
}
/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue_loc (location_t loc, tree x)
{
  return protected_set_expr_location_unshare (x, loc);
}
/* Given a tree comparison code, return the code that is the logical inverse.
   It is generally not safe to do this for floating-point comparisons, except
   for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
   ERROR_MARK in this case.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
      && code != ORDERED_EXPR && code != UNORDERED_EXPR)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case LTGT_EXPR:
    case UNEQ_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    case UNGT_EXPR:
      return UNLT_EXPR;
    case UNGE_EXPR:
      return UNLE_EXPR;
    case UNLT_EXPR:
      return UNGT_EXPR;
    case UNLE_EXPR:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

tree
combine_comparisons (location_t loc,
                     enum tree_code code, enum tree_code lcode,
                     enum tree_code rcode, tree truth_type,
                     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (element_mode (ll_arg));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  int compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
         which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
        compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
        compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
         under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
                   && (lcompcode != COMPCODE_EQ)
                   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
                   && (rcompcode != COMPCODE_EQ)
                   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
                  && (compcode != COMPCODE_EQ)
                  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
         such that the RHS, if evaluated, will never trap.  For
         example, in ORD (x, y) && (x < y), we evaluate the RHS only
         if neither x nor y is NaN.  (This is a mixed blessing: for
         example, the expression above will never trap, hence
         optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
          || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
        rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
         trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
          && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
        return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
        return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    {
      enum tree_code tcode;

      tcode = compcode_to_comparison ((enum comparison_code) compcode);
      return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
    }
}
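/* For illustration: given identical operands, (a < b) && (a <= b) has
   lcompcode == COMPCODE_LT and rcompcode == COMPCODE_LE; ANDing the
   bit encodings yields COMPCODE_LT again, so the pair folds to the
   single comparison a < b.  Likewise (a < b) || (a == b) ORs to
   COMPCODE_LE and folds to a <= b.  */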
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */
int
operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
      || TREE_TYPE (arg0) == error_mark_node
      || TREE_TYPE (arg1) == error_mark_node)
    return 0;

  /* Similar, if either does not have a type (like a released SSA name),
     they aren't equal.  */
  if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
    return 0;

  /* Check equality of integer constants before bailing out due to
     precision differences.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    return tree_int_cst_equal (arg0, arg1);

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  As pointers
     strictly don't have a signedness, require either two pointers or
     two non-pointers as well.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
      || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* We cannot consider pointers to different address spaces equal.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
      && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
          != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
    return 0;

  /* If both types don't have the same precision, then it is not safe
     to strip NOPs.  */
  if (element_precision (TREE_TYPE (arg0))
      != element_precision (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* In case both args are comparisons but with different comparison
     code, try to swap the comparison operands of one arg to produce
     a match and compare that variant.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      && COMPARISON_CLASS_P (arg0)
      && COMPARISON_CLASS_P (arg1))
    {
      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));

      if (TREE_CODE (arg0) == swap_code)
        return operand_equal_p (TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg1, 1), flags)
               && operand_equal_p (TREE_OPERAND (arg0, 1),
                                   TREE_OPERAND (arg1, 0), flags);
    }

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* NOP_EXPR and CONVERT_EXPR are considered equal.  */
      && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
    return 0;

  /* This is needed for conversions and for COMPONENT_REF.
     Might as well play it safe and always test this.  */
  if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.
     If we are taking an invariant address of two identical objects
     they are necessarily equal as well.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
          || (flags & OEP_CONSTANT_ADDRESS_OF)
          || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
        return tree_int_cst_equal (arg0, arg1);

      case FIXED_CST:
        return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
                                       TREE_FIXED_CST (arg1));

      case REAL_CST:
        if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
                                   TREE_REAL_CST (arg1)))
          return 1;

        if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
          {
            /* If we do not distinguish between signed and unsigned zero,
               consider them equal.  */
            if (real_zerop (arg0) && real_zerop (arg1))
              return 1;
          }
        return 0;

      case VECTOR_CST:
        {
          unsigned i;

          if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
            return 0;

          for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
            if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
                                  VECTOR_CST_ELT (arg1, i), flags))
              return 0;
          return 1;
        }

      case COMPLEX_CST:
        return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
                                 flags)
                && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
                                    flags));

      case STRING_CST:
        return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
                && ! memcmp (TREE_STRING_POINTER (arg0),
                             TREE_STRING_POINTER (arg1),
                             TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
        return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
                                TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
                                ? OEP_CONSTANT_ADDRESS_OF : 0);
      default:
        break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;

/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, the both
   must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
                                    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N) \
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
        {
        CASE_CONVERT:
        case FIX_TRUNC_EXPR:
          if (TYPE_UNSIGNED (TREE_TYPE (arg0))
              != TYPE_UNSIGNED (TREE_TYPE (arg1)))
            return 0;
          break;
        default:
          break;
        }

      return OP_SAME (0);

    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
        return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 1), flags)
              && operand_equal_p (TREE_OPERAND (arg0, 1),
                                  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
         dereferencing contain a side effect, these cannot be equal,
         but their addresses can be.  */
      if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
          && (TREE_SIDE_EFFECTS (arg0)
              || TREE_SIDE_EFFECTS (arg1)))
        return 0;

      switch (TREE_CODE (arg0))
        {
        case INDIRECT_REF:
          flags &= ~OEP_CONSTANT_ADDRESS_OF;
          return OP_SAME (0);

        case REALPART_EXPR:
        case IMAGPART_EXPR:
          return OP_SAME (0);

        case TARGET_MEM_REF:
          flags &= ~OEP_CONSTANT_ADDRESS_OF;
          /* Require equal extra operands and then fall through to MEM_REF
             handling of the two common operands.  */
          if (!OP_SAME_WITH_NULL (2)
              || !OP_SAME_WITH_NULL (3)
              || !OP_SAME_WITH_NULL (4))
            return 0;
          /* Fallthru.  */
        case MEM_REF:
          flags &= ~OEP_CONSTANT_ADDRESS_OF;
          /* Require equal access sizes, and similar pointer types.
             We can have incomplete types for array references of
             variable-sized arrays from the Fortran frontend
             though.  Also verify the types are compatible.  */
          return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
                   || (TYPE_SIZE (TREE_TYPE (arg0))
                       && TYPE_SIZE (TREE_TYPE (arg1))
                       && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
                                           TYPE_SIZE (TREE_TYPE (arg1)),
                                           flags)))
                  && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
                  && alias_ptr_types_compatible_p
                       (TREE_TYPE (TREE_OPERAND (arg0, 1)),
                        TREE_TYPE (TREE_OPERAND (arg1, 1)))
                  && OP_SAME (0) && OP_SAME (1));

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          /* Operands 2 and 3 may be null.
             Compare the array index by value if it is constant first as we
             may have different types but same value here.  */
          if (!OP_SAME (0))
            return 0;
          flags &= ~OEP_CONSTANT_ADDRESS_OF;
          return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
                                       TREE_OPERAND (arg1, 1))
                   || OP_SAME (1))
                  && OP_SAME_WITH_NULL (2)
                  && OP_SAME_WITH_NULL (3));

        case COMPONENT_REF:
          /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
             may be NULL when we're called to compare MEM_EXPRs.  */
          if (!OP_SAME_WITH_NULL (0)
              || !OP_SAME (1))
            return 0;
          flags &= ~OEP_CONSTANT_ADDRESS_OF;
          return OP_SAME_WITH_NULL (2);

        case BIT_FIELD_REF:
          if (!OP_SAME (0))
            return 0;
          flags &= ~OEP_CONSTANT_ADDRESS_OF;
          return OP_SAME (1) && OP_SAME (2);

        default:
          return 0;
        }

    case tcc_expression:
      switch (TREE_CODE (arg0))
        {
        case ADDR_EXPR:
        case TRUTH_NOT_EXPR:
          return OP_SAME (0);

        case TRUTH_ANDIF_EXPR:
        case TRUTH_ORIF_EXPR:
          return OP_SAME (0) && OP_SAME (1);

        case WIDEN_MULT_PLUS_EXPR:
        case WIDEN_MULT_MINUS_EXPR:
          if (!OP_SAME (2))
            return 0;
          /* The multiplication operands are commutative.  */
          /* FALLTHRU */

        case TRUTH_AND_EXPR:
        case TRUTH_OR_EXPR:
        case TRUTH_XOR_EXPR:
          if (OP_SAME (0) && OP_SAME (1))
            return 1;

          /* Otherwise take into account this is a commutative operation.  */
          return (operand_equal_p (TREE_OPERAND (arg0, 0),
                                   TREE_OPERAND (arg1, 1), flags)
                  && operand_equal_p (TREE_OPERAND (arg0, 1),
                                      TREE_OPERAND (arg1, 0), flags));

        case COND_EXPR:
        case VEC_COND_EXPR:
        case DOT_PROD_EXPR:
          return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

        default:
          return 0;
        }

    case tcc_vl_exp:
      switch (TREE_CODE (arg0))
        {
        case CALL_EXPR:
          /* If the CALL_EXPRs call different functions, then they
             clearly can not be equal.  */
          if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
                                 flags))
            return 0;

          {
            unsigned int cef = call_expr_flags (arg0);
            if (flags & OEP_PURE_SAME)
              cef &= ECF_CONST | ECF_PURE;
            else
              cef &= ECF_CONST;
            if (!cef)
              return 0;
          }

          /* Now see if all the arguments are the same.  */
          {
            const_call_expr_arg_iterator iter0, iter1;
            const_tree a0, a1;
            for (a0 = first_const_call_expr_arg (arg0, &iter0),
                   a1 = first_const_call_expr_arg (arg1, &iter1);
                 a0 && a1;
                 a0 = next_const_call_expr_arg (&iter0),
                   a1 = next_const_call_expr_arg (&iter1))
              if (! operand_equal_p (a0, a1, flags))
                return 0;

            /* If we get here and both argument lists are exhausted
               then the CALL_EXPRs are equal.  */
            return ! (a0 || a1);
          }
        default:
          return 0;
        }

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
              && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
              && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
              && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}
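/* For illustration: for commutative codes both operand orders are
   tried, so operand_equal_p on a + b and b + a returns nonzero when
   a and b are side-effect free, while a - b and b - a do not match.
   With OEP_ONLY_CONST set, only self-evidently constant operands such
   as matching INTEGER_CSTs compare equal.  */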
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
         to match the longer operand.  */
      primarg1 = fold_convert (signed_or_unsigned_type_for
                               (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
        return 1;
    }

  return 0;
}
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
               || code == COMPOUND_EXPR))
    tclass = tcc_binary;

  else if (tclass == tcc_expression && code == SAVE_EXPR
           && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
         too complex to handle.  */
      if (*cval1 || *cval2)
        return 0;

      tclass = tcc_unary;
      *save_p = 1;
    }

  switch (tclass)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
              && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
        return (twoval_comparison_p (TREE_OPERAND (arg, 0),
                                     cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                        cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 2),
                                        cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
         the second operand, we know *CVAL1 can't be zero.  It must be that
         one side of the comparison is each of the values; test for the
         case where this isn't true by failing if the two operands
         are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
                           TREE_OPERAND (arg, 1), 0))
        return 0;

      if (*cval1 == 0)
        *cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
        ;
      else
        return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
        ;
      else
        return 0;

      return 1;

    default:
      return 0;
    }
}
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (location_t loc, tree arg, tree old0, tree new0,
            tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    tclass = tcc_binary;

  switch (tclass)
    {
    case tcc_unary:
      return fold_build1_loc (loc, code, type,
                              eval_subst (loc, TREE_OPERAND (arg, 0),
                                          old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2_loc (loc, code, type,
                              eval_subst (loc, TREE_OPERAND (arg, 0),
                                          old0, new0, old1, new1),
                              eval_subst (loc, TREE_OPERAND (arg, 1),
                                          old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
        {
        case SAVE_EXPR:
          return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
                             old1, new1);

        case COMPOUND_EXPR:
          return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
                             old1, new1);

        case COND_EXPR:
          return fold_build3_loc (loc, code, type,
                                  eval_subst (loc, TREE_OPERAND (arg, 0),
                                              old0, new0, old1, new1),
                                  eval_subst (loc, TREE_OPERAND (arg, 1),
                                              old0, new0, old1, new1),
                                  eval_subst (loc, TREE_OPERAND (arg, 2),
                                              old0, new0, old1, new1));
        default:
          break;
        }
      /* Fall through - ???  */

    case tcc_comparison:
      {
        tree arg0 = TREE_OPERAND (arg, 0);
        tree arg1 = TREE_OPERAND (arg, 1);

        /* We need to check both for exact equality and tree equality.  The
           former will be true if the operand has a side-effect.  In that
           case, we know the operand occurred exactly once.  */

        if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
          arg0 = new0;
        else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
          arg0 = new1;

        if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
          arg1 = new0;
        else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
          arg1 = new1;

        return fold_build2_loc (loc, code, type, arg0, arg1);
      }

    default:
      return arg;
    }
}
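/* For illustration (with hypothetical operands x, y, z, a, b): if ARG
   is (x < y) && (x == z), then eval_subst (loc, arg, x, a, y, b)
   rebuilds it as (a < b) && (a == z), substituting only where OLD0 or
   OLD1 appear as comparison operands.  */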
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement casted to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1_loc (loc, NOP_EXPR, void_type_node,
                       fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2_loc (loc, COMPOUND_EXPR, type,
                       fold_ignored_result (omitted), t);

  return non_lvalue_loc (loc, t);
}
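/* For illustration: when folding f () * 0 to 0, the call still has to
   be evaluated, so omit_one_operand_loc produces the COMPOUND_EXPR
   (f (), 0), whereas omitting a side-effect-free operand just yields
   the converted RESULT.  */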
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands_loc (location_t loc, tree type, tree result,
                       tree omitted1, tree omitted2)
{
  tree t = fold_convert_loc (loc, type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
  if (TREE_SIDE_EFFECTS (omitted1))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
}
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

static tree
fold_truth_not_expr (location_t loc, tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  location_t loc1, loc2;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
          && flag_trapping_math
          && code != ORDERED_EXPR && code != UNORDERED_EXPR
          && code != NE_EXPR && code != EQ_EXPR)
        return NULL_TREE;

      code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
      if (code == ERROR_MARK)
        return NULL_TREE;

      return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
                         TREE_OPERAND (arg, 1));
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_OR_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_AND_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
         unless the second operand is a TRUTH_NOT_EXPR in which case our
         result is the XOR of the first operand with the inside of the
         negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
        return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
                           TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
        return build2_loc (loc, TRUTH_XOR_EXPR, type,
                           invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
                           TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ORIF_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
        tree arg1 = TREE_OPERAND (arg, 1);
        tree arg2 = TREE_OPERAND (arg, 2);

        loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
        loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);

        /* A COND_EXPR may have a throw as one operand, which
           then has void type.  Just leave void operands
           as they are.  */
        return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
                           VOID_TYPE_P (TREE_TYPE (arg1))
                           ? arg1 : invert_truthvalue_loc (loc1, arg1),
                           VOID_TYPE_P (TREE_TYPE (arg2))
                           ? arg2 : invert_truthvalue_loc (loc2, arg2));
      }

    case COMPOUND_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, COMPOUND_EXPR, type,
                         TREE_OPERAND (arg, 0),
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));

    CASE_CONVERT:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
        return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

      /* ... fall through ...  */

    case FLOAT_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, TREE_CODE (arg), type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
        return NULL_TREE;
      return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));

    case SAVE_EXPR:
      return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, CLEANUP_POINT_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    default:
      return NULL_TREE;
    }
}
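/* For illustration: the AND/OR cases above apply De Morgan's laws, so
   !(a && b) becomes !a || !b and !(a || b) becomes !a && !b, with the
   short-circuiting ANDIF/ORIF variants handled the same way.  */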
/* Fold the truth-negation of ARG.  This never alters ARG itself.  We
   assume that ARG is an operation that returns a truth value (0 or 1
   for scalars, 0 or -1 for vectors).  Return the folded expression if
   folding is successful.  Otherwise, return NULL_TREE.  */

static tree
fold_invert_truthvalue (location_t loc, tree arg)
{
  tree type = TREE_TYPE (arg);
  return fold_unary_loc (loc, VECTOR_TYPE_P (type)
                              ? BIT_NOT_EXPR
                              : TRUTH_NOT_EXPR,
                         type, arg);
}
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1 for scalars, 0 or -1 for vectors).  */

tree
invert_truthvalue_loc (location_t loc, tree arg)
{
  if (TREE_CODE (arg) == ERROR_MARK)
    return arg;

  tree type = TREE_TYPE (arg);
  return fold_build1_loc (loc, VECTOR_TYPE_P (type)
                               ? BIT_NOT_EXPR
                               : TRUTH_NOT_EXPR,
                          type, arg);
}
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
        (A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (location_t loc, enum tree_code code, tree type,
                     tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
          && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  common = fold_convert_loc (loc, type, common);
  left = fold_convert_loc (loc, type, left);
  right = fold_convert_loc (loc, type, right);
  return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
                          fold_build2_loc (loc, code, type, left, right));
}
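/* For illustration: with constant operands the inner operation folds
   away entirely, e.g. (x | 3) & (x | 5) distributes to x | (3 & 5)
   and then simplifies to x | 1.  */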
/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
   with code CODE.  This optimization is unsafe.  */

static tree
distribute_real_division (location_t loc, enum tree_code code, tree type,
                          tree arg0, tree arg1)
{
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
                          TREE_OPERAND (arg1, 1), 0))
    return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
                            fold_build2_loc (loc, code, type,
                                             TREE_OPERAND (arg0, 0),
                                             TREE_OPERAND (arg1, 0)),
                            TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
                       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      if (!mul0)
        real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
        real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2_loc (loc, MULT_EXPR, type,
                              TREE_OPERAND (arg0, 0),
                              build_real (type, r0));
    }

  return NULL_TREE;
}
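/* For illustration: a/c + b/c becomes (a + b)/c, and a/c1 + a/c2
   becomes a * (1/c1 + 1/c2) with the constant folded at compile time.
   Either rewrite can change rounding, which is why the comment above
   calls this optimization unsafe.  */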
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

static tree
make_bit_field_ref (location_t loc, tree inner, tree type,
                    HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
{
  tree result, bftype;

  if (bitpos == 0)
    {
      tree size = TYPE_SIZE (TREE_TYPE (inner));
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
           || POINTER_TYPE_P (TREE_TYPE (inner)))
          && tree_fits_shwi_p (size)
          && tree_to_shwi (size) == bitsize)
        return fold_convert_loc (loc, type, inner);
    }

  bftype = type;
  if (TYPE_PRECISION (bftype) != bitsize
      || TYPE_UNSIGNED (bftype) == !unsignedp)
    bftype = build_nonstandard_integer_type (bitsize, 0);

  result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
                       size_int (bitsize), bitsize_int (bitpos));

  if (bftype != type)
    result = fold_convert_loc (loc, type, result);

  return result;
}
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (location_t loc, enum tree_code code,
                            tree compare_type, tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
                                &lunsignedp, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
         sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
                                    &runsignedp, &rvolatilep, false);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
          || lunsignedp != runsignedp || offset != 0
          || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
        return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
                         const_p ? TYPE_ALIGN (TREE_TYPE (linner))
                         : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
                                TYPE_ALIGN (TREE_TYPE (rinner))),
                         word_mode, false);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_cst_type (unsigned_type, -1);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
  mask = const_binop (RSHIFT_EXPR, mask,
                      size_int (nbitsize - lbitsize - lbitpos));

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return fold_build2_loc (loc, code, compare_type,
                            fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
                                             make_bit_field_ref (loc, linner,
                                                                 unsigned_type,
                                                                 nbitsize,
                                                                 nbitpos, 1),
                                             mask),
                            fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
                                             make_bit_field_ref (loc, rinner,
                                                                 unsigned_type,
                                                                 nbitsize,
                                                                 nbitpos, 1),
                                             mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (wi::lrshift (rhs, lbitsize) != 0)
        {
          warning (0, "comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }
  else
    {
      wide_int tem = wi::arshift (rhs, lbitsize - 1);
      if (tem != 0 && tem != -1)
        {
          warning (0, "comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = build_int_cst (type, 0);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);

  rhs = const_binop (BIT_AND_EXPR,
                     const_binop (LSHIFT_EXPR,
                                  fold_convert_loc (loc, unsigned_type, rhs),
                                  size_int (lbitpos)),
                     mask);

  lhs = build2_loc (loc, code, compare_type,
                    build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
  return lhs;
}
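/* For illustration (hypothetical layout): given
     struct s { unsigned f : 3; } x;
   a test like x.f == 5 becomes a mode-sized load compared as
   (load & mask) == (5 << bitpos), avoiding the shift a plain
   bit-field extraction would need.  */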
/* Subroutine for fold_truth_andor_1: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
                        HOST_WIDE_INT *pbitpos, machine_mode *pmode,
                        int *punsignedp, int *pvolatilep,
                        tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (CONVERT_EXPR_P (exp)
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
        return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
                               punsignedp, pvolatilep, false);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst_type (unsigned_type, -1);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
                            fold_convert_loc (loc, unsigned_type, and_mask),
                            mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bit positions and MASK is SIGNED.  */

static bool
all_ones_mask_p (const_tree mask, unsigned int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);

  /* If this function returns true when the type of the mask is
     UNSIGNED, then there will be errors.  In particular see
     gcc.c-torture/execute/990326-1.c.  There does not appear to be
     any documentation paper trail as to why this is so.  But the pre
     wide-int worked with that restriction and it has been preserved
     here.  */
  if (size > precision || TYPE_SIGN (type) == UNSIGNED)
    return false;

  return wi::mask (size, false, precision) == mask;
}
/* Subroutine for fold: determine if VAL is the INTEGER_CST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, const_tree val)
{
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (wi::only_sign_bit_p (val, width))
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
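/* For illustration: for an 8-bit signed type, sign_bit_p returns EXP
   when VAL is 0x80, the constant with only the sign bit set; any
   other constant yields NULL_TREE.  */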
/* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (const_tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  STRIP_NOPS (exp);

  return (CONSTANT_CLASS_P (exp)
          || TREE_CODE (exp) == SSA_NAME
          || (DECL_P (exp)
              && ! TREE_ADDRESSABLE (exp)
              && ! TREE_THIS_VOLATILE (exp)
              && ! DECL_NONLOCAL (exp)
              /* Don't regard global variables as simple.  They may be
                 allocated in ways unknown to the compiler (shared memory,
                 #pragma weak, etc).  */
              && ! TREE_PUBLIC (exp)
              && ! DECL_EXTERNAL (exp)
              /* Weakrefs are not safe to be read, since they can be NULL.
                 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
                 have DECL_WEAK flag set.  */
              && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
              /* Loading a static variable is unduly expensive, but global
                 registers aren't expensive.  */
              && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}
/* Subroutine for fold_truth_andor: determine if an operand is simple enough
   to be evaluated unconditionally.
   In addition to simple_operand_p, we assume that comparisons, conversions,
   and logic-not operations are simple, if their operands are simple, too.  */

static bool
simple_operand_p_2 (tree exp)
{
  enum tree_code code;

  if (TREE_SIDE_EFFECTS (exp)
      || tree_could_trap_p (exp))
    return false;

  while (CONVERT_EXPR_P (exp))
    exp = TREE_OPERAND (exp, 0);

  code = TREE_CODE (exp);

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return (simple_operand_p (TREE_OPERAND (exp, 0))
            && simple_operand_p (TREE_OPERAND (exp, 1)));

  if (code == TRUTH_NOT_EXPR)
    return simple_operand_p_2 (TREE_OPERAND (exp, 0));

  return simple_operand_p (exp);
}
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
        X == 2 || X == 3 || X == 4 || X == 5
   and
        X >= 2 && X <= 5
   are converted to
        (unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
             tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
                         arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);

  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}
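/* For illustration: range_binop (LE_EXPR, type, NULL, 1, c, 1) compares
   an omitted upper bound (treated as +infinity, sgn0 == 1) with the
   finite bound C (sgn1 == 0), so it folds to false; two omitted upper
   bounds get equal sgn values and so compare equal.  */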
/* Helper routine for make_range.  Perform one step for it, return
   new expression if the loop should continue or NULL_TREE if it should
   stop.  */

tree
make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
                 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
                 bool *strict_overflow_p)
{
  tree arg0_type = TREE_TYPE (arg0);
  tree n_low, n_high, low = *p_low, high = *p_high;
  int in_p = *p_in_p, n_in_p;

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      /* We can only do something if the range is testing for zero.  */
      if (low == NULL_TREE || high == NULL_TREE
          || ! integer_zerop (low) || ! integer_zerop (high))
        return NULL_TREE;
      *p_in_p = ! in_p;
      return arg0;

    case EQ_EXPR: case NE_EXPR:
    case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
      /* We can only do something if the range is testing for zero
         and if the second operand is an integer constant.  Note that
         saying something is "in" the range we make is done by
         complementing IN_P since it will set in the initial case of
         being not equal to zero; "out" is leaving it alone.  */
      if (low == NULL_TREE || high == NULL_TREE
          || ! integer_zerop (low) || ! integer_zerop (high)
          || TREE_CODE (arg1) != INTEGER_CST)
        return NULL_TREE;

      switch (code)
        {
        case NE_EXPR:  /* - [c, c]  */
          low = high = arg1;
          break;
        case EQ_EXPR:  /* + [c, c]  */
          in_p = ! in_p, low = high = arg1;
          break;
        case GT_EXPR:  /* - [-, c] */
          low = 0, high = arg1;
          break;
        case GE_EXPR:  /* + [c, -] */
          in_p = ! in_p, low = arg1, high = 0;
          break;
        case LT_EXPR:  /* - [c, -] */
          low = arg1, high = 0;
          break;
        case LE_EXPR:  /* + [-, c] */
          in_p = ! in_p, low = 0, high = arg1;
          break;
        default:
          gcc_unreachable ();
        }

      /* If this is an unsigned comparison, we also know that EXP is
         greater than or equal to zero.  We base the range tests we make
         on that fact, so we record it here so we can parse existing
         range tests.  We test arg0_type since often the return type
         of, e.g. EQ_EXPR, is boolean.  */
      if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
        {
          if (! merge_ranges (&n_in_p, &n_low, &n_high,
                              in_p, low, high, 1,
                              build_int_cst (arg0_type, 0),
                              NULL_TREE))
            return NULL_TREE;

          in_p = n_in_p, low = n_low, high = n_high;

          /* If the high bound is missing, but we have a nonzero low
             bound, reverse the range so it goes from zero to the low bound
             minus 1.  */
          if (high == 0 && low && ! integer_zerop (low))
            {
              in_p = ! in_p;
              high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
                                  build_int_cst (TREE_TYPE (low), 1), 0);
              low = build_int_cst (arg0_type, 0);
            }
        }

      *p_low = low;
      *p_high = high;
      *p_in_p = in_p;
      return arg0;

    case NEGATE_EXPR:
      /* If flag_wrapv and ARG0_TYPE is signed, make sure
         low and high are non-NULL, then normalize will DTRT.  */
      if (!TYPE_UNSIGNED (arg0_type)
          && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
        {
          if (low == NULL_TREE)
            low = TYPE_MIN_VALUE (arg0_type);
          if (high == NULL_TREE)
            high = TYPE_MAX_VALUE (arg0_type);
        }

      /* (-x) IN [a,b] -> x in [-b, -a]  */
      n_low = range_binop (MINUS_EXPR, exp_type,
                           build_int_cst (exp_type, 0),
                           0, high, 1);
      n_high = range_binop (MINUS_EXPR, exp_type,
                            build_int_cst (exp_type, 0),
                            0, low, 0);
      if (n_high != 0 && TREE_OVERFLOW (n_high))
        return NULL_TREE;
      goto normalize;

    case BIT_NOT_EXPR:
      /* ~ X -> -X - 1  */
      return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
                         build_int_cst (exp_type, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
      if (TREE_CODE (arg1) != INTEGER_CST)
        return NULL_TREE;

      /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
         move a constant to the other side.  */
      if (!TYPE_UNSIGNED (arg0_type)
          && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
        return NULL_TREE;

      /* If EXP is signed, any overflow in the computation is undefined,
         so we don't worry about it so long as our computations on
         the bounds don't overflow.  For unsigned, overflow is defined
         and this is exactly the right thing.  */
      n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                           arg0_type, low, 0, arg1, 0);
      n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                            arg0_type, high, 1, arg1, 0);
      if ((n_low != 0 && TREE_OVERFLOW (n_low))
          || (n_high != 0 && TREE_OVERFLOW (n_high)))
        return NULL_TREE;

      if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
        *strict_overflow_p = true;

    normalize:
      /* Check for an unsigned range which has wrapped around the maximum
         value thus making n_high < n_low, and normalize it.  */
      if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
        {
          low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
                             build_int_cst (TREE_TYPE (n_high), 1), 0);
          high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
                              build_int_cst (TREE_TYPE (n_low), 1), 0);

          /* If the range is of the form +/- [ x+1, x ], we won't
             be able to normalize it.  But then, it represents the
             whole range or the empty set, so make it
             +/- [ -, - ].  */
          if (tree_int_cst_equal (n_low, low)
              && tree_int_cst_equal (n_high, high))
            low = high = 0;
          else
            in_p = ! in_p;
        }
      else
        low = n_low, high = n_high;

      *p_low = low;
      *p_high = high;
      *p_in_p = in_p;
      return arg0;

    CASE_CONVERT:
    case NON_LVALUE_EXPR:
      if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
        return NULL_TREE;

      if (! INTEGRAL_TYPE_P (arg0_type)
          || (low != 0 && ! int_fits_type_p (low, arg0_type))
          || (high != 0 && ! int_fits_type_p (high, arg0_type)))
        return NULL_TREE;

      n_low = low, n_high = high;

      if (n_low != 0)
        n_low = fold_convert_loc (loc, arg0_type, n_low);

      if (n_high != 0)
        n_high = fold_convert_loc (loc, arg0_type, n_high);

      /* If we're converting arg0 from an unsigned type, to exp,
         a signed type, we will be doing the comparison as unsigned.
         The tests above have already verified that LOW and HIGH
         are both positive.

         So we have to ensure that we will handle large unsigned
         values the same way that the current signed bounds treat
         negative values.  */

      if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
        {
          tree high_positive;
          tree equiv_type;
          /* For fixed-point modes, we need to pass the saturating flag
             as the 2nd parameter.  */
          if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
            equiv_type
              = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
                                                TYPE_SATURATING (arg0_type));
          else
            equiv_type
              = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);

          /* A range without an upper bound is, naturally, unbounded.
             Since convert would have cropped a very large value, use
             the max value for the destination type.  */
          high_positive
            = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
              : TYPE_MAX_VALUE (arg0_type);

          if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
            high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
                                             fold_convert_loc (loc, arg0_type,
                                                               high_positive),
                                             build_int_cst (arg0_type, 1));

          /* If the low bound is specified, "and" the range with the
             range for which the original unsigned value will be
             positive.  */
          if (low != 0)
            {
              if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
                                  1, fold_convert_loc (loc, arg0_type,
                                                       integer_zero_node),
                                  high_positive))
                return NULL_TREE;

              in_p = (n_in_p == in_p);
            }
          else
            {
              /* Otherwise, "or" the range with the range of the input
                 that will be interpreted as negative.  */
              if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
                                  1, fold_convert_loc (loc, arg0_type,
                                                       integer_zero_node),
                                  high_positive))
                return NULL_TREE;

              in_p = (in_p != n_in_p);
            }
        }

      *p_low = n_low;
      *p_high = n_high;
      *p_in_p = in_p;
      return arg0;

    default:
      return NULL_TREE;
    }
}
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same
   type as the returned expression.  If EXP is not a comparison, we
   will most likely not be returning a useful value and range.  Set
   *STRICT_OVERFLOW_P to true if the return value is only valid
   because signed overflow is undefined; otherwise, do not change
   *STRICT_OVERFLOW_P.  */

tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
            bool *strict_overflow_p)
{
  enum tree_code code;
  tree arg0, arg1 = NULL_TREE;
  tree exp_type, nexp;
  int in_p;
  tree low, high;
  location_t loc = EXPR_LOCATION (exp);

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = build_int_cst (TREE_TYPE (exp), 0);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);
      arg0 = NULL_TREE;

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
        {
          if (TREE_OPERAND_LENGTH (exp) > 0)
            arg0 = TREE_OPERAND (exp, 0);
          if (TREE_CODE_CLASS (code) == tcc_binary
              || TREE_CODE_CLASS (code) == tcc_comparison
              || (TREE_CODE_CLASS (code) == tcc_expression
                  && TREE_OPERAND_LENGTH (exp) > 1))
            arg1 = TREE_OPERAND (exp, 1);
        }
      if (arg0 == NULL_TREE)
        break;

      nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
                              &high, &in_p, strict_overflow_p);
      if (nexp == NULL_TREE)
        break;
      exp = nexp;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
                                                 exp, 0, low, 0))
                      && integer_onep (range_binop (LE_EXPR, integer_type_node,
                                                    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
4171 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4172 type, TYPE, return an expression to test if EXP is in (or out of, depending
4173 on IN_P) the range. Return 0 if the test couldn't be created. */
4176 build_range_check (location_t loc
, tree type
, tree exp
, int in_p
,
4177 tree low
, tree high
)
4179 tree etype
= TREE_TYPE (exp
), value
;
4181 #ifdef HAVE_canonicalize_funcptr_for_compare
4182 /* Disable this optimization for function pointer expressions
4183 on targets that require function pointer canonicalization. */
4184 if (HAVE_canonicalize_funcptr_for_compare
4185 && TREE_CODE (etype
) == POINTER_TYPE
4186 && TREE_CODE (TREE_TYPE (etype
)) == FUNCTION_TYPE
)
4192 value
= build_range_check (loc
, type
, exp
, 1, low
, high
);
4194 return invert_truthvalue_loc (loc
, value
);
4199 if (low
== 0 && high
== 0)
4200 return omit_one_operand_loc (loc
, type
, build_int_cst (type
, 1), exp
);
4203 return fold_build2_loc (loc
, LE_EXPR
, type
, exp
,
4204 fold_convert_loc (loc
, etype
, high
));
4207 return fold_build2_loc (loc
, GE_EXPR
, type
, exp
,
4208 fold_convert_loc (loc
, etype
, low
));
4210 if (operand_equal_p (low
, high
, 0))
4211 return fold_build2_loc (loc
, EQ_EXPR
, type
, exp
,
4212 fold_convert_loc (loc
, etype
, low
));
4214 if (integer_zerop (low
))
4216 if (! TYPE_UNSIGNED (etype
))
4218 etype
= unsigned_type_for (etype
);
4219 high
= fold_convert_loc (loc
, etype
, high
);
4220 exp
= fold_convert_loc (loc
, etype
, exp
);
4222 return build_range_check (loc
, type
, exp
, 1, 0, high
);
4225 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4226 if (integer_onep (low
) && TREE_CODE (high
) == INTEGER_CST
)
4228 int prec
= TYPE_PRECISION (etype
);
4230 if (wi::mask (prec
- 1, false, prec
) == high
)
4232 if (TYPE_UNSIGNED (etype
))
4234 tree signed_etype
= signed_type_for (etype
);
4235 if (TYPE_PRECISION (signed_etype
) != TYPE_PRECISION (etype
))
4237 = build_nonstandard_integer_type (TYPE_PRECISION (etype
), 0);
4239 etype
= signed_etype
;
4240 exp
= fold_convert_loc (loc
, etype
, exp
);
4242 return fold_build2_loc (loc
, GT_EXPR
, type
, exp
,
4243 build_int_cst (etype
, 0));
  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetic for the type of the expression.
     First make sure that arithmetic in this type is valid, then make sure
     that it wraps around.  */
  if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
    etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
                                            TYPE_UNSIGNED (etype));

  if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
         for the type in question, as we rely on this here.  */
      utype = unsigned_type_for (etype);
      maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
                          build_int_cst (TREE_TYPE (maxv), 1), 1);
      minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
                                      minv, 1, maxv, 1)))
        etype = utype;
      else
        return 0;
    }

  high = fold_convert_loc (loc, etype, high);
  low = fold_convert_loc (loc, etype, low);
  exp = fold_convert_loc (loc, etype, exp);

  value = const_binop (MINUS_EXPR, high, low);

  if (POINTER_TYPE_P (etype))
    {
      if (value != 0 && !TREE_OVERFLOW (value))
        {
          low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
          return build_range_check (loc, type,
                                    fold_build_pointer_plus_loc (loc, exp, low),
                                    1, build_int_cst (etype, 0), value);
        }
      return 0;
    }

  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (loc, type,
                              fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
                              1, build_int_cst (etype, 0), value);

  return 0;
}
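/* Illustrative example (not part of the original source): for an
   int C with LOW = 'a' and HIGH = 'z', the code above rewrites

     c >= 'a' && c <= 'z'

   as roughly the single unsigned comparison

     (unsigned) c - 'a' <= 'z' - 'a'

   relying on wrap-around arithmetic in the unsigned type: values
   below 'a' wrap to very large unsigned numbers and fail the test.  */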
/* Return the predecessor of VAL in its type, handling the infinite case.  */

static tree
range_predecessor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
    return 0;
  else
    return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
                        build_int_cst (TREE_TYPE (val), 1), 0);
}
/* Return the successor of VAL in its type, handling the infinite case.  */

static tree
range_successor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
    return 0;
  else
    return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
                        build_int_cst (TREE_TYPE (val), 1), 0);
}
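/* Illustrative example (not part of the original source):
   range_successor on 41 returns 42, while range_successor on
   INT_MAX (of type int) returns 0, signalling "no successor".
   Callers use the 0 return to punt when a range endpoint cannot
   be moved.  */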
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

bool
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
              tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
                  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
                   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
                                 low0, 0, low1, 0))
      || (lowequal
          && integer_onep (range_binop (GT_EXPR, integer_type_node,
                                        high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
                                          high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
                                      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
         is a subset it is the result.  Otherwise, the range is from the start
         of the second to the end of the first.  */
      if (no_overlap)
        in_p = 0, low = high = 0;
      else if (subset)
        in_p = 1, low = low1, high = high1;
      else
        in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
         equal, the result is false.  If the second range is a subset of the
         first, and the ranges begin at the same place, we go from just after
         the end of the second range to the end of the first.  If the second
         range is not a subset of the first, or if it is a subset and both
         ranges end at the same place, the range starts at the start of the
         first range and ends just before the second range.
         Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
        in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
        in_p = 0, low = high = 0;
      else if (subset && lowequal)
        {
          low = range_successor (high1);
          high = high0;
          in_p = 1;
          if (low == 0)
            {
              /* We are in the weird situation where high0 > high1 but
                 high1 has no successor.  Punt.  */
              return 0;
            }
        }
      else if (! subset || highequal)
        {
          low = low0;
          high = range_predecessor (low1);
          in_p = 1;
          if (high == 0)
            {
              /* low0 < low1 but low1 has no predecessor.  Punt.  */
              return 0;
            }
        }
      else
        return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
         is a subset of the first, the result is false.  Otherwise,
         the range starts just after the first range and ends at the
         end of the second.  */
      if (no_overlap)
        in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
        in_p = 0, low = high = 0;
      else
        {
          low = range_successor (high0);
          high = high1;
          in_p = 1;
          if (low == 0)
            {
              /* high1 > high0 but high0 has no successor.  Punt.  */
              return 0;
            }
        }
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
         is if they don't overlap.  In that case, the only time we have a
         range is if they are adjacent.  If the second is a subset of the
         first, the result is the first.  Otherwise, the range to exclude
         starts at the beginning of the first range and ends at the end of
         the second.  */
      if (no_overlap)
        {
          if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                         range_successor (high0),
                                         1, low1, 0)))
            in_p = 0, low = low0, high = high1;
          else
            {
              /* Canonicalize - [min, x] into - [-, x].  */
              if (low0 && TREE_CODE (low0) == INTEGER_CST)
                switch (TREE_CODE (TREE_TYPE (low0)))
                  {
                  case ENUMERAL_TYPE:
                    if (TYPE_PRECISION (TREE_TYPE (low0))
                        != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
                      break;
                    /* FALLTHROUGH */
                  case INTEGER_TYPE:
                    if (tree_int_cst_equal (low0,
                                            TYPE_MIN_VALUE (TREE_TYPE (low0))))
                      low0 = 0;
                    break;
                  case POINTER_TYPE:
                    if (TYPE_UNSIGNED (TREE_TYPE (low0))
                        && integer_zerop (low0))
                      low0 = 0;
                    break;
                  default:
                    break;
                  }

              /* Canonicalize - [x, max] into - [x, -].  */
              if (high1 && TREE_CODE (high1) == INTEGER_CST)
                switch (TREE_CODE (TREE_TYPE (high1)))
                  {
                  case ENUMERAL_TYPE:
                    if (TYPE_PRECISION (TREE_TYPE (high1))
                        != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
                      break;
                    /* FALLTHROUGH */
                  case INTEGER_TYPE:
                    if (tree_int_cst_equal (high1,
                                            TYPE_MAX_VALUE (TREE_TYPE (high1))))
                      high1 = 0;
                    break;
                  case POINTER_TYPE:
                    if (TYPE_UNSIGNED (TREE_TYPE (high1))
                        && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
                                                       high1, 1,
                                                       build_int_cst (TREE_TYPE (high1), 1),
                                                       1)))
                      high1 = 0;
                    break;
                  default:
                    break;
                  }

              /* The ranges might be also adjacent between the maximum and
                 minimum values of the given type.  For
                 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
                 return + [x + 1, y - 1].  */
              if (low0 == 0 && high1 == 0)
                {
                  low = range_successor (high0);
                  high = range_predecessor (low1);
                  if (low == 0 || high == 0)
                    return 0;

                  in_p = 1;
                }
              else
                return 0;
            }
        }
      else if (subset)
        in_p = 0, low = low0, high = high0;
      else
        in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
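/* Illustrative example (not part of the original source): for
   "ch < '0' || ch > '9'" the two operands give the excluded ranges
   - [-, 47] and - [58, -].  They do not overlap and are not
   directly adjacent, but LOW0 == 0 and HIGH1 == 0, so the code
   above returns + [48, 57], i.e. "ch is a digit", which the caller
   then inverts to rebuild the original condition.  */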
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is also used to optimize
   A op B ? C : A, by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (location_t loc, tree type,
                                tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if (!HONOR_SIGNED_ZEROS (element_mode (type))
      && (FLOAT_TYPE_P (TREE_TYPE (arg01))
          ? real_zerop (arg01)
          : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
           && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
             /* In the case that A is of the form X-Y, '-A' (arg2) may
                have already been folded to Y-X, check for that.  */
          || (TREE_CODE (arg1) == MINUS_EXPR
              && TREE_CODE (arg2) == MINUS_EXPR
              && operand_equal_p (TREE_OPERAND (arg1, 0),
                                  TREE_OPERAND (arg2, 1), 0)
              && operand_equal_p (TREE_OPERAND (arg1, 1),
                                  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
        tem = fold_convert_loc (loc, arg1_type, arg1);
        return pedantic_non_lvalue_loc (loc,
                                        fold_convert_loc (loc, type,
                                                          negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
        return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
        if (flag_trapping_math)
          break;
        /* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
        if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
          arg1 = fold_convert_loc (loc, signed_type_for
                                   (TREE_TYPE (arg1)), arg1);
        tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
        return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
        if (flag_trapping_math)
          break;
        /* Fall through.  */
      case LE_EXPR:
      case LT_EXPR:
        if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
          arg1 = fold_convert_loc (loc, signed_type_for
                                   (TREE_TYPE (arg1)), arg1);
        tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
        return negate_expr (fold_convert_loc (loc, type, tem));
      default:
        gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
        break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (!HONOR_SIGNED_ZEROS (element_mode (type))
      && integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
        return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      else if (comp_code == EQ_EXPR)
        return build_zero_cst (type);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (!HONOR_SIGNED_ZEROS (element_mode (type))
      && operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
         as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
          || VECTOR_TYPE_P (type)
          || (strcmp (lang_hooks.name, "GNU C++") != 0
              && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
          || ! maybe_lvalue_p (arg1)
          || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
        {
          comp_type = type;
          comp_op0 = arg1;
          comp_op1 = arg2;
        }

      switch (comp_code)
        {
        case EQ_EXPR:
          return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
        case NE_EXPR:
          return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
        case LE_EXPR:
        case LT_EXPR:
        case UNLE_EXPR:
        case UNLT_EXPR:
          /* In C++ a ?: expression can be an lvalue, so put the
             operand which will be used if they are equal first
             so that we can convert this back to the
             corresponding COND_EXPR.  */
          if (!HONOR_NANS (element_mode (arg1)))
            {
              comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
              comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
              tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
                    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
                    : fold_build2_loc (loc, MIN_EXPR, comp_type,
                                       comp_op1, comp_op0);
              return pedantic_non_lvalue_loc (loc,
                                              fold_convert_loc (loc, type, tem));
            }
          break;
        case GE_EXPR:
        case GT_EXPR:
        case UNGE_EXPR:
        case UNGT_EXPR:
          if (!HONOR_NANS (element_mode (arg1)))
            {
              comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
              comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
              tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
                    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
                    : fold_build2_loc (loc, MAX_EXPR, comp_type,
                                       comp_op1, comp_op0);
              return pedantic_non_lvalue_loc (loc,
                                              fold_convert_loc (loc, type, tem));
            }
          break;
        case UNEQ_EXPR:
          if (!HONOR_NANS (element_mode (arg1)))
            return pedantic_non_lvalue_loc (loc,
                                            fold_convert_loc (loc, type, arg2));
          break;
        case LTGT_EXPR:
          if (!HONOR_NANS (element_mode (arg1)))
            return pedantic_non_lvalue_loc (loc,
                                            fold_convert_loc (loc, type, arg1));
          break;
        default:
          gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
          break;
        }
    }

  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
        if (TREE_CODE (arg1) == INTEGER_CST)
          break;
        /* We can replace A with C1 in this case.  */
        arg1 = fold_convert_loc (loc, type, arg01);
        return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);

      case LT_EXPR:
        /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
           MIN_EXPR, to preserve the signedness of the comparison.  */
        if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (PLUS_EXPR, arg2,
                                             build_int_cst (type, 1)),
                                OEP_ONLY_CONST))
          {
            tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
                                   fold_convert_loc (loc, TREE_TYPE (arg00),
                                                     arg2));
            return pedantic_non_lvalue_loc (loc,
                                            fold_convert_loc (loc, type, tem));
          }
        break;

      case LE_EXPR:
        /* If C1 is C2 - 1, this is min(A, C2), with the same care
           as above.  */
        if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (MINUS_EXPR, arg2,
                                             build_int_cst (type, 1)),
                                OEP_ONLY_CONST))
          {
            tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
                                   fold_convert_loc (loc, TREE_TYPE (arg00),
                                                     arg2));
            return pedantic_non_lvalue_loc (loc,
                                            fold_convert_loc (loc, type, tem));
          }
        break;

      case GT_EXPR:
        /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
           MAX_EXPR, to preserve the signedness of the comparison.  */
        if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (MINUS_EXPR, arg2,
                                             build_int_cst (type, 1)),
                                OEP_ONLY_CONST))
          {
            tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
                                   fold_convert_loc (loc, TREE_TYPE (arg00),
                                                     arg2));
            return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
          }
        break;

      case GE_EXPR:
        /* If C1 is C2 + 1, this is max(A, C2), with the same care as above.  */
        if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (PLUS_EXPR, arg2,
                                             build_int_cst (type, 1)),
                                OEP_ONLY_CONST))
          {
            tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
                                   fold_convert_loc (loc, TREE_TYPE (arg00),
                                                     arg2));
            return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
          }
        break;

      case NE_EXPR:
        break;

      default:
        gcc_unreachable ();
      }

  return NULL_TREE;
}
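/* Illustrative example (not part of the original source): for
   "x < 5 ? x : 4" we have A = x, C1 = 5 and C2 = 4.  Since
   C1 == C2 + 1, the LT_EXPR case above rebuilds the expression as
   MIN_EXPR <x, 4>, recovering a MIN that an earlier fold of
   "x <= 4 ? x : 4" may have obscured.  */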
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
                false) >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (location_t loc, enum tree_code code, tree type,
                 tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
               || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  bool strict_overflow_p = false;
  tree tem, lhs, rhs;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
                                  "when simplifying range test");

  if (!INTEGRAL_TYPE_P (type))
    return 0;

  lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
  rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
                       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (loc, type,
                                         lhs != 0 ? lhs
                                         : rhs != 0 ? rhs : integer_zero_node,
                                         in_p, low, high))))
    {
      if (strict_overflow_p)
        fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
      return or_op ? invert_truthvalue_loc (loc, tem) : tem;
    }

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
           && lhs != 0 && rhs != 0
           && (code == TRUTH_ANDIF_EXPR
               || code == TRUTH_ORIF_EXPR)
           && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
         unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
         which cases we can't do this.  */
      if (simple_operand_p (lhs))
        return build2_loc (loc, code == TRUTH_ANDIF_EXPR
                           ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
                           type, op0, op1);

      else if (!lang_hooks.decls.global_bindings_p ()
               && !CONTAINS_PLACEHOLDER_P (lhs))
        {
          tree common = save_expr (lhs);

          if (0 != (lhs = build_range_check (loc, type, common,
                                             or_op ? ! in0_p : in0_p,
                                             low0, high0))
              && (0 != (rhs = build_range_check (loc, type, common,
                                                 or_op ? ! in1_p : in1_p,
                                                 low1, high1))))
            {
              if (strict_overflow_p)
                fold_overflow_warning (warnmsg,
                                       WARN_STRICT_OVERFLOW_COMPARISON);
              return build2_loc (loc, code == TRUTH_ANDIF_EXPR
                                 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
                                 type, lhs, rhs);
            }
        }
    }

  return 0;
}
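/* Illustrative example (not part of the original source): for
   "ch >= '0' && ch <= '9'" both operands refer to CH, the merged
   range is + [48, 57], and build_range_check turns it into roughly

     (unsigned) (ch - 48) <= 9

   so the combined test needs only one branch instead of two.  */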
/* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is sign-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (signed_type_for (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
                        fold_convert (TREE_TYPE (c), mask));
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
}
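/* Illustrative example (not part of the original source): in an
   8-bit type with P = 4, the sign-extended constant C = 0xfa (the
   4-bit value 0b1010) gives TEMP = 0xf0, and 0xfa ^ 0xf0 = 0x0a,
   so the extra bits end up zero.  For the zero-extended C = 0x0a
   the same TEMP gives 0x0a ^ 0xf0 = 0xfa, whose extra bits are
   nonzero: exactly the property the comment above describes.  */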
/* For an expression that has the form
     (A && B) || ~B
   or
     (A || B) && ~B,
   we can drop one of the inner expressions and simplify to
     A || ~B
   or
     A && ~B

   LOC is the location of the resulting expression.  OP is the inner
   logical operation; the left-hand side in the examples above, while CMPOP
   is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
   removing a condition that guards another, as in
     (A != NULL && A->...) || A == NULL
   which we must not transform.  If RHS_ONLY is true, only eliminate the
   right-most operand of the inner logical operation.  */

static tree
merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
                                 bool rhs_only)
{
  tree type = TREE_TYPE (cmpop);
  enum tree_code code = TREE_CODE (cmpop);
  enum tree_code truthop_code = TREE_CODE (op);
  tree lhs = TREE_OPERAND (op, 0);
  tree rhs = TREE_OPERAND (op, 1);
  tree orig_lhs = lhs, orig_rhs = rhs;
  enum tree_code rhs_code = TREE_CODE (rhs);
  enum tree_code lhs_code = TREE_CODE (lhs);
  enum tree_code inv_code;

  if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
    return NULL_TREE;

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return NULL_TREE;

  if (rhs_code == truthop_code)
    {
      tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
      if (newrhs != NULL_TREE)
        {
          rhs = newrhs;
          rhs_code = TREE_CODE (rhs);
        }
    }
  if (lhs_code == truthop_code && !rhs_only)
    {
      tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
      if (newlhs != NULL_TREE)
        {
          lhs = newlhs;
          lhs_code = TREE_CODE (lhs);
        }
    }

  inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
  if (inv_code == rhs_code
      && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return lhs;
  if (!rhs_only && inv_code == lhs_code
      && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return rhs;
  if (rhs != orig_rhs || lhs != orig_lhs)
    return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
                            lhs, rhs);
  return NULL_TREE;
}
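/* Illustrative example (not part of the original source): for
   OP = "a == b || c != 0" and CMPOP = "a != b", the inverted code
   of CMPOP matches the left arm of OP with identical operands, so
   the function returns "c != 0", letting the caller fold
   "(a == b || c != 0) && a != b" into "c != 0 && a != b".  */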
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
                    tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
        (a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  HOST_WIDE_INT first_bit, end_bit;
  int volatilep;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
                    build_int_cst (TREE_TYPE (lhs), 0));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
                    build_int_cst (TREE_TYPE (rhs), 0));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      if (operand_equal_p (ll_arg, rl_arg, 0)
          && operand_equal_p (lr_arg, rr_arg, 0))
        {
          result = combine_comparisons (loc, code, lcode, rcode,
                                        truth_type, ll_arg, lr_arg);
          if (result)
            return result;
        }
      else if (operand_equal_p (ll_arg, rr_arg, 0)
               && operand_equal_p (lr_arg, rl_arg, 0))
        {
          result = combine_comparisons (loc, code, lcode,
                                        swap_tree_comparison (rcode),
                                        truth_type, ll_arg, lr_arg);
          if (result)
            return result;
        }
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
          ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  */

  if (BRANCH_COST (optimize_function_for_speed_p (cfun),
                   false) >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
          && lcode == NE_EXPR && integer_zerop (lr_arg)
          && rcode == NE_EXPR && integer_zerop (rr_arg)
          && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
          && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
        return build2_loc (loc, NE_EXPR, truth_type,
                           build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
                                   ll_arg, rl_arg),
                           build_int_cst (TREE_TYPE (ll_arg), 0));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
          && lcode == EQ_EXPR && integer_zerop (lr_arg)
          && rcode == EQ_EXPR && integer_zerop (rr_arg)
          && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
          && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
        return build2_loc (loc, EQ_EXPR, truth_type,
                           build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
                                   ll_arg, rl_arg),
                           build_int_cst (TREE_TYPE (ll_arg), 0));
    }
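  /* Illustrative example (not part of the original source): on a
     target where BRANCH_COST >= 2, "a != 0 || b != 0" becomes
     "(a | b) != 0", trading the second conditional branch for one
     cheap OR instruction.  */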
  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (loc, ll_arg,
                                     &ll_bitsize, &ll_bitpos, &ll_mode,
                                     &ll_unsignedp, &volatilep, &ll_mask,
                                     &ll_and_mask);
  lr_inner = decode_field_reference (loc, lr_arg,
                                     &lr_bitsize, &lr_bitpos, &lr_mode,
                                     &lr_unsignedp, &volatilep, &lr_mask,
                                     &lr_and_mask);
  rl_inner = decode_field_reference (loc, rl_arg,
                                     &rl_bitsize, &rl_bitpos, &rl_mode,
                                     &rl_unsignedp, &volatilep, &rl_mask,
                                     &rl_and_mask);
  rr_inner = decode_field_reference (loc, rr_arg,
                                     &rr_bitsize, &rr_bitpos, &rr_mode,
                                     &rr_unsignedp, &volatilep, &rr_mask,
                                     &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
           || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
        {
          /* Make the left operand unsigned, since we are only interested
             in the value of one bit.  Otherwise we are doing the wrong
             thing below.  */
          ll_unsignedp = 1;
          l_const = ll_mask;
        }
      else
        return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
        {
          rl_unsignedp = 1;
          r_const = rl_mask;
        }
      else
        return 0;
    }

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
                          TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
                          volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
                         size_int (xll_bitpos));
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
                         size_int (xrl_bitpos));

  if (l_const)
    {
      l_const = fold_convert_loc (loc, lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
                                        fold_build1_loc (loc, BIT_NOT_EXPR,
                                                         lntype, ll_mask))))
        {
          warning (0, "comparison is always %d", wanted_code == NE_EXPR);

          return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
        }
    }
  if (r_const)
    {
      r_const = fold_convert_loc (loc, lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
                                        fold_build1_loc (loc, BIT_NOT_EXPR,
                                                         lntype, rl_mask))))
        {
          warning (0, "comparison is always %d", wanted_code == NE_EXPR);

          return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
        }
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
          || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
          /* Make sure the two fields on the right
             correspond to the left without being swapped.  */
          || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
        return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
                              TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
                              volatilep);
      if (rnmode == VOIDmode)
        return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
        {
          xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
          xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
        }

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
                                                            rntype, lr_mask),
                             size_int (xlr_bitpos));
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
                                                            rntype, rr_mask),
                             size_int (xrr_bitpos));

      /* Make a mask that corresponds to both fields being compared.
         Do this for both items being compared.  If the operands are the
         same size and the bits being compared are in the same position
         then we can do this by masking both and comparing the masked
         results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
        {
          lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
                                    ll_unsignedp || rl_unsignedp);
          if (! all_ones_mask_p (ll_mask, lnbitsize))
            lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

          rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
                                    lr_unsignedp || rr_unsignedp);
          if (! all_ones_mask_p (lr_mask, rnbitsize))
            rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

          return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
        }

      /* There is still another way we can do something:  If both pairs of
         fields being compared are adjacent, we may be able to make a wider
         field containing them both.

         Note that we still must mask the lhs/rhs expressions.  Furthermore,
         the mask must be shifted to account for the shift done by
         make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
           && lr_bitsize + lr_bitpos == rr_bitpos)
          || (ll_bitpos == rl_bitpos + rl_bitsize
              && lr_bitpos == rr_bitpos + rr_bitsize))
        {
          tree type;

          lhs = make_bit_field_ref (loc, ll_inner, lntype,
                                    ll_bitsize + rl_bitsize,
                                    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
          rhs = make_bit_field_ref (loc, lr_inner, rntype,
                                    lr_bitsize + rr_bitsize,
                                    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

          ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
                                 size_int (MIN (xll_bitpos, xrl_bitpos)));
          lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
                                 size_int (MIN (xlr_bitpos, xrr_bitpos)));

          /* Convert to the smaller type before masking out unwanted bits.  */
          type = lntype;
          if (lntype != rntype)
            {
              if (lnbitsize > rnbitsize)
                {
                  lhs = fold_convert_loc (loc, rntype, lhs);
                  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
                  type = rntype;
                }
              else if (lnbitsize < rnbitsize)
                {
                  rhs = fold_convert_loc (loc, lntype, rhs);
                  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
                  type = lntype;
                }
            }

          if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
            lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

          if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
            rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

          return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
        }

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
                           const_binop (BIT_AND_EXPR, result, r_const)) != 1)
    {
      if (wanted_code == NE_EXPR)
        {
          warning (0, "%<or%> of unmatched not-equal tests is always 1");
          return constant_boolean_node (true, truth_type);
        }
      else
        {
          warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
          return constant_boolean_node (false, truth_type);
        }
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
                               ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);

  return build2_loc (loc, wanted_code, truth_type, result,
                     const_binop (BIT_IOR_EXPR, l_const, r_const));
}
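/* Illustrative example (not part of the original source): given

     struct s { unsigned a : 4; unsigned b : 4; } *p;

   the test "p->a == 2 && p->b == 4" can be folded by the code
   above into a single load, mask and compare of the containing
   byte, roughly "(*(unsigned char *) p) == 0x42", modulo
   endianness and padding.  */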
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
                            tree op0, tree op1)
{
  tree arg0 = op0;
  enum tree_code op_code;
  tree comp_const;
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_OVERFLOW (minmax_const))
    return NULL_TREE;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (code)
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      {
        tree tem
          = optimize_minmax_comparison (loc,
                                        invert_tree_comparison (code, false),
                                        type, op0, op1);
        if (tem)
          return invert_truthvalue_loc (loc, tem);
        return NULL_TREE;
      }

    case GE_EXPR:
      return
        fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
                         optimize_minmax_comparison
                         (loc, EQ_EXPR, type, arg0, comp_const),
                         optimize_minmax_comparison
                         (loc, GT_EXPR, type, arg0, comp_const));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
        /* MAX (X, 0) == 0  ->  X <= 0  */
        return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR && consts_lt)
        /* MAX (X, 0) == 5  ->  X == 5  */
        return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
        /* MAX (X, 0) == -1  ->  false  */
        return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else if (consts_equal)
        /* MIN (X, 0) == 0  ->  X >= 0  */
        return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);

      else if (consts_lt)
        /* MIN (X, 0) == 5  ->  false  */
        return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
        /* MIN (X, 0) == -1  ->  X == -1  */
        return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
        /* MAX (X, 0) > 0  ->  X > 0
           MAX (X, 0) > 5  ->  X > 5  */
        return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
        /* MAX (X, 0) > -1  ->  true  */
        return omit_one_operand_loc (loc, type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
        /* MIN (X, 0) > 0  ->  false
           MIN (X, 0) > 5  ->  false  */
        return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
        /* MIN (X, 0) > -1  ->  X > -1  */
        return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

    default:
      return NULL_TREE;
    }
}
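/* Illustrative example (not part of the original source):
   "MAX (x, 3) >= 5" is handled by the GE_EXPR case above as
   "MAX (x, 3) == 5 || MAX (x, 3) > 5", which the EQ_EXPR and
   GT_EXPR cases reduce to "x == 5 || x > 5", i.e. "x >= 5".  */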
/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.

   We set *STRICT_OVERFLOW_P to true if the return value depends on
   signed overflow being undefined.  Otherwise we do not change
   *STRICT_OVERFLOW_P.  */

static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
                bool *strict_overflow_p)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  static int depth;
  tree ret;

  if (depth > 3)
    return NULL;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
  depth--;

  return ret;
}

static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
                  bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
                                   > GET_MODE_SIZE (TYPE_MODE (type)))
                ? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;
  bool sub_strict_overflow_p;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
         or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
          || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
        return const_binop (code, fold_convert (ctype, t),
                            fold_convert (ctype, c));
      break;

    CASE_CONVERT: case NON_LVALUE_EXPR:
      /* If op0 is an expression ...  */
      if ((COMPARISON_CLASS_P (op0)
           || UNARY_CLASS_P (op0)
           || BINARY_CLASS_P (op0)
           || VL_EXP_CLASS_P (op0)
           || EXPRESSION_CLASS_P (op0))
          /* ... and has wrapping overflow, and its type is smaller
             than ctype, then we cannot pass through as widening.  */
          && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
               && (TYPE_PRECISION (ctype)
                   > TYPE_PRECISION (TREE_TYPE (op0))))
              /* ... or this is a truncation (t is narrower than op0),
                 then we cannot pass through this narrowing.  */
              || (TYPE_PRECISION (type)
                  < TYPE_PRECISION (TREE_TYPE (op0)))
              /* ... or signedness changes for division or modulus,
                 then we cannot pass through this conversion.  */
              || (code != MULT_EXPR
                  && (TYPE_UNSIGNED (ctype)
                      != TYPE_UNSIGNED (TREE_TYPE (op0))))
              /* ... or has undefined overflow while the converted to
                 type has not, we cannot do the operation in the inner type
                 as that would introduce undefined overflow.  */
              || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
                  && !TYPE_OVERFLOW_UNDEFINED (type))))
        break;

      /* Pass the constant down and see if we can make a simplification.  If
         we can, replace this expression with the inner simplification for
         possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
          && TREE_CODE (t2) == INTEGER_CST
          && !TREE_OVERFLOW (t2)
          && (0 != (t1 = extract_muldiv (op0, t2, code,
                                         code == MULT_EXPR
                                         ? ctype : NULL_TREE,
                                         strict_overflow_p))))
        return t1;
      break;

    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
         must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
        {
          tree cstype = (*signed_type_for) (ctype);
          if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
              != 0)
            {
              t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
              return fold_convert (ctype, t1);
            }
          break;
        }
      /* If the constant is negative, we cannot simplify this.  */
      if (tree_int_cst_sgn (c) == -1)
        break;
      /* FALLTHROUGH */
    case NEGATE_EXPR:
      /* For division and modulus, type can't be unsigned, as e.g.
         (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
         For signed types, even with wrapping overflow, this is fine.  */
      if (code != MULT_EXPR && TYPE_UNSIGNED (type))
        break;
      if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
          != 0)
        return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
         this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
        break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      sub_strict_overflow_p = false;
      if ((t1 = extract_muldiv (op0, c, code, wide_type,
                                &sub_strict_overflow_p)) != 0
          && (t2 = extract_muldiv (op1, c, code, wide_type,
                                   &sub_strict_overflow_p)) != 0)
        {
          if (tree_int_cst_sgn (c) < 0)
            tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
                              fold_convert (ctype, t2));
        }
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
         or floor division, by a power of two, so we can treat it that
         way unless the multiplier or divisor overflows.  Signed
         left-shift overflow is implementation-defined rather than
         undefined in C90, so do not convert signed left shift into
         multiplication.  */
      if (TREE_CODE (op1) == INTEGER_CST
          && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
          /* const_binop may not detect overflow correctly,
             so check for it explicitly here.  */
          && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
          && 0 != (t1 = fold_convert (ctype,
                                      const_binop (LSHIFT_EXPR,
                                                   size_one_node,
                                                   op1)))
          && !TREE_OVERFLOW (t1))
        return extract_muldiv (build2 (tcode == LSHIFT_EXPR
                                       ? MULT_EXPR : FLOOR_DIV_EXPR,
                                       ctype,
                                       fold_convert (ctype, op0),
                                       t1),
                               c, code, wide_type, strict_overflow_p);
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
         can return a new PLUS or MINUS.  If we can't, the only remaining
         cases where we can do anything are if the second operand is a
         constant.  */
      sub_strict_overflow_p = false;
      t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
      t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
      if (t1 != 0 && t2 != 0
          && (code == MULT_EXPR
              /* If not multiplication, we can only do this if both operands
                 are divisible by c.  */
              || (multiple_of_p (ctype, op0, c)
                  && multiple_of_p (ctype, op1, c))))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
                              fold_convert (ctype, t2));
        }

      /* If this was a subtraction, negate OP1 and set it to be an addition.
         This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
        {
          tcode = PLUS_EXPR, op1 = negate_expr (op1);
          /* If OP1 was not easily negatable, the constant may be OP0.  */
          if (TREE_CODE (op0) == INTEGER_CST)
            {
              tree tem = op0;
              op0 = op1;
              op1 = tem;
              tem = t1;
              t1 = t2;
              t2 = tem;
            }
        }

      if (TREE_CODE (op1) != INTEGER_CST)
        break;

      /* If either OP1 or C are negative, this optimization is not safe for
         some of the division and remainder types while for others we need
         to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
        {
          if (code == CEIL_DIV_EXPR)
            code = FLOOR_DIV_EXPR;
          else if (code == FLOOR_DIV_EXPR)
            code = CEIL_DIV_EXPR;
          else if (code != MULT_EXPR
                   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
            break;
        }

      /* If it's a multiply or a division/modulus operation of a multiple
         of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
          || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
        {
          op1 = const_binop (code, fold_convert (ctype, op1),
                             fold_convert (ctype, c));
          /* We allow the constant to overflow with wrapping semantics.  */
          if (op1 == 0
              || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
            break;
        }
      else
        break;

      /* If we have an unsigned type, we cannot widen the operation since it
         will change the result if the original computation overflowed.  */
      if (TYPE_UNSIGNED (ctype) && ctype != type)
        break;

      /* If we were able to eliminate our operation from the first side,
         apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
        return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
         apply the distributive law to commute the multiply and addition
         if the multiplication of the constants doesn't overflow
         and overflow is defined.  With undefined overflow
         op0 * c might overflow, while (op0 + orig_op1) * c doesn't.  */
      if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
        return fold_build2 (tcode, ctype,
                            fold_build2 (code, ctype,
                                         fold_convert (ctype, op0),
                                         fold_convert (ctype, c)),
                            op1);

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
         (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
           || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
          /* If the multiplication can overflow we cannot optimize this.  */
          && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
          && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
          && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
        {
          *strict_overflow_p = true;
          return omit_one_operand (type, integer_zero_node, op0);
        }

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
         new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
         do something only if the second operand is a constant.  */
      if (same_p
          && (t1 = extract_muldiv (op0, c, code, wide_type,
                                   strict_overflow_p)) != 0)
        return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
                            fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
               && (t1 = extract_muldiv (op1, c, code, wide_type,
                                        strict_overflow_p)) != 0)
        return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
                            fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
        return 0;

      /* If these are the same operation types, we can associate them
         assuming no overflow.  */
      if (tcode == code)
        {
          bool overflow_p = false;
          bool overflow_mul_p;
          signop sign = TYPE_SIGN (ctype);
          wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
          overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
          if (overflow_mul_p
              && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
            overflow_p = true;
          if (!overflow_p)
            return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
                                wide_int_to_tree (ctype, mul));
        }

      /* If these operations "cancel" each other, we have the main
         optimizations of this pass, which occur when either constant is a
         multiple of the other, in which case we replace this with an
         operation of either CODE or TCODE.

         If we have an unsigned type, we cannot do this since it will change
         the result if the original computation overflowed.  */
      if (TYPE_OVERFLOW_UNDEFINED (ctype)
          && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
              || (tcode == MULT_EXPR
                  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
                  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
                  && code != MULT_EXPR)))
        {
          if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
            {
              if (TYPE_OVERFLOW_UNDEFINED (ctype))
                *strict_overflow_p = true;
              return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
                                  fold_convert (ctype,
                                                const_binop (TRUNC_DIV_EXPR,
                                                             op1, c)));
            }
          else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
            {
              if (TYPE_OVERFLOW_UNDEFINED (ctype))
                *strict_overflow_p = true;
              return fold_build2 (code, ctype, fold_convert (ctype, op0),
                                  fold_convert (ctype,
                                                const_binop (TRUNC_DIV_EXPR,
                                                             c, op1)));
            }
        }
      break;

    default:
      break;
    }

  return 0;
}
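/* Illustrative example (not part of the original source): for
   T = (x * 8) % 4 with signed x, the MULT_EXPR case above proves
   the result is zero because 8 is a multiple of 4 and signed
   overflow is undefined, so *STRICT_OVERFLOW_P is set and zero is
   returned.  Similarly, (x * 8 + y * 16) / 4 distributes to
   x * 2 + y * 4 via the PLUS_EXPR case.  */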
/* Return a node which has the indicated constant VALUE (either 0 or
   1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
   and is of the indicated TYPE.  */

tree
constant_boolean_node (bool value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else if (TREE_CODE (type) == VECTOR_TYPE)
    return build_vector_from_val (type,
                                  build_int_cst (TREE_TYPE (type),
                                                 value ? -1 : 0));
  else
    return fold_convert (type, value ? integer_one_node : integer_zero_node);
}

/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (location_t loc,
				     enum tree_code code,
				     tree type, tree op0, tree op1,
				     tree cond, tree arg, int cond_first_p)
{
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;
  enum tree_code cond_code = COND_EXPR;

  if (TREE_CODE (cond) == COND_EXPR
      || TREE_CODE (cond) == VEC_COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an exception, then it does not make
	 sense to try to perform a logical or arithmetic operation
	 involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	rhs = false_value;
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
    cond_code = VEC_COND_EXPR;

  /* This transformation is only worthwhile if we don't have to wrap ARG
     in a SAVE_EXPR and the operation can be simplified without recursing
     on at least one of the branches once its pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg)
      && (TREE_SIDE_EFFECTS (arg)
	  || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
	  || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
    return NULL_TREE;

  arg = fold_convert_loc (loc, arg_type, arg);
  if (lhs == 0)
    {
      true_value = fold_convert_loc (loc, cond_type, true_value);
      if (cond_first_p)
	lhs = fold_build2_loc (loc, code, type, true_value, arg);
      else
	lhs = fold_build2_loc (loc, code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert_loc (loc, cond_type, false_value);
      if (cond_first_p)
	rhs = fold_build2_loc (loc, code, type, false_value, arg);
      else
	rhs = fold_build2_loc (loc, code, type, arg, false_value);
    }

  /* Check that we have simplified at least one of the branches.  */
  if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
    return NULL_TREE;

  return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
}
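
/* Worked example: with CODE == PLUS_EXPR, ARG == 1 and
   COND == (b ? 2 : 3), both branches fold to constants, so
   1 + (b ? 2 : 3) becomes the simpler tree b ? 3 : 4.  */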

/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

bool
fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (element_mode (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (element_mode (type)))
    return true;

  /* In a vector or complex, we would need to check the sign of all zeros.  */
  if (TREE_CODE (addend) != REAL_CST)
    return false;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
}
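
/* For example, X - 0.0 folds to X under the default flags, but X + 0.0
   does not unless -fno-signed-zeros is in effect, because (-0.0) + 0.0
   is +0.0; with -frounding-math even X - 0.0 must be kept, since
   0.0 - 0.0 is -0.0 when rounding towards -infinity.  */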

/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_mathfn_compare (location_t loc,
		     enum built_in_function fcode, enum tree_code code,
		     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      tree arg = CALL_EXPR_ARG (arg0, 0);
      machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
	{
	  /* sqrt(x) < y is always false, if y is negative.  */
	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
	    return omit_one_operand_loc (loc, type, integer_zero_node, arg);

	  /* sqrt(x) > y is always true, if y is negative and we
	     don't care about NaNs, i.e. negative values of x.  */
	  if (code == NE_EXPR || !HONOR_NANS (mode))
	    return omit_one_operand_loc (loc, type, integer_one_node, arg);

	  /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
	  return fold_build2_loc (loc, GE_EXPR, type, arg,
				  build_real (TREE_TYPE (arg), dconst0));
	}
      else if (code == GT_EXPR || code == GE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) > y is x == +Inf, when y is very large.  */
	      if (HONOR_INFINITIES (mode))
		return fold_build2_loc (loc, EQ_EXPR, type, arg,
					build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) > y is always false, when y is very large
		 and we don't care about infinities.  */
	      return omit_one_operand_loc (loc, type, integer_zero_node, arg);
	    }

	  /* sqrt(x) > c is the same as x > c*c.  */
	  return fold_build2_loc (loc, code, type, arg,
				  build_real (TREE_TYPE (arg), c2));
	}
      else if (code == LT_EXPR || code == LE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) < y is always true, when y is a very large
		 value and we don't care about NaNs or Infinities.  */
	      if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
		return omit_one_operand_loc (loc, type, integer_one_node, arg);

	      /* sqrt(x) < y is x != +Inf when y is very large and we
		 don't care about NaNs.  */
	      if (! HONOR_NANS (mode))
		return fold_build2_loc (loc, NE_EXPR, type, arg,
					build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) < y is x >= 0 when y is very large and we
		 don't care about Infinities.  */
	      if (! HONOR_INFINITIES (mode))
		return fold_build2_loc (loc, GE_EXPR, type, arg,
					build_real (TREE_TYPE (arg), dconst0));

	      /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
	      arg = save_expr (arg);
	      return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				      fold_build2_loc (loc, GE_EXPR, type, arg,
						       build_real (TREE_TYPE (arg),
								   dconst0)),
				      fold_build2_loc (loc, NE_EXPR, type, arg,
						       build_real (TREE_TYPE (arg),
								   c2)));
	    }

	  /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
	  if (! HONOR_NANS (mode))
	    return fold_build2_loc (loc, code, type, arg,
				    build_real (TREE_TYPE (arg), c2));

	  /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
	  arg = save_expr (arg);
	  return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				  fold_build2_loc (loc, GE_EXPR, type, arg,
						   build_real (TREE_TYPE (arg),
							       dconst0)),
				  fold_build2_loc (loc, code, type, arg,
						   build_real (TREE_TYPE (arg),
							       c2)));
	}
    }

  return NULL_TREE;
}

/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_inf_compare (location_t loc, enum tree_code code, tree type,
		  tree arg0, tree arg1)
{
  machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
	return NULL_TREE;
      return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
      arg0 = save_expr (arg0);
      return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
	return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
				arg0, build_real (TREE_TYPE (arg0), max));

      temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));
      return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);

    default:
      break;
    }

  return NULL_TREE;
}
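
/* For example, for double x the comparison x < __builtin_inf ()
   becomes x <= DBL_MAX, and x >= __builtin_inf () becomes
   x > DBL_MAX; for -Inf the sense of the comparison is swapped
   before the same rewrites are applied.  */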

/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_div_compare (location_t loc,
		  enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  signop sign = TYPE_SIGN (TREE_TYPE (arg0));
  bool neg_overflow = false;
  bool overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1);  */
  wide_int val = wi::mul (arg01, arg1, sign, &overflow);
  prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
  neg_overflow = false;

  if (sign == UNSIGNED)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp).  */
      val = wi::add (prod, tmp, sign, &overflow);
      hi = force_fit_type (TREE_TYPE (arg00), val,
			   -1, overflow | TREE_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  neg_overflow = true;
	  lo = int_const_binop (MINUS_EXPR, prod, tmp);
	  hi = prod;
	  break;

	case  0:
	  lo = fold_negate_const (tmp, TREE_TYPE (arg0));
	  hi = tmp;
	  break;

	case  1:
	  hi = int_const_binop (PLUS_EXPR, prod, tmp);
	  lo = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  hi = int_const_binop (MINUS_EXPR, prod, tmp);
	  lo = prod;
	  break;

	case  0:
	  hi = fold_negate_const (tmp, TREE_TYPE (arg0));
	  lo = tmp;
	  break;

	case  1:
	  neg_overflow = true;
	  lo = int_const_binop (PLUS_EXPR, prod, tmp);
	  hi = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand_loc (loc, type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}
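
/* Worked example: for unsigned int x, the test

     x / 3 == 2

   has prod = 3 * 2 = 6 and hi = prod + (3 - 1) = 8, so it folds to
   the range check 6 <= x && x <= 8 via build_range_check.  */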

/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of the test
   using a sign test.  Otherwise return NULL.  TYPE is the desired
   result type.  */

static tree
fold_single_bit_test_into_sign_test (location_t loc,
				     enum tree_code code, tree arg0, tree arg1,
				     tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      /* If we have (A & C) != 0 where C is the sign bit of A, convert
	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      if (arg00 != NULL_TREE
	  /* This is only a win if casting to a signed type is cheap,
	     i.e. when arg00's type is not a partial mode.  */
	  && TYPE_PRECISION (TREE_TYPE (arg00))
	     == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
	{
	  tree stype = signed_type_for (TREE_TYPE (arg00));
	  return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
				  result_type,
				  fold_convert_loc (loc, stype, arg00),
				  build_int_cst (stype, 0));
	}
    }

  return NULL_TREE;
}
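
/* For example, for a 32-bit int x the mask in (x & 0x80000000) != 0
   is exactly the sign bit, so the test is rewritten as x < 0, and
   (x & 0x80000000) == 0 becomes x >= 0.  */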

/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */

tree
fold_single_bit_test (location_t loc, enum tree_code code,
		      tree arg0, tree arg1, tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem, one;

      /* First, see if we can fold the single bit test into a sign-bit
	 test.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
						 result_type);
      if (tem)
	return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
	 convert that into ((A >> C2) & 1).  Where C2 = log2(C).
	 Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && bitnum < TYPE_PRECISION (type)
	  && wi::ltu_p (TREE_OPERAND (inner, 1),
			TYPE_PRECISION (type) - bitnum))
	{
	  bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
		      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert_loc (loc, intermediate_type, inner);

      if (bitnum != 0)
	inner = build2 (RSHIFT_EXPR, intermediate_type,
			inner, size_int (bitnum));

      one = build_int_cst (intermediate_type, 1);

      if (code == EQ_EXPR)
	inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);

      /* Make sure to return the proper type.  */
      inner = fold_convert_loc (loc, result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
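
/* For example, (x & 8) != 0 tests bit 3, so it becomes
   ((x >> 3) & 1) computed in an intermediate type; for (x & 8) == 0
   the shifted bit is additionally XORed with 1 before the final AND.  */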

/* Check whether we are allowed to reorder operands arg0 and arg1,
   such that the evaluation of arg1 occurs before arg0.  */

static bool
reorder_operands_p (const_tree arg0, const_tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
	 && ! TREE_SIDE_EFFECTS (arg1);
}

/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */

bool
tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
{
  if (CONSTANT_CLASS_P (arg1))
    return 0;
  if (CONSTANT_CLASS_P (arg0))
    return 1;

  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return 1;

  /* Put SSA_NAMEs last.  */
  if (TREE_CODE (arg1) == SSA_NAME)
    return 0;
  if (TREE_CODE (arg0) == SSA_NAME)
    return 1;

  /* Put variables last.  */
  if (DECL_P (arg1))
    return 0;
  if (DECL_P (arg0))
    return 1;

  return 0;
}

/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
   ARG0 is extended to a wider type.  */

static tree
fold_widened_comparison (location_t loc, enum tree_code code,
			 tree type, tree arg0, tree arg1)
{
  tree arg0_unw = get_unwidened (arg0, NULL_TREE);
  tree arg1_unw;
  tree shorter_type, outer_type;
  tree min, max;
  bool above, below;

  if (arg0_unw == arg0)
    return NULL_TREE;
  shorter_type = TREE_TYPE (arg0_unw);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (shorter_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
    return NULL_TREE;

  arg1_unw = get_unwidened (arg1, NULL_TREE);

  /* If possible, express the comparison in the shorter mode.  */
  if ((code == EQ_EXPR || code == NE_EXPR
       || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
      && (TREE_TYPE (arg1_unw) == shorter_type
	  || ((TYPE_PRECISION (shorter_type)
	       >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
	      && (TYPE_UNSIGNED (shorter_type)
		  == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
	  || (TREE_CODE (arg1_unw) == INTEGER_CST
	      && (TREE_CODE (shorter_type) == INTEGER_TYPE
		  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
	      && int_fits_type_p (arg1_unw, shorter_type))))
    return fold_build2_loc (loc, code, type, arg0_unw,
			    fold_convert_loc (loc, shorter_type, arg1_unw));

  if (TREE_CODE (arg1_unw) != INTEGER_CST
      || TREE_CODE (shorter_type) != INTEGER_TYPE
      || !int_fits_type_p (arg1_unw, shorter_type))
    return NULL_TREE;

  /* If we are comparing with an integer that does not fit into the range
     of the shorter type, the result is known.  */
  outer_type = TREE_TYPE (arg1_unw);
  min = lower_bound_in_type (outer_type, shorter_type);
  max = upper_bound_in_type (outer_type, shorter_type);

  above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   max, arg1_unw));
  below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   arg1_unw, min));

  switch (code)
    {
    case EQ_EXPR:
      if (above || below)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;

    case NE_EXPR:
      if (above || below)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;

    case LT_EXPR:
    case LE_EXPR:
      if (above)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      else if (below)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;

    case GT_EXPR:
    case GE_EXPR:
      if (above)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      else if (below)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
   ARG0 just the signedness is changed.  */

static tree
fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
			      tree arg0, tree arg1)
{
  tree arg0_inner;
  tree inner_type, outer_type;

  if (!CONVERT_EXPR_P (arg0))
    return NULL_TREE;

  outer_type = TREE_TYPE (arg0);
  arg0_inner = TREE_OPERAND (arg0, 0);
  inner_type = TREE_TYPE (arg0_inner);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (inner_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) != INTEGER_CST
      && !(CONVERT_EXPR_P (arg1)
	   && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
    return NULL_TREE;

  if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
      && code != NE_EXPR
      && code != EQ_EXPR)
    return NULL_TREE;

  if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) == INTEGER_CST)
    arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
			   TREE_OVERFLOW (arg1));
  else
    arg1 = fold_convert_loc (loc, inner_type, arg1);

  return fold_build2_loc (loc, code, type, arg0_inner, arg1);
}

/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */

static tree
fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
{
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;

  if (TREE_CODE (bound) == LT_EXPR)
    a = TREE_OPERAND (bound, 0);
  else if (TREE_CODE (bound) == GT_EXPR)
    a = TREE_OPERAND (bound, 1);
  else
    return NULL_TREE;

  typea = TREE_TYPE (a);
  if (!INTEGRAL_TYPE_P (typea)
      && !POINTER_TYPE_P (typea))
    return NULL_TREE;

  if (TREE_CODE (ineq) == LT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 1);
      y = TREE_OPERAND (ineq, 0);
    }
  else if (TREE_CODE (ineq) == GT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 0);
      y = TREE_OPERAND (ineq, 1);
    }
  else
    return NULL_TREE;

  if (TREE_TYPE (a1) != typea)
    return NULL_TREE;

  if (POINTER_TYPE_P (typea))
    {
      /* Convert the pointer types into integer types before taking the
	 difference.  */
      tree ta = fold_convert_loc (loc, ssizetype, a);
      tree ta1 = fold_convert_loc (loc, ssizetype, a1);
      diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
    }
  else
    diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);

  if (!diff || !integer_onep (diff))
    return NULL_TREE;

  return fold_build2_loc (loc, GE_EXPR, type, a, y);
}
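
/* For example, in a < n && a + 1 > b the second operand is rewritten
   to a >= b, which is safe because the first operand already bounds
   a away from the maximum value of its type.  */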

/* Fold a sum or difference of at least one multiplication.
   Returns the folded tree or NULL if no simplification could be made.  */

static tree
fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
			  tree arg0, tree arg1)
{
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else if (TREE_CODE (arg0) == INTEGER_CST)
    {
      arg00 = build_one_cst (type);
      arg01 = arg0;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      arg00 = arg0;
      arg01 = build_one_cst (type);
    }
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else if (TREE_CODE (arg1) == INTEGER_CST)
    {
      arg10 = build_one_cst (type);
      /* As we canonicalize A - 2 to A + -2 get rid of that sign for
	 the purpose of this canonicalization.  */
      if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
	  && negate_expr_p (arg1)
	  && code == PLUS_EXPR)
	{
	  arg11 = negate_expr (arg1);
	  code = MINUS_EXPR;
	}
      else
	arg11 = arg1;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      arg10 = arg1;
      arg11 = build_one_cst (type);
    }
  same = NULL_TREE;

  if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
  else if (tree_fits_shwi_p (arg01)
	   && tree_fits_shwi_p (arg11))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = tree_to_shwi (arg01);
      int11 = tree_to_shwi (arg11);

      /* Move min of absolute values to int11.  */
      if (absu_hwi (int01) < absu_hwi (int11))
	{
	  tmp = int01, int01 = int11, int11 = tmp;
	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
	  maybe_same = alt0;
	  swap = true;
	}
      else
	maybe_same = arg11;

      if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
	  /* The remainder should not be a constant, otherwise we
	     end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
	     increased the number of multiplications necessary.  */
	  && TREE_CODE (arg10) != INTEGER_CST)
	{
	  alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
				  build_int_cst (TREE_TYPE (arg00),
						 int01 / int11));
	  alt1 = arg10;
	  same = maybe_same;
	  if (swap)
	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
	}
    }

  if (same)
    return fold_build2_loc (loc, MULT_EXPR, type,
			    fold_build2_loc (loc, code, type,
					     fold_convert_loc (loc, type, alt0),
					     fold_convert_loc (loc, type, alt1)),
			    fold_convert_loc (loc, type, same));

  return NULL_TREE;
}
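
/* For example, x * 3 + x * 5 folds to (3 + 5) * x, i.e. x * 8, and
   the power-of-two path rewrites i * 4 + j * 2 as (i * 2 + j) * 2,
   exposing the common factor without adding a multiplication.  */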

/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  if ((off == -1 && total_bytes > len)
      || off >= total_bytes)
    return 0;
  if (off == -1)
    off = 0;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
	 number of bytes.  */
      value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);

      if (total_bytes > UNITS_PER_WORD)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      if (offset >= off
	  && offset - off < len)
	ptr[offset - off] = value;
    }
  return MIN (len, total_bytes - off);
}
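
/* For example, encoding the 32-bit INTEGER_CST 0x01020304 stores the
   bytes 04 03 02 01 into PTR on a little-endian target and
   01 02 03 04 on a big-endian one.  */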

/* Subroutine of native_encode_expr.  Encode the FIXED_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
{
  tree type = TREE_TYPE (expr);
  machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  FIXED_VALUE_TYPE value;
  tree i_value, i_type;

  if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return 0;

  i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);

  if (NULL_TREE == i_type
      || TYPE_PRECISION (i_type) != total_bytes)
    return 0;

  value = TREE_FIXED_CST (expr);
  i_value = double_int_to_tree (i_type, value.data);

  return native_encode_int (i_value, ptr, len, off);
}

/* Subroutine of native_encode_expr.  Encode the REAL_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words, bitpos;
  unsigned char value;

  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  long tmp[6];

  if ((off == -1 && total_bytes > len)
      || off >= total_bytes)
    return 0;
  if (off == -1)
    off = 0;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));

  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));

      if (UNITS_PER_WORD < 4)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
      if (offset >= off
	  && offset - off < len)
	ptr[offset - off] = value;
    }
  return MIN (len, total_bytes - off);
}

/* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
{
  int rsize, isize;
  tree part;

  part = TREE_REALPART (expr);
  rsize = native_encode_expr (part, ptr, len, off);
  if (off == -1
      && rsize == 0)
    return 0;
  part = TREE_IMAGPART (expr);
  if (off != -1)
    off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
  isize = native_encode_expr (part, ptr + rsize, len - rsize, off);
  if (off == -1
      && isize != rsize)
    return 0;
  return rsize + isize;
}

/* Subroutine of native_encode_expr.  Encode the VECTOR_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
{
  unsigned i, count;
  int size, offset;
  tree itype, elem;

  offset = 0;
  count = VECTOR_CST_NELTS (expr);
  itype = TREE_TYPE (TREE_TYPE (expr));
  size = GET_MODE_SIZE (TYPE_MODE (itype));
  for (i = 0; i < count; i++)
    {
      if (off >= size)
	{
	  off -= size;
	  continue;
	}
      elem = VECTOR_CST_ELT (expr, i);
      int res = native_encode_expr (elem, ptr + offset, len - offset, off);
      if ((off == -1 && res != size)
	  || res == 0)
	return 0;
      offset += res;
      if (offset >= len)
	return offset;
      if (off != -1)
	off = 0;
    }
  return offset;
}

/* Subroutine of native_encode_expr.  Encode the STRING_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
{
  tree type = TREE_TYPE (expr);
  HOST_WIDE_INT total_bytes;

  if (TREE_CODE (type) != ARRAY_TYPE
      || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
      || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
      || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
    return 0;
  total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
  if ((off == -1 && total_bytes > len)
      || off >= total_bytes)
    return 0;
  if (off == -1)
    off = 0;
  if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
    {
      int written = 0;
      if (off < TREE_STRING_LENGTH (expr))
	{
	  written = MIN (len, TREE_STRING_LENGTH (expr) - off);
	  memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
	}
      memset (ptr + written, 0,
	      MIN (total_bytes - written, len - written));
    }
  else
    memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
  return MIN (total_bytes - off, len);
}

/* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
   REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
   buffer PTR of length LEN bytes.  If OFF is not -1 then start
   the encoding at byte offset OFF and encode at most LEN bytes.
   Return the number of bytes placed in the buffer, or zero upon failure.  */

int
native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
{
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return native_encode_int (expr, ptr, len, off);

    case REAL_CST:
      return native_encode_real (expr, ptr, len, off);

    case FIXED_CST:
      return native_encode_fixed (expr, ptr, len, off);

    case COMPLEX_CST:
      return native_encode_complex (expr, ptr, len, off);

    case VECTOR_CST:
      return native_encode_vector (expr, ptr, len, off);

    case STRING_CST:
      return native_encode_string (expr, ptr, len, off);

    default:
      return 0;
    }
}

/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_int (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));

  if (total_bytes > len
      || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return NULL_TREE;

  wide_int result = wi::from_buffer (ptr, total_bytes);

  return wide_int_to_tree (type, result);
}

/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a FIXED_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_fixed (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  double_int result;
  FIXED_VALUE_TYPE fixed_value;

  if (total_bytes > len
      || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return NULL_TREE;

  result = double_int::from_buffer (ptr, total_bytes);
  fixed_value = fixed_from_double_int (result, TYPE_MODE (type));

  return build_fixed (type, fixed_value);
}

/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a REAL_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_real (tree type, const unsigned char *ptr, int len)
{
  machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  int byte, offset, word, words, bitpos;
  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  REAL_VALUE_TYPE r;
  long tmp[6];

  total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  if (total_bytes > len || total_bytes > 24)
    return NULL_TREE;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  memset (tmp, 0, sizeof (tmp));
  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      if (UNITS_PER_WORD < 4)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];

      tmp[bitpos / 32] |= (unsigned long) value << (bitpos & 31);
    }

  real_from_target (&r, tmp, mode);
  return build_real (type, r);
}

/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_complex (tree type, const unsigned char *ptr, int len)
{
  tree etype, rpart, ipart;
  int size;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  if (size * 2 > len)
    return NULL_TREE;
  rpart = native_interpret_expr (etype, ptr, size);
  if (!rpart)
    return NULL_TREE;
  ipart = native_interpret_expr (etype, ptr + size, size);
  if (!ipart)
    return NULL_TREE;
  return build_complex (type, rpart, ipart);
}

/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_vector (tree type, const unsigned char *ptr, int len)
{
  tree etype, elem;
  int i, size, count;
  tree *elements;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  count = TYPE_VECTOR_SUBPARTS (type);
  if (size * count > len)
    return NULL_TREE;

  elements = XALLOCAVEC (tree, count);
  for (i = count - 1; i >= 0; i--)
    {
      elem = native_interpret_expr (etype, ptr + (i * size), size);
      if (!elem)
	return NULL_TREE;
      elements[i] = elem;
    }
  return build_vector (type, elements);
}

/* Subroutine of fold_view_convert_expr.  Interpret the contents of
   the buffer PTR of length LEN as a constant of type TYPE.  For
   INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
   we return a REAL_CST, etc...  If the buffer cannot be interpreted,
   return NULL_TREE.  */

tree
native_interpret_expr (tree type, const unsigned char *ptr, int len)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      return native_interpret_int (type, ptr, len);

    case REAL_TYPE:
      return native_interpret_real (type, ptr, len);

    case FIXED_POINT_TYPE:
      return native_interpret_fixed (type, ptr, len);

    case COMPLEX_TYPE:
      return native_interpret_complex (type, ptr, len);

    case VECTOR_TYPE:
      return native_interpret_vector (type, ptr, len);

    default:
      return NULL_TREE;
    }
}

/* Returns true if we can interpret the contents of a native encoding
   as TYPE.  */

static bool
can_native_interpret_type_p (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
      return true;
    default:
      return false;
    }
}

/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
   TYPE at compile-time.  If we're unable to perform the conversion
   return NULL_TREE.  */

static tree
fold_view_convert_expr (tree type, tree expr)
{
  /* We support up to 512-bit values (for V8DFmode).  */
  unsigned char buffer[64];
  int len;

  /* Check that the host and target are sane.  */
  if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    return NULL_TREE;

  len = native_encode_expr (expr, buffer, sizeof (buffer));
  if (len == 0)
    return NULL_TREE;

  return native_interpret_expr (type, buffer, len);
}
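
/* For example, VIEW_CONVERT_EXPR<int>(1.0f) is folded at compile time
   by encoding the REAL_CST into BUFFER and reinterpreting the bytes,
   yielding the INTEGER_CST 0x3f800000 on a target using IEEE single
   precision floats.  */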

/* Build an expression for the address of T.  Folds away INDIRECT_REF
   to avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  if (TREE_CODE (t) == INDIRECT_REF)
    {
      /* Fold *&X to X.  */
      t = TREE_OPERAND (t, 0);

      if (TREE_TYPE (t) != ptrtype)
	t = build1_loc (loc, NOP_EXPR, ptrtype, t);
    }
  else if (TREE_CODE (t) == MEM_REF
	   && integer_zerop (TREE_OPERAND (t, 1)))
    return TREE_OPERAND (t, 0);
  else if (TREE_CODE (t) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
    return fold_binary (POINTER_PLUS_EXPR, ptrtype,
			TREE_OPERAND (t, 0),
			convert_to_ptrofftype (TREE_OPERAND (t, 1)));
  else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
    {
      t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));

      if (TREE_TYPE (t) != ptrtype)
	t = fold_convert_loc (loc, ptrtype, t);
    }
  else
    t = build1_loc (loc, ADDR_EXPR, ptrtype, t);

  return t;
}

/* Build an expression for the address of T.  */

tree
build_fold_addr_expr_loc (location_t loc, tree t)
{
  tree ptrtype = build_pointer_type (TREE_TYPE (t));

  return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
}

static bool vec_cst_ctor_to_array (tree, tree *);
7509 /* Fold a unary expression of code CODE and type TYPE with operand
7510 OP0. Return the folded expression if folding is successful.
7511 Otherwise, return NULL_TREE. */
7514 fold_unary_loc (location_t loc
, enum tree_code code
, tree type
, tree op0
)
7518 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
7520 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
7521 && TREE_CODE_LENGTH (code
) == 1);
7523 tem
= generic_simplify (loc
, code
, type
, op0
);
7530 if (CONVERT_EXPR_CODE_P (code
)
7531 || code
== FLOAT_EXPR
|| code
== ABS_EXPR
|| code
== NEGATE_EXPR
)
7533 /* Don't use STRIP_NOPS, because signedness of argument type
7535 STRIP_SIGN_NOPS (arg0
);
7539 /* Strip any conversions that don't change the mode. This
7540 is safe for every expression, except for a comparison
7541 expression because its signedness is derived from its
7544 Note that this is done as an internal manipulation within
7545 the constant folder, in order to find the simplest
7546 representation of the arguments so that their form can be
7547 studied. In any cases, the appropriate type conversions
7548 should be put back in the tree that will get out of the
7554 if (TREE_CODE_CLASS (code
) == tcc_unary
)
7556 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
7557 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7558 fold_build1_loc (loc
, code
, type
,
7559 fold_convert_loc (loc
, TREE_TYPE (op0
),
7560 TREE_OPERAND (arg0
, 1))));
7561 else if (TREE_CODE (arg0
) == COND_EXPR
)
7563 tree arg01
= TREE_OPERAND (arg0
, 1);
7564 tree arg02
= TREE_OPERAND (arg0
, 2);
7565 if (! VOID_TYPE_P (TREE_TYPE (arg01
)))
7566 arg01
= fold_build1_loc (loc
, code
, type
,
7567 fold_convert_loc (loc
,
7568 TREE_TYPE (op0
), arg01
));
7569 if (! VOID_TYPE_P (TREE_TYPE (arg02
)))
7570 arg02
= fold_build1_loc (loc
, code
, type
,
7571 fold_convert_loc (loc
,
7572 TREE_TYPE (op0
), arg02
));
7573 tem
= fold_build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7576 /* If this was a conversion, and all we did was to move into
7577 inside the COND_EXPR, bring it back out. But leave it if
7578 it is a conversion from integer to integer and the
7579 result precision is no wider than a word since such a
7580 conversion is cheap and may be optimized away by combine,
7581 while it couldn't if it were outside the COND_EXPR. Then return
7582 so we don't get into an infinite recursion loop taking the
7583 conversion out and then back in. */
7585 if ((CONVERT_EXPR_CODE_P (code
)
7586 || code
== NON_LVALUE_EXPR
)
7587 && TREE_CODE (tem
) == COND_EXPR
7588 && TREE_CODE (TREE_OPERAND (tem
, 1)) == code
7589 && TREE_CODE (TREE_OPERAND (tem
, 2)) == code
7590 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 1))
7591 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 2))
7592 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))
7593 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)))
7594 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7596 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))))
7597 && TYPE_PRECISION (TREE_TYPE (tem
)) <= BITS_PER_WORD
)
7598 || flag_syntax_only
))
7599 tem
= build1_loc (loc
, code
, type
,
7601 TREE_TYPE (TREE_OPERAND
7602 (TREE_OPERAND (tem
, 1), 0)),
7603 TREE_OPERAND (tem
, 0),
7604 TREE_OPERAND (TREE_OPERAND (tem
, 1), 0),
7605 TREE_OPERAND (TREE_OPERAND (tem
, 2),
7613 case NON_LVALUE_EXPR
:
7614 if (!maybe_lvalue_p (op0
))
7615 return fold_convert_loc (loc
, type
, op0
);
7620 case FIX_TRUNC_EXPR
:
7621 if (COMPARISON_CLASS_P (op0
))
7623 /* If we have (type) (a CMP b) and type is an integral type, return
7624 new expression involving the new type. Canonicalize
7625 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7627 Do not fold the result as that would not simplify further, also
7628 folding again results in recursions. */
7629 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
7630 return build2_loc (loc
, TREE_CODE (op0
), type
,
7631 TREE_OPERAND (op0
, 0),
7632 TREE_OPERAND (op0
, 1));
7633 else if (!INTEGRAL_TYPE_P (type
) && !VOID_TYPE_P (type
)
7634 && TREE_CODE (type
) != VECTOR_TYPE
)
7635 return build3_loc (loc
, COND_EXPR
, type
, op0
,
7636 constant_boolean_node (true, type
),
7637 constant_boolean_node (false, type
));
7640 /* Handle (T *)&A.B.C for A being of type T and B and C
7641 living at offset zero. This occurs frequently in
7642 C++ upcasting and then accessing the base. */
7643 if (TREE_CODE (op0
) == ADDR_EXPR
7644 && POINTER_TYPE_P (type
)
7645 && handled_component_p (TREE_OPERAND (op0
, 0)))
7647 HOST_WIDE_INT bitsize
, bitpos
;
7650 int unsignedp
, volatilep
;
7651 tree base
= TREE_OPERAND (op0
, 0);
7652 base
= get_inner_reference (base
, &bitsize
, &bitpos
, &offset
,
7653 &mode
, &unsignedp
, &volatilep
, false);
7654 /* If the reference was to a (constant) zero offset, we can use
7655 the address of the base if it has the same base type
7656 as the result type and the pointer type is unqualified. */
7657 if (! offset
&& bitpos
== 0
7658 && (TYPE_MAIN_VARIANT (TREE_TYPE (type
))
7659 == TYPE_MAIN_VARIANT (TREE_TYPE (base
)))
7660 && TYPE_QUALS (type
) == TYPE_UNQUALIFIED
)
7661 return fold_convert_loc (loc
, type
,
7662 build_fold_addr_expr_loc (loc
, base
));
7665 if (TREE_CODE (op0
) == MODIFY_EXPR
7666 && TREE_CONSTANT (TREE_OPERAND (op0
, 1))
7667 /* Detect assigning a bitfield. */
7668 && !(TREE_CODE (TREE_OPERAND (op0
, 0)) == COMPONENT_REF
7670 (TREE_OPERAND (TREE_OPERAND (op0
, 0), 1))))
7672 /* Don't leave an assignment inside a conversion
7673 unless assigning a bitfield. */
7674 tem
= fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 1));
7675 /* First do the assignment, then return converted constant. */
7676 tem
= build2_loc (loc
, COMPOUND_EXPR
, TREE_TYPE (tem
), op0
, tem
);
7677 TREE_NO_WARNING (tem
) = 1;
7678 TREE_USED (tem
) = 1;
7682 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7683 constants (if x has signed type, the sign bit cannot be set
7684 in c). This folds extension into the BIT_AND_EXPR.
7685 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7686 very likely don't have maximal range for their precision and this
7687 transformation effectively doesn't preserve non-maximal ranges. */
7688 if (TREE_CODE (type
) == INTEGER_TYPE
7689 && TREE_CODE (op0
) == BIT_AND_EXPR
7690 && TREE_CODE (TREE_OPERAND (op0
, 1)) == INTEGER_CST
)
7692 tree and_expr
= op0
;
7693 tree and0
= TREE_OPERAND (and_expr
, 0);
7694 tree and1
= TREE_OPERAND (and_expr
, 1);
7697 if (TYPE_UNSIGNED (TREE_TYPE (and_expr
))
7698 || (TYPE_PRECISION (type
)
7699 <= TYPE_PRECISION (TREE_TYPE (and_expr
))))
7701 else if (TYPE_PRECISION (TREE_TYPE (and1
))
7702 <= HOST_BITS_PER_WIDE_INT
7703 && tree_fits_uhwi_p (and1
))
7705 unsigned HOST_WIDE_INT cst
;
7707 cst
= tree_to_uhwi (and1
);
7708 cst
&= HOST_WIDE_INT_M1U
7709 << (TYPE_PRECISION (TREE_TYPE (and1
)) - 1);
7710 change
= (cst
== 0);
7711 #ifdef LOAD_EXTEND_OP
7713 && !flag_syntax_only
7714 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0
)))
7717 tree uns
= unsigned_type_for (TREE_TYPE (and0
));
7718 and0
= fold_convert_loc (loc
, uns
, and0
);
7719 and1
= fold_convert_loc (loc
, uns
, and1
);
7725 tem
= force_fit_type (type
, wi::to_widest (and1
), 0,
7726 TREE_OVERFLOW (and1
));
7727 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
7728 fold_convert_loc (loc
, type
, and0
), tem
);
7732 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7733 when one of the new casts will fold away. Conservatively we assume
7734 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7735 if (POINTER_TYPE_P (type
)
7736 && TREE_CODE (arg0
) == POINTER_PLUS_EXPR
7737 && (!TYPE_RESTRICT (type
) || TYPE_RESTRICT (TREE_TYPE (arg0
)))
7738 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
7739 || TREE_CODE (TREE_OPERAND (arg0
, 0)) == NOP_EXPR
7740 || TREE_CODE (TREE_OPERAND (arg0
, 1)) == NOP_EXPR
))
7742 tree arg00
= TREE_OPERAND (arg0
, 0);
7743 tree arg01
= TREE_OPERAND (arg0
, 1);
7745 return fold_build_pointer_plus_loc
7746 (loc
, fold_convert_loc (loc
, type
, arg00
), arg01
);
7749 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7750 of the same precision, and X is an integer type not narrower than
7751 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7752 if (INTEGRAL_TYPE_P (type
)
7753 && TREE_CODE (op0
) == BIT_NOT_EXPR
7754 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
7755 && CONVERT_EXPR_P (TREE_OPERAND (op0
, 0))
7756 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (op0
)))
7758 tem
= TREE_OPERAND (TREE_OPERAND (op0
, 0), 0);
7759 if (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7760 && TYPE_PRECISION (type
) <= TYPE_PRECISION (TREE_TYPE (tem
)))
7761 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
7762 fold_convert_loc (loc
, type
, tem
));
7765 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7766 type of X and Y (integer types only). */
7767 if (INTEGRAL_TYPE_P (type
)
7768 && TREE_CODE (op0
) == MULT_EXPR
7769 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
7770 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
7772 /* Be careful not to introduce new overflows. */
7774 if (TYPE_OVERFLOW_WRAPS (type
))
7777 mult_type
= unsigned_type_for (type
);
7779 if (TYPE_PRECISION (mult_type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
7781 tem
= fold_build2_loc (loc
, MULT_EXPR
, mult_type
,
7782 fold_convert_loc (loc
, mult_type
,
7783 TREE_OPERAND (op0
, 0)),
7784 fold_convert_loc (loc
, mult_type
,
7785 TREE_OPERAND (op0
, 1)));
7786 return fold_convert_loc (loc
, type
, tem
);
7790 tem
= fold_convert_const (code
, type
, arg0
);
7791 return tem
? tem
: NULL_TREE
;
7793 case ADDR_SPACE_CONVERT_EXPR
:
7794 if (integer_zerop (arg0
))
7795 return fold_convert_const (code
, type
, arg0
);
7798 case FIXED_CONVERT_EXPR
:
7799 tem
= fold_convert_const (code
, type
, arg0
);
7800 return tem
? tem
: NULL_TREE
;
7802 case VIEW_CONVERT_EXPR
:
7803 if (TREE_CODE (op0
) == MEM_REF
)
7804 return fold_build2_loc (loc
, MEM_REF
, type
,
7805 TREE_OPERAND (op0
, 0), TREE_OPERAND (op0
, 1));
7807 return fold_view_convert_expr (type
, op0
);
7810 tem
= fold_negate_expr (loc
, arg0
);
7812 return fold_convert_loc (loc
, type
, tem
);
7816 if (TREE_CODE (arg0
) == INTEGER_CST
|| TREE_CODE (arg0
) == REAL_CST
)
7817 return fold_abs_const (arg0
, type
);
7818 /* Convert fabs((double)float) into (double)fabsf(float). */
7819 else if (TREE_CODE (arg0
) == NOP_EXPR
7820 && TREE_CODE (type
) == REAL_TYPE
)
7822 tree targ0
= strip_float_extensions (arg0
);
7824 return fold_convert_loc (loc
, type
,
7825 fold_build1_loc (loc
, ABS_EXPR
,
7829 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7830 else if (TREE_CODE (arg0
) == ABS_EXPR
)
7833 /* Strip sign ops from argument. */
7834 if (TREE_CODE (type
) == REAL_TYPE
)
7836 tem
= fold_strip_sign_ops (arg0
);
7838 return fold_build1_loc (loc
, ABS_EXPR
, type
,
7839 fold_convert_loc (loc
, type
, tem
));
7844 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
7845 return fold_convert_loc (loc
, type
, arg0
);
7846 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
7848 tree itype
= TREE_TYPE (type
);
7849 tree rpart
= fold_convert_loc (loc
, itype
, TREE_OPERAND (arg0
, 0));
7850 tree ipart
= fold_convert_loc (loc
, itype
, TREE_OPERAND (arg0
, 1));
7851 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
,
7852 negate_expr (ipart
));
7854 if (TREE_CODE (arg0
) == COMPLEX_CST
)
7856 tree itype
= TREE_TYPE (type
);
7857 tree rpart
= fold_convert_loc (loc
, itype
, TREE_REALPART (arg0
));
7858 tree ipart
= fold_convert_loc (loc
, itype
, TREE_IMAGPART (arg0
));
7859 return build_complex (type
, rpart
, negate_expr (ipart
));
7861 if (TREE_CODE (arg0
) == CONJ_EXPR
)
7862 return fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
7866 if (TREE_CODE (arg0
) == INTEGER_CST
)
7867 return fold_not_const (arg0
, type
);
7868 /* Convert ~ (-A) to A - 1. */
7869 else if (INTEGRAL_TYPE_P (type
) && TREE_CODE (arg0
) == NEGATE_EXPR
)
7870 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
7871 fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0)),
7872 build_int_cst (type
, 1));
7873 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7874 else if (INTEGRAL_TYPE_P (type
)
7875 && ((TREE_CODE (arg0
) == MINUS_EXPR
7876 && integer_onep (TREE_OPERAND (arg0
, 1)))
7877 || (TREE_CODE (arg0
) == PLUS_EXPR
7878 && integer_all_onesp (TREE_OPERAND (arg0
, 1)))))
7879 return fold_build1_loc (loc
, NEGATE_EXPR
, type
,
7880 fold_convert_loc (loc
, type
,
7881 TREE_OPERAND (arg0
, 0)));
7882 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7883 else if (TREE_CODE (arg0
) == BIT_XOR_EXPR
7884 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
7885 fold_convert_loc (loc
, type
,
7886 TREE_OPERAND (arg0
, 0)))))
7887 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
, tem
,
7888 fold_convert_loc (loc
, type
,
7889 TREE_OPERAND (arg0
, 1)));
7890 else if (TREE_CODE (arg0
) == BIT_XOR_EXPR
7891 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
7892 fold_convert_loc (loc
, type
,
7893 TREE_OPERAND (arg0
, 1)))))
7894 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
,
7895 fold_convert_loc (loc
, type
,
7896 TREE_OPERAND (arg0
, 0)), tem
);
7897 /* Perform BIT_NOT_EXPR on each element individually. */
7898 else if (TREE_CODE (arg0
) == VECTOR_CST
)
7902 unsigned count
= VECTOR_CST_NELTS (arg0
), i
;
7904 elements
= XALLOCAVEC (tree
, count
);
7905 for (i
= 0; i
< count
; i
++)
7907 elem
= VECTOR_CST_ELT (arg0
, i
);
7908 elem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (type
), elem
);
7909 if (elem
== NULL_TREE
)
7914 return build_vector (type
, elements
);
7919 case TRUTH_NOT_EXPR
:
7920 /* Note that the operand of this must be an int
7921 and its values must be 0 or 1.
7922 ("true" is a fixed value perhaps depending on the language,
7923 but we don't handle values other than 1 correctly yet.) */
7924 tem
= fold_truth_not_expr (loc
, arg0
);
7927 return fold_convert_loc (loc
, type
, tem
);
7930 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
7931 return fold_convert_loc (loc
, type
, arg0
);
7932 if (TREE_CODE (arg0
) == COMPLEX_CST
)
7933 return fold_convert_loc (loc
, type
, TREE_REALPART (arg0
));
7934 if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
7936 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
7937 tem
= fold_build2_loc (loc
, TREE_CODE (arg0
), itype
,
7938 fold_build1_loc (loc
, REALPART_EXPR
, itype
,
7939 TREE_OPERAND (arg0
, 0)),
7940 fold_build1_loc (loc
, REALPART_EXPR
, itype
,
7941 TREE_OPERAND (arg0
, 1)));
7942 return fold_convert_loc (loc
, type
, tem
);
7944 if (TREE_CODE (arg0
) == CONJ_EXPR
)
7946 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
7947 tem
= fold_build1_loc (loc
, REALPART_EXPR
, itype
,
7948 TREE_OPERAND (arg0
, 0));
7949 return fold_convert_loc (loc
, type
, tem
);
7951 if (TREE_CODE (arg0
) == CALL_EXPR
)
7953 tree fn
= get_callee_fndecl (arg0
);
7954 if (fn
&& DECL_BUILT_IN_CLASS (fn
) == BUILT_IN_NORMAL
)
7955 switch (DECL_FUNCTION_CODE (fn
))
7957 CASE_FLT_FN (BUILT_IN_CEXPI
):
7958 fn
= mathfn_built_in (type
, BUILT_IN_COS
);
7960 return build_call_expr_loc (loc
, fn
, 1, CALL_EXPR_ARG (arg0
, 0));
7970 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
7971 return build_zero_cst (type
);
7972 if (TREE_CODE (arg0
) == COMPLEX_CST
)
7973 return fold_convert_loc (loc
, type
, TREE_IMAGPART (arg0
));
7974 if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
7976 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
7977 tem
= fold_build2_loc (loc
, TREE_CODE (arg0
), itype
,
7978 fold_build1_loc (loc
, IMAGPART_EXPR
, itype
,
7979 TREE_OPERAND (arg0
, 0)),
7980 fold_build1_loc (loc
, IMAGPART_EXPR
, itype
,
7981 TREE_OPERAND (arg0
, 1)));
7982 return fold_convert_loc (loc
, type
, tem
);
7984 if (TREE_CODE (arg0
) == CONJ_EXPR
)
7986 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
7987 tem
= fold_build1_loc (loc
, IMAGPART_EXPR
, itype
, TREE_OPERAND (arg0
, 0));
7988 return fold_convert_loc (loc
, type
, negate_expr (tem
));
7990 if (TREE_CODE (arg0
) == CALL_EXPR
)
7992 tree fn
= get_callee_fndecl (arg0
);
7993 if (fn
&& DECL_BUILT_IN_CLASS (fn
) == BUILT_IN_NORMAL
)
7994 switch (DECL_FUNCTION_CODE (fn
))
7996 CASE_FLT_FN (BUILT_IN_CEXPI
):
7997 fn
= mathfn_built_in (type
, BUILT_IN_SIN
);
7999 return build_call_expr_loc (loc
, fn
, 1, CALL_EXPR_ARG (arg0
, 0));
8009 /* Fold *&X to X if X is an lvalue. */
8010 if (TREE_CODE (op0
) == ADDR_EXPR
)
8012 tree op00
= TREE_OPERAND (op0
, 0);
8013 if ((TREE_CODE (op00
) == VAR_DECL
8014 || TREE_CODE (op00
) == PARM_DECL
8015 || TREE_CODE (op00
) == RESULT_DECL
)
8016 && !TREE_READONLY (op00
))
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;
	enum tree_code subcode;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 2);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR))
	  elts += nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else
	  subcode = FLOAT_EXPR;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }
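    /* Editorial example (not part of the original source; the endianness
       details are illustrative): unpacking the constant V4HI vector
       { 1, 2, 3, 4 } with VEC_UNPACK_LO_EXPR to V2SI on a little-endian
       target converts the first two elements and yields { 1, 2 }; the
       BYTES_BIG_ENDIAN test above selects the other half where
       needed.  */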
    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
	unsigned int nelts, i;
	tree *elts;
	enum tree_code subcode;

	if (TREE_CODE (op0) != VECTOR_CST)
	  return NULL_TREE;
	nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (op0));

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (op0, elts))
	  return NULL_TREE;

	switch (code)
	  {
	  case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
	  case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
	  case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
	  default: gcc_unreachable ();
	  }

	for (i = 1; i < nelts; i++)
	  {
	    elts[0] = const_binop (subcode, elts[0], elts[i]);
	    if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
	      return NULL_TREE;
	  }

	return elts[0];
      }

    default:
      return NULL_TREE;
    } /* switch (code) */
}
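/* Editorial note (not part of the original source): the reduction cases
   above fold a constant vector operand by accumulating into element 0,
   e.g. REDUC_PLUS_EXPR on { 1, 2, 3, 4 } applies const_binop three
   times and yields the scalar 10.  */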
/* If the operation was a conversion do _not_ mark a resulting constant
   with TREE_OVERFLOW if the original constant was not.  These conversions
   have implementation defined behavior and retaining the TREE_OVERFLOW
   flag here would confuse later passes such as VRP.  */

tree
fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
				tree type, tree op0)
{
  tree res = fold_unary_loc (loc, code, type, op0);
  if (res
      && TREE_CODE (res) == INTEGER_CST
      && TREE_CODE (op0) == INTEGER_CST
      && CONVERT_EXPR_CODE_P (code))
    TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);

  return res;
}
/* Fold a binary bitwise/truth expression of code CODE and type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting expression.
   ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_truth_andor (location_t loc, enum tree_code code, tree type,
		  tree arg0, tree arg1, tree op0, tree op1)
{
  tree tem;

  /* We only do these simplifications if we are optimizing.  */
  if (!optimize)
    return NULL_TREE;

  /* Check for things like (A || B) && (A || C).  We can convert this
     to A || (B && C).  Note that either operator can be any of the four
     truth and/or operations and the transformation will still be
     valid.  Also note that we only care about order for the
     ANDIF and ORIF operators.  If B contains side effects, this
     might change the truth-value of A.  */
  if (TREE_CODE (arg0) == TREE_CODE (arg1)
      && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
	  || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
	  || TREE_CODE (arg0) == TRUTH_AND_EXPR
	  || TREE_CODE (arg0) == TRUTH_OR_EXPR)
      && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
    {
      tree a00 = TREE_OPERAND (arg0, 0);
      tree a01 = TREE_OPERAND (arg0, 1);
      tree a10 = TREE_OPERAND (arg1, 0);
      tree a11 = TREE_OPERAND (arg1, 1);
      int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
			  || TREE_CODE (arg0) == TRUTH_AND_EXPR)
			 && (code == TRUTH_AND_EXPR
			     || code == TRUTH_OR_EXPR));

      if (operand_equal_p (a00, a10, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
				fold_build2_loc (loc, code, type, a01, a11));
      else if (commutative && operand_equal_p (a00, a11, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
				fold_build2_loc (loc, code, type, a01, a10));
      else if (commutative && operand_equal_p (a01, a10, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
				fold_build2_loc (loc, code, type, a00, a11));

      /* This case is tricky because we must either have commutative
	 operators or else A10 must not have side-effects.  */

      else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
	       && operand_equal_p (a01, a11, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type,
				fold_build2_loc (loc, code, type, a00, a10),
				a01);
    }

  /* See if we can build a range comparison.  */
  if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
    return tem;

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
      if (tem)
	return fold_build2_loc (loc, code, type, tem, arg1);
    }

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
      if (tem)
	return fold_build2_loc (loc, code, type, arg0, tem);
    }

  /* Check for the possibility of merging component references.  If our
     lhs is another similar operation, try to merge its rhs with our
     rhs.  Then try to merge our lhs and rhs.  */
  if (TREE_CODE (arg0) == code
      && 0 != (tem = fold_truth_andor_1 (loc, code, type,
					 TREE_OPERAND (arg0, 1), arg1)))
    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

  if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
    return tem;

  if (LOGICAL_OP_NON_SHORT_CIRCUIT
      && (code == TRUTH_AND_EXPR
	  || code == TRUTH_ANDIF_EXPR
	  || code == TRUTH_OR_EXPR
	  || code == TRUTH_ORIF_EXPR))
    {
      enum tree_code ncode, icode;

      ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
	      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
      icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;

      /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
	 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
	 We don't want to pack more than two leaves to a non-IF AND/OR
	 expression.
	 If tree-code of left-hand operand isn't an AND/OR-IF code and not
	 equal to IF-CODE, then we don't want to add right-hand operand.
	 If the inner right-hand side of left-hand operand has
	 side-effects, or isn't simple, then we can't add to it,
	 as otherwise we might destroy if-sequence.  */
      if (TREE_CODE (arg0) == icode
	  && simple_operand_p_2 (arg1)
	  /* Needed for sequence points to handle trappings, and
	     side-effects.  */
	  && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
	{
	  tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
				 arg1);
	  return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
				  tem);
	}
      /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
	 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
      else if (TREE_CODE (arg1) == icode
	       && simple_operand_p_2 (arg0)
	       /* Needed for sequence points to handle trappings, and
		  side-effects.  */
	       && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_build2_loc (loc, ncode, type,
				 arg0, TREE_OPERAND (arg1, 0));
	  return fold_build2_loc (loc, icode, type, tem,
				  TREE_OPERAND (arg1, 1));
	}
      /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
	 into (A OR B).
	 For sequence point consistency, we need to check for trapping,
	 and side-effects.  */
      else if (code == icode && simple_operand_p_2 (arg0)
	       && simple_operand_p_2 (arg1))
	return fold_build2_loc (loc, ncode, type, arg0, arg1);
    }

  return NULL_TREE;
}
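/* Editorial examples (not part of the original source), using the
   notation of the comments above and assuming operands free of side
   effects:

       (a || b) && (a || c)     ->  a || (b && c)
       ((a AND-IF b) AND-IF c)  ->  (a AND-IF (b AND c))
       (a AND-IF b)             ->  (a AND b), when both operands are
				    simple and cannot trap.  */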
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code compl_code;

  if (code == MIN_EXPR)
    compl_code = MAX_EXPR;
  else if (code == MAX_EXPR)
    compl_code = MIN_EXPR;
  else
    gcc_unreachable ();

  /* MIN (MAX (a, b), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));

  /* MIN (MAX (b, a), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
      && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));

  /* MIN (a, MAX (a, b)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));

  /* MIN (a, MAX (b, a)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));

  return NULL_TREE;
}
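/* Editorial example (not part of the original source): with
   code == MIN_EXPR the complement code is MAX_EXPR, so the first rule
   above folds MIN (MAX (a, b), b) to b, while omit_one_operand_loc
   keeps a alive for its side effects.  */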
/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
   valid if signed overflow is undefined.  */

static tree
maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
				 tree arg0, tree arg1,
				 bool *strict_overflow_p)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
  int sgn0;
  bool swap = false;

  /* Match A +- CST code arg1 and CST code arg1.  We can change the
     first form only if overflow is undefined.  */
  if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
	 /* In principle pointers also have undefined overflow behavior,
	    but that causes problems elsewhere.  */
	 && !POINTER_TYPE_P (TREE_TYPE (arg0))
	 && (code0 == MINUS_EXPR
	     || code0 == PLUS_EXPR)
	 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	|| code0 == INTEGER_CST))
    return NULL_TREE;

  /* Identify the constant in arg0 and its sign.  */
  if (code0 == INTEGER_CST)
    cst0 = arg0;
  else
    cst0 = TREE_OPERAND (arg0, 1);
  sgn0 = tree_int_cst_sgn (cst0);

  /* Overflowed constants and zero will cause problems.  */
  if (integer_zerop (cst0)
      || TREE_OVERFLOW (cst0))
    return NULL_TREE;

  /* See if we can reduce the magnitude of the constant in
     arg0 by changing the comparison code.  */
  if (code0 == INTEGER_CST)
    {
      /* CST <= arg1  ->  CST-1 < arg1.  */
      if (code == LE_EXPR && sgn0 == 1)
	code = LT_EXPR;
      /* -CST < arg1  ->  -CST-1 <= arg1.  */
      else if (code == LT_EXPR && sgn0 == -1)
	code = LE_EXPR;
      /* CST > arg1  ->  CST-1 >= arg1.  */
      else if (code == GT_EXPR && sgn0 == 1)
	code = GE_EXPR;
      /* -CST >= arg1  ->  -CST-1 > arg1.  */
      else if (code == GE_EXPR && sgn0 == -1)
	code = GT_EXPR;
      else
	return NULL_TREE;
      /* arg1 code' CST' might be more canonical.  */
      swap = true;
    }
  else
    {
      /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
      if (code == LT_EXPR
	  && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = LE_EXPR;
      /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
      else if (code == GT_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = GE_EXPR;
      /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
      else if (code == LE_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = LT_EXPR;
      /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
      else if (code == GE_EXPR
	       && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = GT_EXPR;
      else
	return NULL_TREE;
      *strict_overflow_p = true;
    }

  /* Now build the constant reduced in magnitude.  But not if that
     would produce one outside of its type's range.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
      && ((sgn0 == 1
	   && TYPE_MIN_VALUE (TREE_TYPE (cst0))
	   && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
	  || (sgn0 == -1
	      && TYPE_MAX_VALUE (TREE_TYPE (cst0))
	      && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
    /* We cannot swap the comparison here as that would cause us to
       endlessly recurse.  */
    return NULL_TREE;

  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
		       cst0, build_int_cst (TREE_TYPE (cst0), 1));
  if (code0 != INTEGER_CST)
    t = fold_build2_loc (loc, code0, TREE_TYPE (arg0),
			 TREE_OPERAND (arg0, 0), t);
  t = fold_convert (TREE_TYPE (arg1), t);

  /* If swapping might yield a more canonical form, do so.  */
  if (swap)
    return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);

  return fold_build2_loc (loc, code, type, t, arg1);
}
/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
   overflow further.  Try to decrease the magnitude of constants involved
   by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
   and put sole constants at the second argument position.
   Returns the canonicalized tree if changed, otherwise NULL_TREE.  */

static tree
maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
			       tree arg0, tree arg1)
{
  tree t;
  bool strict_overflow_p;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when reducing constant in comparison");

  /* Try canonicalization by simplifying arg0.  */
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
				       &strict_overflow_p);
  if (t)
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
      return t;
    }

  /* Try canonicalization by simplifying arg1 using the swapped
     comparison.  */
  code = swap_tree_comparison (code);
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
				       &strict_overflow_p);
  if (t && strict_overflow_p)
    fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
  return t;
}
/* Return whether BASE + OFFSET + BITPOS may wrap around the address
   space.  This is used to avoid issuing overflow warnings for
   expressions like &p->x which cannot wrap.  */

static bool
pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
{
  if (!POINTER_TYPE_P (TREE_TYPE (base)))
    return true;

  if (bitpos < 0)
    return true;

  wide_int wi_offset;
  int precision = TYPE_PRECISION (TREE_TYPE (base));
  if (offset == NULL_TREE)
    wi_offset = wi::zero (precision);
  else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
    return true;
  else
    wi_offset = offset;

  bool overflow;
  wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
  wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
  if (overflow)
    return true;

  if (!wi::fits_uhwi_p (total))
    return true;

  HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
  if (size <= 0)
    return true;

  /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
     array.  */
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      HOST_WIDE_INT base_size;

      base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
      if (base_size > 0 && size < base_size)
	size = base_size;
    }

  return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
}
/* Return the HOST_WIDE_INT least significant bits of T, a sizetype
   kind INTEGER_CST.  This makes sure to properly sign-extend the
   constant.  */

static HOST_WIDE_INT
size_low_cst (const_tree t)
{
  HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
  int prec = TYPE_PRECISION (TREE_TYPE (t));
  if (prec < HOST_BITS_PER_WIDE_INT)
    return sext_hwi (w, prec);
  return w;
}
/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary should call fold_binary.  Fold a comparison with
   tree code CODE and type TYPE with operands OP0 and OP1.  Return
   the folded comparison or NULL_TREE.  */

static tree
fold_comparison (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
  tree arg0, arg1, tem;

  arg0 = op0;
  arg1 = op1;

  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1.  */
  if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (equality_code || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && TREE_CODE (arg1) == INTEGER_CST
      && !TREE_OVERFLOW (arg1))
    {
      const enum tree_code
	reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
      tree variable = TREE_OPERAND (arg0, 0);
      tree new_const = int_const_binop (reverse_op, const2, const1);

      /* If the constant operation overflowed this can be
	 simplified as a comparison against INT_MAX/INT_MIN.  */
      if (TREE_OVERFLOW (new_const)
	  && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
	{
	  int const1_sgn = tree_int_cst_sgn (const1);
	  enum tree_code code2 = code;

	  /* Get the sign of the constant on the lhs if the
	     operation were VARIABLE + CONST1.  */
	  if (TREE_CODE (arg0) == MINUS_EXPR)
	    const1_sgn = -const1_sgn;

	  /* The sign of the constant determines if we overflowed
	     INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
	     Canonicalize to the INT_MIN overflow by swapping the comparison
	     if necessary.  */
	  if (const1_sgn == -1)
	    code2 = swap_tree_comparison (code);

	  /* We now can look at the canonicalized case
	       VARIABLE + 1  CODE2  INT_MIN
	     and decide on the result.  */
	  switch (code2)
	    {
	    case EQ_EXPR:
	    case LT_EXPR:
	    case LE_EXPR:
	      return
		omit_one_operand_loc (loc, type, boolean_false_node, variable);

	    case NE_EXPR:
	    case GE_EXPR:
	    case GT_EXPR:
	      return
		omit_one_operand_loc (loc, type, boolean_true_node, variable);

	    default:
	      gcc_unreachable ();
	    }
	}
      else
	{
	  if (!equality_code)
	    fold_overflow_warning ("assuming signed overflow does not occur "
				   "when changing X +- C1 cmp C2 to "
				   "X cmp C2 -+ C1",
				   WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type, variable, new_const);
	}
    }

  /* Transform comparisons of the form X - Y CMP 0 to X CMP Y.  */
  if (TREE_CODE (arg0) == MINUS_EXPR
      && (equality_code || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
      && integer_zerop (arg1))
    {
      /* ??? The transformation is valid for the other operators if overflow
	 is undefined for the type, but performing it here badly interacts
	 with the transformation in fold_cond_expr_with_comparison which
	 attempts to synthesize ABS_EXPR.  */
      if (!equality_code)
	fold_overflow_warning ("assuming signed overflow does not occur "
			       "when changing X - Y cmp 0 to X cmp Y",
			       WARN_STRICT_OVERFLOW_COMPARISON);
      return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
			      TREE_OPERAND (arg0, 1));
    }

  /* For comparisons of pointers we can decompose it to a compile time
     comparison of the base objects and the offsets into the object.
     This requires at least one operand being an ADDR_EXPR or a
     POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == ADDR_EXPR
	  || TREE_CODE (arg1) == ADDR_EXPR
	  || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
    {
      tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
      HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
      machine_mode mode;
      int volatilep, unsignedp;
      bool indirect_base0 = false, indirect_base1 = false;

      /* Get base and offset for the access.  Strip ADDR_EXPR for
	 get_inner_reference, but put it back by stripping INDIRECT_REF
	 off the base object if possible.  indirect_baseN will be true
	 if baseN is not an address but refers to the object itself.  */
      base0 = arg0;
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
				       &bitsize, &bitpos0, &offset0, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base0) == INDIRECT_REF)
	    base0 = TREE_OPERAND (base0, 0);
	  else
	    indirect_base0 = true;
	}
      else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	{
	  base0 = TREE_OPERAND (arg0, 0);
	  STRIP_SIGN_NOPS (base0);
	  if (TREE_CODE (base0) == ADDR_EXPR)
	    {
	      base0 = TREE_OPERAND (base0, 0);
	      indirect_base0 = true;
	    }
	  offset0 = TREE_OPERAND (arg0, 1);
	  if (tree_fits_shwi_p (offset0))
	    {
	      HOST_WIDE_INT off = size_low_cst (offset0);
	      if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
				   * BITS_PER_UNIT)
		  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
		{
		  bitpos0 = off * BITS_PER_UNIT;
		  offset0 = NULL_TREE;
		}
	    }
	}

      base1 = arg1;
      if (TREE_CODE (arg1) == ADDR_EXPR)
	{
	  base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
				       &bitsize, &bitpos1, &offset1, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base1) == INDIRECT_REF)
	    base1 = TREE_OPERAND (base1, 0);
	  else
	    indirect_base1 = true;
	}
      else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	{
	  base1 = TREE_OPERAND (arg1, 0);
	  STRIP_SIGN_NOPS (base1);
	  if (TREE_CODE (base1) == ADDR_EXPR)
	    {
	      base1 = TREE_OPERAND (base1, 0);
	      indirect_base1 = true;
	    }
	  offset1 = TREE_OPERAND (arg1, 1);
	  if (tree_fits_shwi_p (offset1))
	    {
	      HOST_WIDE_INT off = size_low_cst (offset1);
	      if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
				   * BITS_PER_UNIT)
		  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
		{
		  bitpos1 = off * BITS_PER_UNIT;
		  offset1 = NULL_TREE;
		}
	    }
	}

      /* A local variable can never be pointed to by
	 the default SSA name of an incoming parameter.  */
      if ((TREE_CODE (arg0) == ADDR_EXPR
	   && indirect_base0
	   && TREE_CODE (base0) == VAR_DECL
	   && auto_var_in_fn_p (base0, current_function_decl)
	   && !indirect_base1
	   && TREE_CODE (base1) == SSA_NAME
	   && SSA_NAME_IS_DEFAULT_DEF (base1)
	   && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
	  || (TREE_CODE (arg1) == ADDR_EXPR
	      && indirect_base1
	      && TREE_CODE (base1) == VAR_DECL
	      && auto_var_in_fn_p (base1, current_function_decl)
	      && !indirect_base0
	      && TREE_CODE (base0) == SSA_NAME
	      && SSA_NAME_IS_DEFAULT_DEF (base0)
	      && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
	{
	  if (code == NE_EXPR)
	    return constant_boolean_node (1, type);
	  else if (code == EQ_EXPR)
	    return constant_boolean_node (0, type);
	}
      /* If we have equivalent bases we might be able to simplify.  */
      else if (indirect_base0 == indirect_base1
	       && operand_equal_p (base0, base1, 0))
	{
	  /* We can fold this expression to a constant if the non-constant
	     offset parts are equal.  */
	  if ((offset0 == offset1
	       || (offset0 && offset1
		   && operand_equal_p (offset0, offset1, 0)))
	      && (equality_code
		  || (indirect_base0 && DECL_P (base0))
		  || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      if (!equality_code
		  && bitpos0 != bitpos1
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_CONDITIONAL);

	      switch (code)
		{
		case EQ_EXPR:
		  return constant_boolean_node (bitpos0 == bitpos1, type);
		case NE_EXPR:
		  return constant_boolean_node (bitpos0 != bitpos1, type);
		case LT_EXPR:
		  return constant_boolean_node (bitpos0 < bitpos1, type);
		case LE_EXPR:
		  return constant_boolean_node (bitpos0 <= bitpos1, type);
		case GE_EXPR:
		  return constant_boolean_node (bitpos0 >= bitpos1, type);
		case GT_EXPR:
		  return constant_boolean_node (bitpos0 > bitpos1, type);
		default:;
		}
	    }
	  /* We can simplify the comparison to a comparison of the variable
	     offset parts if the constant offset parts are equal.
	     Be careful to use signed sizetype here because otherwise we
	     mess with array offsets in the wrong way.  This is possible
	     because pointer arithmetic is restricted to remain within an
	     object and overflow on pointer differences is undefined as of
	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
	  else if (bitpos0 == bitpos1
		   && (equality_code
		       || (indirect_base0 && DECL_P (base0))
		       || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      /* By converting to signed sizetype we cover middle-end pointer
		 arithmetic which operates on unsigned pointer types of size
		 type size and ARRAY_REF offsets which are properly sign or
		 zero extended from their type in case it is narrower than
		 sizetype.  */
	      if (offset0 == NULL_TREE)
		offset0 = build_int_cst (ssizetype, 0);
	      else
		offset0 = fold_convert_loc (loc, ssizetype, offset0);
	      if (offset1 == NULL_TREE)
		offset1 = build_int_cst (ssizetype, 0);
	      else
		offset1 = fold_convert_loc (loc, ssizetype, offset1);

	      if (!equality_code
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_COMPARISON);

	      return fold_build2_loc (loc, code, type, offset0, offset1);
	    }
	}
      /* For non-equal bases we can simplify if they are addresses
	 of local binding decls or constants.  */
      else if (indirect_base0 && indirect_base1
	       /* We know that !operand_equal_p (base0, base1, 0)
		  because the if condition was false.  But make
		  sure two decls are not the same.  */
	       && base0 != base1
	       && TREE_CODE (arg0) == ADDR_EXPR
	       && TREE_CODE (arg1) == ADDR_EXPR
	       && (((TREE_CODE (base0) == VAR_DECL
		     || TREE_CODE (base0) == PARM_DECL)
		    && (targetm.binds_local_p (base0)
			|| CONSTANT_CLASS_P (base1)))
		   || CONSTANT_CLASS_P (base0))
	       && (((TREE_CODE (base1) == VAR_DECL
		     || TREE_CODE (base1) == PARM_DECL)
		    && (targetm.binds_local_p (base1)
			|| CONSTANT_CLASS_P (base0)))
		   || CONSTANT_CLASS_P (base1)))
	{
	  if (code == EQ_EXPR)
	    return omit_two_operands_loc (loc, type, boolean_false_node,
					  arg0, arg1);
	  else if (code == NE_EXPR)
	    return omit_two_operands_loc (loc, type, boolean_true_node,
					  arg0, arg1);
	}
      /* For equal offsets we can simplify to a comparison of the
	 base addresses.  */
      else if (bitpos0 == bitpos1
	       && (indirect_base0
		   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
	       && (indirect_base1
		   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
	       && ((offset0 == offset1)
		   || (offset0 && offset1
		       && operand_equal_p (offset0, offset1, 0))))
	{
	  if (indirect_base0)
	    base0 = build_fold_addr_expr_loc (loc, base0);
	  if (indirect_base1)
	    base1 = build_fold_addr_expr_loc (loc, base1);
	  return fold_build2_loc (loc, code, type, base0, base1);
	}
    }

  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one and has the same sign.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = TREE_OPERAND (arg1, 1);
      tree variable1 = TREE_OPERAND (arg0, 0);
      tree variable2 = TREE_OPERAND (arg1, 0);
      tree cst;
      const char * const warnmsg = G_("assuming signed overflow does not "
				      "occur when combining constants around "
				      "a comparison");

      /* Put the constant on the side where it doesn't overflow and is
	 of lower absolute value and of same sign than before.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const2, const1);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
	  && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  variable1,
				  fold_build2_loc (loc, TREE_CODE (arg1),
						   TREE_TYPE (arg1),
						   variable2, cst));
	}

      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const1, const2);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
	  && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  fold_build2_loc (loc, TREE_CODE (arg0),
						   TREE_TYPE (arg0),
						   variable1, cst),
				  variable2);
	}
    }

  /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
     signed arithmetic case.  That form is created by the compiler
     often enough for folding it to be of value.  One example is in
     computing loop trip counts after Operator Strength Reduction.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && TREE_CODE (arg0) == MULT_EXPR
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && integer_zerop (arg1))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;			/* zero */
      tree variable1 = TREE_OPERAND (arg0, 0);
      enum tree_code cmp_code = code;

      /* Handle unfolded multiplication by zero.  */
      if (integer_zerop (const1))
	return fold_build2_loc (loc, cmp_code, type, const1, const2);

      fold_overflow_warning (("assuming signed overflow does not occur when "
			      "eliminating multiplication in comparison "
			      "with zero"),
			     WARN_STRICT_OVERFLOW_COMPARISON);

      /* If const1 is negative we swap the sense of the comparison.  */
      if (tree_int_cst_sgn (const1) < 0)
	cmp_code = swap_tree_comparison (cmp_code);

      return fold_build2_loc (loc, cmp_code, type, variable1, const2);
    }

  tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
  if (tem)
    return tem;

  if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
    {
      tree targ0 = strip_float_extensions (arg0);
      tree targ1 = strip_float_extensions (arg1);
      tree newtype = TREE_TYPE (targ0);

      if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	newtype = TREE_TYPE (targ1);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, newtype, targ0),
				fold_convert_loc (loc, newtype, targ1));

      /* (-a) CMP (-b) -> b CMP a  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
				TREE_OPERAND (arg0, 0));

      if (TREE_CODE (arg1) == REAL_CST)
	{
	  REAL_VALUE_TYPE cst;
	  cst = TREE_REAL_CST (arg1);

	  /* (-a) CMP CST -> a swap(CMP) (-CST)  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR)
	    return fold_build2_loc (loc, swap_tree_comparison (code), type,
				    TREE_OPERAND (arg0, 0),
				    build_real (TREE_TYPE (arg1),
						real_value_negate (&cst)));

	  /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
	  /* a CMP (-0) -> a CMP 0  */
	  if (REAL_VALUE_MINUS_ZERO (cst))
	    return fold_build2_loc (loc, code, type, arg0,
				    build_real (TREE_TYPE (arg1), dconst0));

	  /* x != NaN is always true, other ops are always false.  */
	  if (REAL_VALUE_ISNAN (cst)
	      && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
	      return omit_one_operand_loc (loc, type, tem, arg0);
	    }

	  /* Fold comparisons against infinity.  */
	  if (REAL_VALUE_ISINF (cst)
	      && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = fold_inf_compare (loc, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}

      /* If this is a comparison of a real constant with a PLUS_EXPR
	 or a MINUS_EXPR of a real constant, we can convert it into a
	 comparison with a revised real constant as long as no overflow
	 occurs when unsafe_math_optimizations are enabled.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == REAL_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      arg1, TREE_OPERAND (arg0, 1)))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Likewise, we can simplify a comparison of a real constant with
	 a MINUS_EXPR whose first operand is also a real constant, i.e.
	 (c1 - x) < c2 becomes x > c1-c2.  Reordering is allowed on
	 floating-point types only if -fassociative-math is set.  */
      if (flag_associative_math
	  && TREE_CODE (arg1) == REAL_CST
	  && TREE_CODE (arg0) == MINUS_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
	  && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
				      arg1))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, swap_tree_comparison (code), type,
				TREE_OPERAND (arg0, 1), tem);

      /* Fold comparisons against built-in math functions.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && flag_unsafe_math_optimizations
	  && ! flag_errno_math)
	{
	  enum built_in_function fcode = builtin_mathfn_code (arg0);

	  if (fcode != END_BUILTINS)
	    {
	      tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}
    }

  if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
      && CONVERT_EXPR_P (arg0))
    {
      /* If we are widening one operand of an integer comparison,
	 see if the other operand is similarly being widened.  Perhaps we
	 can do the comparison in the narrower type.  */
      tem = fold_widened_comparison (loc, code, type, arg0, arg1);
      if (tem)
	return tem;

      /* Or if we are changing signedness.  */
      tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
      if (tem)
	return tem;
    }

  /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
     constant, we can simplify it.  */
  if (TREE_CODE (arg1) == INTEGER_CST
      && (TREE_CODE (arg0) == MIN_EXPR
	  || TREE_CODE (arg0) == MAX_EXPR)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
    {
      tem = optimize_minmax_comparison (loc, code, type, op0, op1);
      if (tem)
	return tem;
    }

  /* Simplify comparison of something with itself.  (For IEEE
     floating-point, we can only do some of these simplifications.)  */
  if (operand_equal_p (arg0, arg1, 0))
    {
      switch (code)
	{
	case EQ_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (element_mode (arg0)))
	    return constant_boolean_node (1, type);
	  break;

	case GE_EXPR:
	case LE_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (element_mode (arg0)))
	    return constant_boolean_node (1, type);
	  return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);

	case NE_EXPR:
	  /* For NE, we can only do this simplification if integer
	     or we don't honor IEEE floating point NaNs.  */
	  if (FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && HONOR_NANS (element_mode (arg0)))
	    break;
	  /* ... fall through ...  */
	case GT_EXPR:
	case LT_EXPR:
	  return constant_boolean_node (0, type);
	default:
	  gcc_unreachable ();
	}
    }

  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like  ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;
      int save_p = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
	  /* Don't handle degenerate cases here; they should already
	     have been handled anyway.  */
	  && cval1 != 0 && cval2 != 0
	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
	{
	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

	  /* We can't just pass T to eval_subst in case cval1 or cval2
	     was the same as ARG1.  */

	  tree high_result
	    = fold_build2_loc (loc, code, type,
			       eval_subst (loc, arg0, cval1, maxval,
					   cval2, minval),
			       arg1);
	  tree equal_result
	    = fold_build2_loc (loc, code, type,
			       eval_subst (loc, arg0, cval1, maxval,
					   cval2, maxval),
			       arg1);
	  tree low_result
	    = fold_build2_loc (loc, code, type,
			       eval_subst (loc, arg0, cval1, minval,
					   cval2, maxval),
			       arg1);

	  /* All three of these results should be 0 or 1.  Confirm they are.
	     Then use those values to select the proper code to use.  */

	  if (TREE_CODE (high_result) == INTEGER_CST
	      && TREE_CODE (equal_result) == INTEGER_CST
	      && TREE_CODE (low_result) == INTEGER_CST)
	    {
	      /* Make a 3-bit mask with the high-order bit being the
		 value for `>', the next for '=', and the low for '<'.  */
	      switch ((integer_onep (high_result) * 4)
		      + (integer_onep (equal_result) * 2)
		      + integer_onep (low_result))
		{
		case 0:
		  /* Always false.  */
		  return omit_one_operand_loc (loc, type,
					       integer_zero_node, arg0);
		case 1:
		  code = LT_EXPR;
		  break;
		case 2:
		  code = EQ_EXPR;
		  break;
		case 3:
		  code = LE_EXPR;
		  break;
		case 4:
		  code = GT_EXPR;
		  break;
		case 5:
		  code = NE_EXPR;
		  break;
		case 6:
		  code = GE_EXPR;
		  break;
		case 7:
		  /* Always true.  */
		  return omit_one_operand_loc (loc, type,
					       integer_one_node, arg0);
		}

	      if (save_p)
		{
		  tem = save_expr (build2 (code, type, cval1, cval2));
		  SET_EXPR_LOCATION (tem, loc);
		  return tem;
		}
	      return fold_build2_loc (loc, code, type, cval1, cval2);
	    }
	}
    }

  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
     into a single range test.  */
  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
      && TREE_CODE (arg1) == INTEGER_CST
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !integer_zerop (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (arg1))
    {
      tem = fold_div_compare (loc, code, type, arg0, arg1);
      if (tem != NULL_TREE)
	return tem;
    }

  /* Fold ~X op ~Y as Y op X.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == BIT_NOT_EXPR)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, code, type,
			      fold_convert_loc (loc, cmp_type,
						TREE_OPERAND (arg1, 0)),
			      TREE_OPERAND (arg0, 0));
    }

  /* Fold ~X op C as X op' ~C, where op' is the swapped comparison.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, swap_tree_comparison (code), type,
			      TREE_OPERAND (arg0, 0),
			      fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
					       fold_convert_loc (loc, cmp_type,
								 arg1)));
    }

  return NULL_TREE;
}
/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */

static tree
fold_mult_zconjz (location_t loc, tree type, tree expr)
{
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      expr = save_expr (expr);
      rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
      ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
    }

  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2_loc (loc, PLUS_EXPR, itype,
			 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
			 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
  return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
			  build_zero_cst (itype));
}
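/* Editorial example (not part of the original source): for
   z = a + b*i, fold_mult_zconjz rewrites z * conj (z) as
   (a*a + b*b) + 0*i, i.e. the squared magnitude in the real part and a
   zero imaginary part.  */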
/* Subroutine of fold_binary.  If P is the value of EXPR, computes
   power-of-two M and (arbitrary) N such that M divides (P-N).  This condition
   guarantees that P and N have the same least significant log2(M) bits.
   N is not otherwise constrained.  In particular, N is not normalized to
   0 <= N < M as is common.  In general, the precise value of P is unknown.
   M is chosen as large as possible such that constant N can be determined.

   Returns M and sets *RESIDUE to N.

   If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
   account.  This is not always possible due to PR 35705.  */

static unsigned HOST_WIDE_INT
get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
				 bool allow_func_align)
{
  enum tree_code code;

  *residue = 0;

  code = TREE_CODE (expr);
  if (code == ADDR_EXPR)
    {
      unsigned int bitalign;
      get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
      *residue /= BITS_PER_UNIT;
      return bitalign / BITS_PER_UNIT;
    }
  else if (code == POINTER_PLUS_EXPR)
    {
      tree op0, op1;
      unsigned HOST_WIDE_INT modulus;
      enum tree_code inner_code;

      op0 = TREE_OPERAND (expr, 0);
      STRIP_NOPS (op0);
      modulus = get_pointer_modulus_and_residue (op0, residue,
						 allow_func_align);

      op1 = TREE_OPERAND (expr, 1);
      STRIP_NOPS (op1);
      inner_code = TREE_CODE (op1);
      if (inner_code == INTEGER_CST)
	{
	  *residue += TREE_INT_CST_LOW (op1);
	  return modulus;
	}
      else if (inner_code == MULT_EXPR)
	{
	  op1 = TREE_OPERAND (op1, 1);
	  if (TREE_CODE (op1) == INTEGER_CST)
	    {
	      unsigned HOST_WIDE_INT align;

	      /* Compute the greatest power-of-2 divisor of op1.  */
	      align = TREE_INT_CST_LOW (op1);
	      align &= -align;

	      /* If align is non-zero and less than *modulus, replace
		 *modulus with align.  If align is 0, then either op1 is 0
		 or the greatest power-of-2 divisor of op1 doesn't fit in an
		 unsigned HOST_WIDE_INT.  In either case, no additional
		 constraint is imposed.  */
	      if (align)
		modulus = MIN (modulus, align);

	      return modulus;
	    }
	}
    }

  /* If we get here, we were unable to determine anything useful about the
     expression.  */
  return 1;
}
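/* Editorial example (not part of the original source): for &a[1] where
   a is a 16-byte-aligned array of 4-byte ints, the ADDR_EXPR case
   yields modulus M = 16 and residue N = 4, so the low four bits of the
   pointer value are known exactly.  */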
/* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
   CONSTRUCTOR ARG into array ELTS and return true if successful.  */

static bool
vec_cst_ctor_to_array (tree arg, tree *elts)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;

  if (TREE_CODE (arg) == VECTOR_CST)
    {
      for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
	elts[i] = VECTOR_CST_ELT (arg, i);
    }
  else if (TREE_CODE (arg) == CONSTRUCTOR)
    {
      constructor_elt *elt;

      FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
	if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
	  return false;
	else
	  elts[i] = elt->value;
    }
  else
    return false;
  for (; i < nelts; i++)
    elts[i]
      = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
  return true;
}
/* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
   selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
   NULL_TREE otherwise.  */

static tree
fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
  tree *elts;
  bool need_ctor = false;

  gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
	      && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
  if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
      || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
    return NULL_TREE;

  elts = XALLOCAVEC (tree, nelts * 3);
  if (!vec_cst_ctor_to_array (arg0, elts)
      || !vec_cst_ctor_to_array (arg1, elts + nelts))
    return NULL_TREE;

  for (i = 0; i < nelts; i++)
    {
      if (!CONSTANT_CLASS_P (elts[sel[i]]))
	need_ctor = true;
      elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
    }

  if (need_ctor)
    {
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, nelts);
      for (i = 0; i < nelts; i++)
	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
      return build_constructor (type, v);
    }

  return build_vector (type, &elts[2 * nelts]);
}
/* Try to fold a pointer difference of type TYPE between two address
   expressions of array references AREF0 and AREF1 using location LOC.
   Return a simplified expression for the difference or NULL_TREE.  */

static tree
fold_addr_of_array_ref_difference (location_t loc, tree type,
				   tree aref0, tree aref1)
{
  tree base0 = TREE_OPERAND (aref0, 0);
  tree base1 = TREE_OPERAND (aref1, 0);
  tree base_offset = build_int_cst (type, 0);

  /* If the bases are array references as well, recurse.  If the bases
     are pointer indirections compute the difference of the pointers.
     If the bases are equal, we are set.  */
  if ((TREE_CODE (base0) == ARRAY_REF
       && TREE_CODE (base1) == ARRAY_REF
       && (base_offset
	   = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
      || (INDIRECT_REF_P (base0)
	  && INDIRECT_REF_P (base1)
	  && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
					     TREE_OPERAND (base0, 0),
					     TREE_OPERAND (base1, 0))))
      || operand_equal_p (base0, base1, 0))
    {
      tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
      tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
      tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
      tree diff = build2 (MINUS_EXPR, type, op0, op1);
      return fold_build2_loc (loc, PLUS_EXPR, type,
			      base_offset,
			      fold_build2_loc (loc, MULT_EXPR, type,
					       diff, esz));
    }
  return NULL_TREE;
}
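/* Editorial example (not part of the original source): for an array a
   of 4-byte elements, the byte-level address difference
   &a[i] - &a[j] folds to 0 + (i - j) * 4, the bases being equal under
   operand_equal_p.  */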
/* If the real or vector real constant CST of type TYPE has an exact
   inverse, return it, else return NULL.  */

static tree
exact_inverse (tree type, tree cst)
{
  REAL_VALUE_TYPE r;
  tree unit_type, *elts;
  machine_mode mode;
  unsigned vec_nelts, i;

  switch (TREE_CODE (cst))
    {
    case REAL_CST:
      r = TREE_REAL_CST (cst);

      if (exact_real_inverse (TYPE_MODE (type), &r))
	return build_real (type, r);

      return NULL_TREE;

    case VECTOR_CST:
      vec_nelts = VECTOR_CST_NELTS (cst);
      elts = XALLOCAVEC (tree, vec_nelts);
      unit_type = TREE_TYPE (type);
      mode = TYPE_MODE (unit_type);

      for (i = 0; i < vec_nelts; i++)
	{
	  r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
	  if (!exact_real_inverse (mode, &r))
	    return NULL_TREE;
	  elts[i] = build_real (unit_type, r);
	}

      return build_vector (type, elts);

    default:
      return NULL_TREE;
    }
}
/* Mask out the tz least significant bits of X of type TYPE where
   tz is the number of trailing zeroes in Y.  */
static wide_int
mask_with_tz (tree type, const wide_int &x, const wide_int &y)
{
  int tz = wi::ctz (y);
  if (tz > 0)
    return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
  return x;
}
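/* Editorial example (not part of the original source): if Y is 24
   (binary 11000, three trailing zeroes), mask_with_tz clears the low
   three bits of X, e.g. X = 0b10111 becomes 0b10000.  */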
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					strict_overflow_p);

    case CALL_EXPR:
      {
	tree fndecl = get_callee_fndecl (t);
	if (!fndecl) return false;
	if (flag_delete_null_pointer_checks && !flag_check_new
	    && DECL_IS_OPERATOR_NEW (fndecl)
	    && !TREE_NOTHROW (fndecl))
	  return true;
	if (flag_delete_null_pointer_checks
	    && lookup_attribute ("returns_nonnull",
				 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
	  return true;
	return alloca_call_p (t);
      }

    default:
      break;
    }
  return false;
}
/* Return true when T is an address and is known to be nonzero.
   Handle warnings about undefined signed overflow.  */

bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-zero"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
9640 /* Fold a binary expression of code CODE and type TYPE with operands
9641 OP0 and OP1. LOC is the location of the resulting expression.
9642 Return the folded expression if folding is successful. Otherwise,
9643 return NULL_TREE. */
9646 fold_binary_loc (location_t loc
,
9647 enum tree_code code
, tree type
, tree op0
, tree op1
)
9649 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
9650 tree arg0
, arg1
, tem
;
9651 tree t1
= NULL_TREE
;
9652 bool strict_overflow_p
;
9655 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
9656 && TREE_CODE_LENGTH (code
) == 2
9658 && op1
!= NULL_TREE
);
9663 /* Strip any conversions that don't change the mode. This is
9664 safe for every expression, except for a comparison expression
9665 because its signedness is derived from its operands. So, in
9666 the latter case, only strip conversions that don't change the
9667 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9670 Note that this is done as an internal manipulation within the
9671 constant folder, in order to find the simplest representation
9672 of the arguments so that their form can be studied. In any
9673 cases, the appropriate type conversions should be put back in
9674 the tree that will get out of the constant folder. */
9676 if (kind
== tcc_comparison
|| code
== MIN_EXPR
|| code
== MAX_EXPR
)
9678 STRIP_SIGN_NOPS (arg0
);
9679 STRIP_SIGN_NOPS (arg1
);
9687 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9688 constant but we can't do arithmetic on them. */
9689 if ((TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
9690 || (TREE_CODE (arg0
) == REAL_CST
&& TREE_CODE (arg1
) == REAL_CST
)
9691 || (TREE_CODE (arg0
) == FIXED_CST
&& TREE_CODE (arg1
) == FIXED_CST
)
9692 || (TREE_CODE (arg0
) == FIXED_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
9693 || (TREE_CODE (arg0
) == COMPLEX_CST
&& TREE_CODE (arg1
) == COMPLEX_CST
)
9694 || (TREE_CODE (arg0
) == VECTOR_CST
&& TREE_CODE (arg1
) == VECTOR_CST
)
9695 || (TREE_CODE (arg0
) == VECTOR_CST
&& TREE_CODE (arg1
) == INTEGER_CST
))
9697 if (kind
== tcc_binary
)
9699 /* Make sure type and arg0 have the same saturating flag. */
9700 gcc_assert (TYPE_SATURATING (type
)
9701 == TYPE_SATURATING (TREE_TYPE (arg0
)));
9702 tem
= const_binop (code
, arg0
, arg1
);
9704 else if (kind
== tcc_comparison
)
9705 tem
= fold_relational_const (code
, type
, arg0
, arg1
);
9709 if (tem
!= NULL_TREE
)
9711 if (TREE_TYPE (tem
) != type
)
9712 tem
= fold_convert_loc (loc
, type
, tem
);
9717 /* If this is a commutative operation, and ARG0 is a constant, move it
9718 to ARG1 to reduce the number of tests below. */
9719 if (commutative_tree_code (code
)
9720 && tree_swap_operands_p (arg0
, arg1
, true))
9721 return fold_build2_loc (loc
, code
, type
, op1
, op0
);
9723 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9724 to ARG1 to reduce the number of tests below. */
9725 if (kind
== tcc_comparison
9726 && tree_swap_operands_p (arg0
, arg1
, true))
9727 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
, op1
, op0
);
9729 tem
= generic_simplify (loc
, code
, type
, op0
, op1
);
9733 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9735 First check for cases where an arithmetic operation is applied to a
9736 compound, conditional, or comparison operation. Push the arithmetic
9737 operation inside the compound or conditional to see if any folding
9738 can then be done. Convert comparison to conditional for this purpose.
9739 The also optimizes non-constant cases that used to be done in
9742 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9743 one of the operands is a comparison and the other is a comparison, a
9744 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9745 code below would make the expression more complex. Change it to a
9746 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9747 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9749 if ((code
== BIT_AND_EXPR
|| code
== BIT_IOR_EXPR
9750 || code
== EQ_EXPR
|| code
== NE_EXPR
)
9751 && TREE_CODE (type
) != VECTOR_TYPE
9752 && ((truth_value_p (TREE_CODE (arg0
))
9753 && (truth_value_p (TREE_CODE (arg1
))
9754 || (TREE_CODE (arg1
) == BIT_AND_EXPR
9755 && integer_onep (TREE_OPERAND (arg1
, 1)))))
9756 || (truth_value_p (TREE_CODE (arg1
))
9757 && (truth_value_p (TREE_CODE (arg0
))
9758 || (TREE_CODE (arg0
) == BIT_AND_EXPR
9759 && integer_onep (TREE_OPERAND (arg0
, 1)))))))
9761 tem
= fold_build2_loc (loc
, code
== BIT_AND_EXPR
? TRUTH_AND_EXPR
9762 : code
== BIT_IOR_EXPR
? TRUTH_OR_EXPR
9765 fold_convert_loc (loc
, boolean_type_node
, arg0
),
9766 fold_convert_loc (loc
, boolean_type_node
, arg1
));
9768 if (code
== EQ_EXPR
)
9769 tem
= invert_truthvalue_loc (loc
, tem
);
9771 return fold_convert_loc (loc
, type
, tem
);
9774 if (TREE_CODE_CLASS (code
) == tcc_binary
9775 || TREE_CODE_CLASS (code
) == tcc_comparison
)
9777 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
9779 tem
= fold_build2_loc (loc
, code
, type
,
9780 fold_convert_loc (loc
, TREE_TYPE (op0
),
9781 TREE_OPERAND (arg0
, 1)), op1
);
9782 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
9785 if (TREE_CODE (arg1
) == COMPOUND_EXPR
9786 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
9788 tem
= fold_build2_loc (loc
, code
, type
, op0
,
9789 fold_convert_loc (loc
, TREE_TYPE (op1
),
9790 TREE_OPERAND (arg1
, 1)));
9791 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg1
, 0),
9795 if (TREE_CODE (arg0
) == COND_EXPR
9796 || TREE_CODE (arg0
) == VEC_COND_EXPR
9797 || COMPARISON_CLASS_P (arg0
))
9799 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
9801 /*cond_first_p=*/1);
9802 if (tem
!= NULL_TREE
)
9806 if (TREE_CODE (arg1
) == COND_EXPR
9807 || TREE_CODE (arg1
) == VEC_COND_EXPR
9808 || COMPARISON_CLASS_P (arg1
))
9810 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
9812 /*cond_first_p=*/0);
9813 if (tem
!= NULL_TREE
)
9821 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9822 if (TREE_CODE (arg0
) == ADDR_EXPR
9823 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == MEM_REF
)
9825 tree iref
= TREE_OPERAND (arg0
, 0);
9826 return fold_build2 (MEM_REF
, type
,
9827 TREE_OPERAND (iref
, 0),
9828 int_const_binop (PLUS_EXPR
, arg1
,
9829 TREE_OPERAND (iref
, 1)));
9832 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9833 if (TREE_CODE (arg0
) == ADDR_EXPR
9834 && handled_component_p (TREE_OPERAND (arg0
, 0)))
9837 HOST_WIDE_INT coffset
;
9838 base
= get_addr_base_and_unit_offset (TREE_OPERAND (arg0
, 0),
9842 return fold_build2 (MEM_REF
, type
,
9843 build_fold_addr_expr (base
),
9844 int_const_binop (PLUS_EXPR
, arg1
,
9845 size_int (coffset
)));
9850 case POINTER_PLUS_EXPR
:
9851 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9852 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
9853 && INTEGRAL_TYPE_P (TREE_TYPE (arg0
)))
9854 return fold_convert_loc (loc
, type
,
9855 fold_build2_loc (loc
, PLUS_EXPR
, sizetype
,
9856 fold_convert_loc (loc
, sizetype
,
9858 fold_convert_loc (loc
, sizetype
,
9861 /* PTR_CST +p CST -> CST1 */
9862 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
9863 return fold_build2_loc (loc
, PLUS_EXPR
, type
, arg0
,
9864 fold_convert_loc (loc
, type
, arg1
));
9869 if (INTEGRAL_TYPE_P (type
) || VECTOR_INTEGER_TYPE_P (type
))
9871 /* X + (X / CST) * -CST is X % CST. */
9872 if (TREE_CODE (arg1
) == MULT_EXPR
9873 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == TRUNC_DIV_EXPR
9874 && operand_equal_p (arg0
,
9875 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0), 0))
9877 tree cst0
= TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1);
9878 tree cst1
= TREE_OPERAND (arg1
, 1);
9879 tree sum
= fold_binary_loc (loc
, PLUS_EXPR
, TREE_TYPE (cst1
),
9881 if (sum
&& integer_zerop (sum
))
9882 return fold_convert_loc (loc
, type
,
9883 fold_build2_loc (loc
, TRUNC_MOD_EXPR
,
9884 TREE_TYPE (arg0
), arg0
,
      /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
	 one.  Make sure the type is not saturating and has the signedness of
	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
	   || TREE_CODE (arg1) == MULT_EXPR)
	  && !TYPE_SATURATING (type)
	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
	{
	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
	  if (tem)
	    return tem;
	}

      if (! FLOAT_TYPE_P (type))
	{
	  /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
	     with a constant, and the two constants have no bits in common,
	     we should treat this as a BIT_IOR_EXPR since this may produce more
	     simplifications.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	      && wi::bit_and (TREE_OPERAND (arg0, 1),
			      TREE_OPERAND (arg1, 1)) == 0)
	    {
	      code = BIT_IOR_EXPR;
	      goto bit_ior;
	    }
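	  /* E.g. (x & 0xF0) + (x & 0x0F): the masks share no bits, so the
	     addition can never carry and is equivalent to
	     (x & 0xF0) | (x & 0x0F), which the BIT_IOR_EXPR code can
	     simplify further (here to x & 0xFF).  */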
	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
	     (plus (plus (mult) (mult)) (foo)) so that we can
	     take advantage of the factoring cases below.  */
	  if (TYPE_OVERFLOW_WRAPS (type)
	      && (((TREE_CODE (arg0) == PLUS_EXPR
		    || TREE_CODE (arg0) == MINUS_EXPR)
		   && TREE_CODE (arg1) == MULT_EXPR)
		  || ((TREE_CODE (arg1) == PLUS_EXPR
		       || TREE_CODE (arg1) == MINUS_EXPR)
		      && TREE_CODE (arg0) == MULT_EXPR)))
	    {
	      tree parg0, parg1, parg, marg;
	      enum tree_code pcode;

	      if (TREE_CODE (arg1) == MULT_EXPR)
		parg = arg0, marg = arg1;
	      else
		parg = arg1, marg = arg0;
	      pcode = TREE_CODE (parg);
	      parg0 = TREE_OPERAND (parg, 0);
	      parg1 = TREE_OPERAND (parg, 1);
	      STRIP_NOPS (parg0);
	      STRIP_NOPS (parg1);

	      if (TREE_CODE (parg0) == MULT_EXPR
		  && TREE_CODE (parg1) != MULT_EXPR)
		return fold_build2_loc (loc, pcode, type,
					fold_build2_loc (loc, PLUS_EXPR, type,
							 fold_convert_loc (loc, type,
									   parg0),
							 fold_convert_loc (loc, type,
									   marg)),
					fold_convert_loc (loc, type, parg1));
	      if (TREE_CODE (parg0) != MULT_EXPR
		  && TREE_CODE (parg1) == MULT_EXPR)
		return
		  fold_build2_loc (loc, PLUS_EXPR, type,
				   fold_convert_loc (loc, type, parg0),
				   fold_build2_loc (loc, pcode, type,
						    fold_convert_loc (loc, type, marg),
						    fold_convert_loc (loc, type,
								      parg1)));
	    }
	}
      else
	{
	  /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
	     to __complex__ ( x, y ).  This is not the same for SNaNs or
	     if signed zeros are involved.  */
	  if (!HONOR_SNANS (element_mode (arg0))
	      && !HONOR_SIGNED_ZEROS (element_mode (arg0))
	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	    {
	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	      tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
	      tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
	      bool arg0rz = false, arg0iz = false;
	      if ((arg0r && (arg0rz = real_zerop (arg0r)))
		  || (arg0i && (arg0iz = real_zerop (arg0i))))
		{
		  tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
		  tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
		  if (arg0rz && arg1i && real_zerop (arg1i))
		    {
		      tree rp = arg1r ? arg1r
			: build1 (REALPART_EXPR, rtype, arg1);
		      tree ip = arg0i ? arg0i
			: build1 (IMAGPART_EXPR, rtype, arg0);
		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		    }
		  else if (arg0iz && arg1r && real_zerop (arg1r))
		    {
		      tree rp = arg0r ? arg0r
			: build1 (REALPART_EXPR, rtype, arg0);
		      tree ip = arg1i ? arg1i
			: build1 (IMAGPART_EXPR, rtype, arg1);
		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		    }
		}
	    }

	  if (flag_unsafe_math_optimizations
	      && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
	      && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
	      && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
	    return tem;

	  /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
	     We associate floats only if the user has specified
	     -fassociative-math.  */
	  if (flag_associative_math
	      && TREE_CODE (arg1) == PLUS_EXPR
	      && TREE_CODE (arg0) != MULT_EXPR)
	    {
	      tree tree10 = TREE_OPERAND (arg1, 0);
	      tree tree11 = TREE_OPERAND (arg1, 1);
	      if (TREE_CODE (tree11) == MULT_EXPR
		  && TREE_CODE (tree10) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
		  return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
		}
	    }
	  /* Convert (b*c + d*e) + a into b*c + (d*e + a).
	     We associate floats only if the user has specified
	     -fassociative-math.  */
	  if (flag_associative_math
	      && TREE_CODE (arg0) == PLUS_EXPR
	      && TREE_CODE (arg1) != MULT_EXPR)
	    {
	      tree tree00 = TREE_OPERAND (arg0, 0);
	      tree tree01 = TREE_OPERAND (arg0, 1);
	      if (TREE_CODE (tree01) == MULT_EXPR
		  && TREE_CODE (tree00) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
		  return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
		}
	    }
	}

    bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
	 is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
	 is a rotate of A by B bits.  */
      {
	enum tree_code code0, code1;
	tree rtype;
	code0 = TREE_CODE (arg0);
	code1 = TREE_CODE (arg1);
	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
	    && operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 0), 0)
	    && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
		TYPE_UNSIGNED (rtype))
	    /* Only create rotates in complete modes.  Other cases are not
	       expanded properly.  */
	    && (element_precision (rtype)
		== element_precision (TYPE_MODE (rtype))))
	  {
	    tree tree01, tree11;
	    enum tree_code code01, code11;

	    tree01 = TREE_OPERAND (arg0, 1);
	    tree11 = TREE_OPERAND (arg1, 1);
	    STRIP_NOPS (tree01);
	    STRIP_NOPS (tree11);
	    code01 = TREE_CODE (tree01);
	    code11 = TREE_CODE (tree11);
	    if (code01 == INTEGER_CST
		&& code11 == INTEGER_CST
		&& (wi::to_widest (tree01) + wi::to_widest (tree11)
		    == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
	      {
		tem = build2_loc (loc, LROTATE_EXPR,
				  TREE_TYPE (TREE_OPERAND (arg0, 0)),
				  TREE_OPERAND (arg0, 0),
				  code0 == LSHIFT_EXPR ? tree01 : tree11);
		return fold_convert_loc (loc, type, tem);
	      }
	    else if (code11 == MINUS_EXPR)
	      {
		tree tree110, tree111;
		tree110 = TREE_OPERAND (tree11, 0);
		tree111 = TREE_OPERAND (tree11, 1);
		STRIP_NOPS (tree110);
		STRIP_NOPS (tree111);
		if (TREE_CODE (tree110) == INTEGER_CST
		    && 0 == compare_tree_int (tree110,
					      element_precision
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree01, tree111, 0))
		  return
		    fold_convert_loc (loc, type,
				      build2 ((code0 == LSHIFT_EXPR
					       ? LROTATE_EXPR
					       : RROTATE_EXPR),
					      TREE_TYPE (TREE_OPERAND (arg0, 0)),
					      TREE_OPERAND (arg0, 0), tree01));
	      }
	    else if (code01 == MINUS_EXPR)
	      {
		tree tree010, tree011;
		tree010 = TREE_OPERAND (tree01, 0);
		tree011 = TREE_OPERAND (tree01, 1);
		STRIP_NOPS (tree010);
		STRIP_NOPS (tree011);
		if (TREE_CODE (tree010) == INTEGER_CST
		    && 0 == compare_tree_int (tree010,
					      element_precision
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree11, tree011, 0))
		  return fold_convert_loc
		    (loc, type,
		     build2 ((code0 != LSHIFT_EXPR
			      ? LROTATE_EXPR
			      : RROTATE_EXPR),
			     TREE_TYPE (TREE_OPERAND (arg0, 0)),
			     TREE_OPERAND (arg0, 0), tree11));
	      }
	  }
      }
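      /* The source-level idioms matched above are, for 32-bit unsigned A,
	     (A << 3) + (A >> 29)           rotate left by 3
	     (A << B) + (A >> (32 - B))     rotate left by B
	 each of which becomes a single LROTATE_EXPR.  */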
    associate:
      /* In most languages, can't associate operations on floats through
	 parentheses.  Rather than remember where the parentheses were, we
	 don't associate floats at all, unless the user has specified
	 -fassociative-math.
	 And, we need to make sure type is not saturating.  */

      if ((! FLOAT_TYPE_P (type) || flag_associative_math)
	  && !TYPE_SATURATING (type))
	{
	  tree var0, con0, lit0, minus_lit0;
	  tree var1, con1, lit1, minus_lit1;
	  tree atype = type;
	  bool ok = true;

	  /* Split both trees into variables, constants, and literals.  Then
	     associate each group together, the constants with literals,
	     then the result with variables.  This increases the chances of
	     literals being recombined later and of generating relocatable
	     expressions for the sum of a constant and literal.  */
	  var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
	  var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
			     code == MINUS_EXPR);

	  /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
	  if (code == MINUS_EXPR)
	    code = PLUS_EXPR;

	  /* With undefined overflow prefer doing association in a type
	     which wraps on overflow, if that is one of the operand types.  */
	  if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
	      || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
	    {
	      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
		  && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
		atype = TREE_TYPE (arg0);
	      else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
		       && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
		atype = TREE_TYPE (arg1);
	      gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
	    }

	  /* With undefined overflow we can only associate constants with one
	     variable, and constants whose association doesn't overflow.  */
	  if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
	      || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
	    {
	      if (var0 && var1)
		{
		  tree tmp0 = var0;
		  tree tmp1 = var1;

		  if (TREE_CODE (tmp0) == NEGATE_EXPR)
		    tmp0 = TREE_OPERAND (tmp0, 0);
		  if (CONVERT_EXPR_P (tmp0)
		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
			  <= TYPE_PRECISION (atype)))
		    tmp0 = TREE_OPERAND (tmp0, 0);
		  if (TREE_CODE (tmp1) == NEGATE_EXPR)
		    tmp1 = TREE_OPERAND (tmp1, 0);
		  if (CONVERT_EXPR_P (tmp1)
		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
			  <= TYPE_PRECISION (atype)))
		    tmp1 = TREE_OPERAND (tmp1, 0);
		  /* The only case we can still associate with two variables
		     is if they are the same, modulo negation and bit-pattern
		     preserving conversions.  */
		  if (!operand_equal_p (tmp0, tmp1, 0))
		    ok = false;
		}
	    }

	  /* Only do something if we found more than two objects.  Otherwise,
	     nothing has changed and we risk infinite recursion.  */
	  if (ok
	      && (2 < ((var0 != 0) + (var1 != 0)
		       + (con0 != 0) + (con1 != 0)
		       + (lit0 != 0) + (lit1 != 0)
		       + (minus_lit0 != 0) + (minus_lit1 != 0))))
	    {
	      bool any_overflows = false;
	      if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
	      if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
	      if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
	      if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
	      var0 = associate_trees (loc, var0, var1, code, atype);
	      con0 = associate_trees (loc, con0, con1, code, atype);
	      lit0 = associate_trees (loc, lit0, lit1, code, atype);
	      minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
					    code, atype);

	      /* Preserve the MINUS_EXPR if the negative part of the literal is
		 greater than the positive part.  Otherwise, the multiplicative
		 folding code (i.e. extract_muldiv) may be fooled in case
		 unsigned constants are subtracted, like in the following
		 example: ((X*2 + 4) - 8U)/2.  */
	      if (minus_lit0 && lit0)
		{
		  if (TREE_CODE (lit0) == INTEGER_CST
		      && TREE_CODE (minus_lit0) == INTEGER_CST
		      && tree_int_cst_lt (lit0, minus_lit0))
		    {
		      minus_lit0 = associate_trees (loc, minus_lit0, lit0,
						    MINUS_EXPR, atype);
		      lit0 = 0;
		    }
		  else
		    {
		      lit0 = associate_trees (loc, lit0, minus_lit0,
					      MINUS_EXPR, atype);
		      minus_lit0 = 0;
		    }
		}

	      /* Don't introduce overflows through reassociation.  */
	      if (!any_overflows
		  && ((lit0 && TREE_OVERFLOW (lit0))
		      || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
		return NULL_TREE;

	      if (minus_lit0)
		{
		  if (con0 == 0)
		    return
		      fold_convert_loc (loc, type,
					associate_trees (loc, var0, minus_lit0,
							 MINUS_EXPR, atype));
		  else
		    {
		      con0 = associate_trees (loc, con0, minus_lit0,
					      MINUS_EXPR, atype);
		      return
			fold_convert_loc (loc, type,
					  associate_trees (loc, var0, con0,
							   PLUS_EXPR, atype));
		    }
		}

	      con0 = associate_trees (loc, con0, lit0, code, atype);
	      return
		fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
							      code, atype));
	    }
	}
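      /* Example of the association above: (x + 1) + (y + 2) splits into
	 variables {x, y} and literals {1, 2} and is recombined as
	 (x + y) + 3, so the two literals fold into one constant.  */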
      return NULL_TREE;

    case MINUS_EXPR:
      /* Pointer simplifications for subtraction, simple reassociations.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
	{
	  /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
	  if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
	      && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	    {
	      tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	      tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	      tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	      tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
	      return fold_build2_loc (loc, PLUS_EXPR, type,
				      fold_build2_loc (loc, MINUS_EXPR, type,
						       arg00, arg10),
				      fold_build2_loc (loc, MINUS_EXPR, type,
						       arg01, arg11));
	    }
	  /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1
	     simplifies.  */
	  else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	    {
	      tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	      tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	      tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
					  fold_convert_loc (loc, type, arg1));
	      if (tmp)
		return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
	    }
	  /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
	     simplifies.  */
	  else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	    {
	      tree arg10 = fold_convert_loc (loc, type,
					     TREE_OPERAND (arg1, 0));
	      tree arg11 = fold_convert_loc (loc, type,
					     TREE_OPERAND (arg1, 1));
	      tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
					  fold_convert_loc (loc, type, arg0),
					  arg10);
	      if (tmp)
		return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
	    }
	}
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && negate_expr_p (arg1)
	  && reorder_operands_p (arg0, arg1))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				fold_convert_loc (loc, type,
						  negate_expr (arg1)),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));

      /* Convert -A - 1 to ~A.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && integer_each_onep (arg1)
	  && !TYPE_OVERFLOW_TRAPS (type))
	return fold_build1_loc (loc, BIT_NOT_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));

      /* Convert -1 - A to ~A.  */
      if (TREE_CODE (type) != COMPLEX_TYPE
	  && integer_all_onesp (arg0))
	return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);

      /* X - (X / Y) * Y is X % Y.  */
      if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
	  && TREE_CODE (arg1) == MULT_EXPR
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
	  && operand_equal_p (arg0,
			      TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
			      TREE_OPERAND (arg1, 1), 0))
	return
	  fold_convert_loc (loc, type,
			    fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
					     arg0, TREE_OPERAND (arg1, 1)));
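      /* E.g. x - (x / y) * y becomes x % y for integral operands; this
	 matches the truncate-toward-zero semantics shared by
	 TRUNC_DIV_EXPR and TRUNC_MOD_EXPR.  */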
      if (! FLOAT_TYPE_P (type))
	{
	  /* Fold A - (A & B) into ~B & A.  */
	  if (!TREE_SIDE_EFFECTS (arg0)
	      && TREE_CODE (arg1) == BIT_AND_EXPR)
	    {
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
		{
		  tree arg10 = fold_convert_loc (loc, type,
						 TREE_OPERAND (arg1, 0));
		  return fold_build2_loc (loc, BIT_AND_EXPR, type,
					  fold_build1_loc (loc, BIT_NOT_EXPR,
							   type, arg10),
					  fold_convert_loc (loc, type, arg0));
		}
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
		{
		  tree arg11 = fold_convert_loc (loc,
						 type, TREE_OPERAND (arg1, 1));
		  return fold_build2_loc (loc, BIT_AND_EXPR, type,
					  fold_build1_loc (loc, BIT_NOT_EXPR,
							   type, arg11),
					  fold_convert_loc (loc, type, arg0));
		}
	    }

	  /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
	     any power of 2 minus 1.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0))
	    {
	      tree mask0 = TREE_OPERAND (arg0, 1);
	      tree mask1 = TREE_OPERAND (arg1, 1);
	      tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);

	      if (operand_equal_p (tem, mask1, 0))
		{
		  tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
					 TREE_OPERAND (arg0, 0), mask1);
		  return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
		}
	    }
	}
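      /* For instance, a - (a & b) becomes ~b & a, and with a mask of the
	 form 2**N - 1, e.g. (a & ~7) - (a & 7) becomes (a ^ 7) - 7.  */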
      /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
	 __complex__ ( x, -y ).  This is not the same for SNaNs or if
	 signed zeros are involved.  */
      if (!HONOR_SNANS (element_mode (arg0))
	  && !HONOR_SIGNED_ZEROS (element_mode (arg0))
	  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	{
	  tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	  tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
	  tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
	  bool arg0rz = false, arg0iz = false;
	  if ((arg0r && (arg0rz = real_zerop (arg0r)))
	      || (arg0i && (arg0iz = real_zerop (arg0i))))
	    {
	      tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
	      tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
	      if (arg0rz && arg1i && real_zerop (arg1i))
		{
		  tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
					     arg1r ? arg1r
					     : build1 (REALPART_EXPR, rtype, arg1));
		  tree ip = arg0i ? arg0i
		    : build1 (IMAGPART_EXPR, rtype, arg0);
		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		}
	      else if (arg0iz && arg1r && real_zerop (arg1r))
		{
		  tree rp = arg0r ? arg0r
		    : build1 (REALPART_EXPR, rtype, arg0);
		  tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
					     arg1i ? arg1i
					     : build1 (IMAGPART_EXPR, rtype, arg1));
		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		}
	    }
	}

      /* A - B -> A + (-B) if B is easily negatable.  */
      if (negate_expr_p (arg1)
	  && !TYPE_OVERFLOW_SANITIZED (type)
	  && ((FLOAT_TYPE_P (type)
	       /* Avoid this transformation if B is a positive REAL_CST.  */
	       && (TREE_CODE (arg1) != REAL_CST
		   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
	      || INTEGRAL_TYPE_P (type)))
	return fold_build2_loc (loc, PLUS_EXPR, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type,
						  negate_expr (arg1)));

      /* Try folding difference of addresses.  */
      {
	HOST_WIDE_INT diff;

	if ((TREE_CODE (arg0) == ADDR_EXPR
	     || TREE_CODE (arg1) == ADDR_EXPR)
	    && ptr_difference_const (arg0, arg1, &diff))
	  return build_int_cst_type (type, diff);
      }

      /* Fold &a[i] - &a[j] to i-j.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
	{
	  tree tem = fold_addr_of_array_ref_difference (loc, type,
							TREE_OPERAND (arg0, 0),
							TREE_OPERAND (arg1, 0));
	  if (tem)
	    return tem;
	}
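      /* E.g. for  int a[16];  the expression &a[i] - &a[j] folds to
	 i - j: pointer subtraction already divides the byte offset by
	 the element size.  */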
      if (FLOAT_TYPE_P (type)
	  && flag_unsafe_math_optimizations
	  && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
	  && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
	  && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
	return tem;

      /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
	 one.  Make sure the type is not saturating and has the signedness of
	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
	   || TREE_CODE (arg1) == MULT_EXPR)
	  && !TYPE_SATURATING (type)
	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
	{
	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
	  if (tem)
	    return tem;
	}

      goto associate;

    case MULT_EXPR:
      /* (-A) * (-B) -> A * B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold_build2_loc (loc, MULT_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)),
				fold_convert_loc (loc, type,
						  negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold_build2_loc (loc, MULT_EXPR, type,
				fold_convert_loc (loc, type,
						  negate_expr (arg0)),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));

      if (! FLOAT_TYPE_P (type))
	{
	  /* Transform x * -C into -x * C if x is easily negatable.  */
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && tree_int_cst_sgn (arg1) == -1
	      && negate_expr_p (arg0)
	      && (tem = negate_expr (arg1)) != arg1
	      && !TREE_OVERFLOW (tem))
	    return fold_build2_loc (loc, MULT_EXPR, type,
				    fold_convert_loc (loc, type,
						      negate_expr (arg0)), tem);

	  /* (a * (1 << b)) is (a << b)  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg1, 0)))
	    return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
				    TREE_OPERAND (arg1, 1));
	  if (TREE_CODE (arg0) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg0, 0)))
	    return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
				    TREE_OPERAND (arg0, 1));

	  /* (A + A) * C -> A * 2 * C  */
	  if (TREE_CODE (arg0) == PLUS_EXPR
	      && TREE_CODE (arg1) == INTEGER_CST
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg0, 1), 0))
	    return fold_build2_loc (loc, MULT_EXPR, type,
				    omit_one_operand_loc (loc, type,
							  TREE_OPERAND (arg0, 0),
							  TREE_OPERAND (arg0, 1)),
				    fold_build2_loc (loc, MULT_EXPR, type,
						     build_int_cst (type, 2), arg1));

	  /* ((T) (X /[ex] C)) * C cancels out if the conversion is
	     sign-changing only.  */
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg0) == EXACT_DIV_EXPR
	      && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
	    return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
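	  /* Examples of the integer cases above: a * (1 << b) becomes
	     a << b; (a + a) * 4 becomes a * 2 * 4 and then a * 8; and
	     (T) (x /[ex] 4) * 4 cancels to x when the conversion merely
	     changes signedness.  */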
	  strict_overflow_p = false;
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					     &strict_overflow_p)))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying "
					"multiplication"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_convert_loc (loc, type, tem);
	    }

	  /* Optimize z * conj(z) for integer complex numbers.  */
	  if (TREE_CODE (arg0) == CONJ_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    return fold_mult_zconjz (loc, type, arg1);
	  if (TREE_CODE (arg1) == CONJ_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    return fold_mult_zconjz (loc, type, arg0);
	}
      else
	{
	  /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may change
	     the result for floating point types due to rounding, so it is
	     applied only if -fassociative-math was specified.  */
	  if (flag_associative_math
	      && TREE_CODE (arg0) == RDIV_EXPR
	      && TREE_CODE (arg1) == REAL_CST
	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
	    {
	      tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
				      arg1);
	      if (tem)
		return fold_build2_loc (loc, RDIV_EXPR, type, tem,
					TREE_OPERAND (arg0, 1));
	    }

	  /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
	  if (operand_equal_p (arg0, arg1, 0))
	    {
	      tree tem = fold_strip_sign_ops (arg0);
	      if (tem != NULL_TREE)
		{
		  tem = fold_convert_loc (loc, type, tem);
		  return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
		}
	    }

	  /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
	     This is not the same for NaNs or if signed zeros are
	     involved.  */
	  if (!HONOR_NANS (element_mode (arg0))
	      && !HONOR_SIGNED_ZEROS (element_mode (arg0))
	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && TREE_CODE (arg1) == COMPLEX_CST
	      && real_zerop (TREE_REALPART (arg1)))
	    {
	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	      if (real_onep (TREE_IMAGPART (arg1)))
		return
		  fold_build2_loc (loc, COMPLEX_EXPR, type,
				   negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
								 rtype, arg0)),
				   fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
	      else if (real_minus_onep (TREE_IMAGPART (arg1)))
		return
		  fold_build2_loc (loc, COMPLEX_EXPR, type,
				   fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
				   negate_expr (fold_build1_loc (loc, REALPART_EXPR,
								 rtype, arg0)));
	    }

	  /* Optimize z * conj(z) for floating point complex numbers.
	     Guarded by flag_unsafe_math_optimizations as non-finite
	     imaginary components don't produce scalar results.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == CONJ_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    return fold_mult_zconjz (loc, type, arg1);
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == CONJ_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    return fold_mult_zconjz (loc, type, arg0);

	  if (flag_unsafe_math_optimizations)
	    {
	      enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	      enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	      /* Optimizations of root(...)*root(...).  */
	      if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
		{
		  tree rootfn, arg;
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg10 = CALL_EXPR_ARG (arg1, 0);

		  /* Optimize sqrt(x)*sqrt(x) as x.  */
		  if (BUILTIN_SQRT_P (fcode0)
		      && operand_equal_p (arg00, arg10, 0)
		      && ! HONOR_SNANS (element_mode (type)))
		    return arg00;

		  /* Optimize root(x)*root(y) as root(x*y).  */
		  rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		  arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
		  return build_call_expr_loc (loc, rootfn, 1, arg);
		}

	      /* Optimize expN(x)*expN(y) as expN(x+y).  */
	      if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
		{
		  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		  tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
					      CALL_EXPR_ARG (arg0, 0),
					      CALL_EXPR_ARG (arg1, 0));
		  return build_call_expr_loc (loc, expfn, 1, arg);
		}

	      /* Optimizations of pow(...)*pow(...).  */
	      if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
		  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
		  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
		{
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg01 = CALL_EXPR_ARG (arg0, 1);
		  tree arg10 = CALL_EXPR_ARG (arg1, 0);
		  tree arg11 = CALL_EXPR_ARG (arg1, 1);

		  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
		  if (operand_equal_p (arg01, arg11, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		      tree arg = fold_build2_loc (loc, MULT_EXPR, type,
						  arg00, arg10);
		      return build_call_expr_loc (loc, powfn, 2, arg, arg01);
		    }

		  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
		  if (operand_equal_p (arg00, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		      tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
						  arg01, arg11);
		      return build_call_expr_loc (loc, powfn, 2, arg00, arg);
		    }
		}

	      /* Optimize tan(x)*cos(x) as sin(x).  */
	      if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
		   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
		   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
		   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
		   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
		   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
		  && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				      CALL_EXPR_ARG (arg1, 0), 0))
		{
		  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);

		  if (sinfn != NULL_TREE)
		    return build_call_expr_loc (loc, sinfn, 1,
						CALL_EXPR_ARG (arg0, 0));
		}

	      /* Optimize x*pow(x,c) as pow(x,c+1).  */
	      if (fcode1 == BUILT_IN_POW
		  || fcode1 == BUILT_IN_POWF
		  || fcode1 == BUILT_IN_POWL)
		{
		  tree arg10 = CALL_EXPR_ARG (arg1, 0);
		  tree arg11 = CALL_EXPR_ARG (arg1, 1);
		  if (TREE_CODE (arg11) == REAL_CST
		      && !TREE_OVERFLOW (arg11)
		      && operand_equal_p (arg0, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
		      REAL_VALUE_TYPE c;
		      tree arg;

		      c = TREE_REAL_CST (arg11);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
		    }
		}

	      /* Optimize pow(x,c)*x as pow(x,c+1).  */
	      if (fcode0 == BUILT_IN_POW
		  || fcode0 == BUILT_IN_POWF
		  || fcode0 == BUILT_IN_POWL)
		{
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg01 = CALL_EXPR_ARG (arg0, 1);
		  if (TREE_CODE (arg01) == REAL_CST
		      && !TREE_OVERFLOW (arg01)
		      && operand_equal_p (arg1, arg00, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		      REAL_VALUE_TYPE c;
		      tree arg;

		      c = TREE_REAL_CST (arg01);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      return build_call_expr_loc (loc, powfn, 2, arg1, arg);
		    }
		}

	      /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x.  */
	      if (!in_gimple_form
		  && optimize
		  && operand_equal_p (arg0, arg1, 0))
		{
		  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

		  if (powfn)
		    {
		      tree arg = build_real (type, dconst2);
		      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
		    }
		}
	    }
	}
      goto associate;
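      /* Under -funsafe-math-optimizations the calls above combine, e.g.
	     sqrt (x) * sqrt (y)   ->  sqrt (x * y)
	     exp (x) * exp (y)     ->  exp (x + y)
	     pow (x, c) * x        ->  pow (x, c + 1)
	 None of these is exact in IEEE arithmetic, hence the flag.  */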
    case BIT_IOR_EXPR:
    bit_ior:
      /* ~X | X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  t1 = build_zero_cst (type);
	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
	  return omit_one_operand_loc (loc, type, t1, arg1);
	}

      /* X | ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  t1 = build_zero_cst (type);
	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
	  return omit_one_operand_loc (loc, type, t1, arg0);
	}

      /* Canonicalize (X & C1) | C2.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  int width = TYPE_PRECISION (type), w;
	  wide_int c1 = TREE_OPERAND (arg0, 1);
	  wide_int c2 = arg1;

	  /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
	  if ((c1 & c2) == c1)
	    return omit_one_operand_loc (loc, type, arg1,
					 TREE_OPERAND (arg0, 0));

	  wide_int msk = wi::mask (width, false,
				   TYPE_PRECISION (TREE_TYPE (arg1)));

	  /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
	  if (msk.and_not (c1 | c2) == 0)
	    return fold_build2_loc (loc, BIT_IOR_EXPR, type,
				    TREE_OPERAND (arg0, 0), arg1);

	  /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
	     unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
	     mode which allows further optimizations.  */
	  c1 &= msk;
	  c2 &= msk;
	  wide_int c3 = c1.and_not (c2);
	  for (w = BITS_PER_UNIT; w <= width; w <<= 1)
	    {
	      wide_int mask = wi::mask (w, false,
					TYPE_PRECISION (type));
	      if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
		{
		  c3 = mask;
		  break;
		}
	    }

	  if (c3 != c1)
	    return fold_build2_loc (loc, BIT_IOR_EXPR, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, type,
						     TREE_OPERAND (arg0, 0),
						     wide_int_to_tree (type,
								       c3)),
				    arg1);
	}
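      /* E.g. for 32-bit x, (x & 0x0F) | 0xFF folds to plain 0xFF since
	 C1 & C2 == C1, while (x & 0xFFFFFF00) | 0xFF becomes x | 0xFF
	 since C1 | C2 covers every bit of the precision.  */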
      /* (X & ~Y) | (~X & Y) is X ^ Y */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR)
	{
	  tree a0, a1, l0, l1, n0, n1;

	  a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));

	  l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));

	  n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
	  n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);

	  if ((operand_equal_p (n0, a0, 0)
	       && operand_equal_p (n1, a1, 0))
	      || (operand_equal_p (n0, a1, 0)
		  && operand_equal_p (n1, a0, 0)))
	    return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
	}

      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;

      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

	 This results in more efficient code for machines without a NAND
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NAND instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return
	    fold_build1_loc (loc, BIT_NOT_EXPR, type,
			     build2 (BIT_AND_EXPR, type,
				     fold_convert_loc (loc, type,
						       TREE_OPERAND (arg0, 0)),
				     fold_convert_loc (loc, type,
						       TREE_OPERAND (arg1, 0))));
	}
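      /* E.g. ~a | ~b is rewritten as ~(a & b); a target with a NAND
	 instruction can then implement the whole expression in one
	 operation.  */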
      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_XOR_EXPR:
      /* ~X ^ X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  t1 = build_zero_cst (type);
	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
	  return omit_one_operand_loc (loc, type, t1, arg1);
	}

      /* X ^ ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  t1 = build_zero_cst (type);
	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
	  return omit_one_operand_loc (loc, type, t1, arg0);
	}

      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
	 with a constant, and the two constants have no bits in common,
	 we should treat this as a BIT_IOR_EXPR since this may produce more
	 simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && wi::bit_and (TREE_OPERAND (arg0, 1),
			  TREE_OPERAND (arg1, 1)) == 0)
	{
	  code = BIT_IOR_EXPR;
	  goto bit_ior;
	}

      /* (X | Y) ^ X -> Y & ~X  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  tree t2 = TREE_OPERAND (arg0, 1);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
				arg1);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				fold_convert_loc (loc, type, t2),
				fold_convert_loc (loc, type, t1));
	  return t1;
	}

      /* (Y | X) ^ X -> Y & ~X  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tree t2 = TREE_OPERAND (arg0, 0);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
				arg1);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				fold_convert_loc (loc, type, t2),
				fold_convert_loc (loc, type, t1));
	  return t1;
	}

      /* X ^ (X | Y) -> Y & ~X  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
	{
	  tree t2 = TREE_OPERAND (arg1, 1);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
				arg0);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				fold_convert_loc (loc, type, t2),
				fold_convert_loc (loc, type, t1));
	  return t1;
	}

      /* X ^ (Y | X) -> Y & ~X  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
	{
	  tree t2 = TREE_OPERAND (arg1, 0);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
				arg0);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				fold_convert_loc (loc, type, t2),
				fold_convert_loc (loc, type, t1));
	  return t1;
	}

      /* Convert ~X ^ ~Y to X ^ Y.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));

      /* Convert ~X ^ C to X ^ ~C.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)),
				fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));

      /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && INTEGRAL_TYPE_P (type)
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_onep (arg1))
	return fold_build2_loc (loc, EQ_EXPR, type, arg0,
				build_zero_cst (TREE_TYPE (arg0)));

      /* Fold (X & Y) ^ Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg1));
	}
      /* Fold (X & Y) ^ X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg1));
	}
      /* Fold X ^ (X & Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_convert_loc (loc, type, arg0),
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
	}
      /* Fold X ^ (Y & X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg0));
	}

      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_AND_EXPR:
      /* ~X & X, (X == 0) & X, and !X & X are always zero.  */
      if ((TREE_CODE (arg0) == BIT_NOT_EXPR
	   || TREE_CODE (arg0) == TRUTH_NOT_EXPR
	   || (TREE_CODE (arg0) == EQ_EXPR
	       && integer_zerop (TREE_OPERAND (arg0, 1))))
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

      /* X & ~X, X & (X == 0), and X & !X are always zero.  */
      if ((TREE_CODE (arg1) == BIT_NOT_EXPR
	   || TREE_CODE (arg1) == TRUTH_NOT_EXPR
	   || (TREE_CODE (arg1) == EQ_EXPR
	       && integer_zerop (TREE_OPERAND (arg1, 1))))
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && INTEGRAL_TYPE_P (type)
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_onep (arg1))
	{
	  tree tem2;
	  tem = TREE_OPERAND (arg0, 0);
	  tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
	  tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
				  tem, tem2);
	  return fold_build2_loc (loc, EQ_EXPR, type, tem2,
				  build_zero_cst (TREE_TYPE (tem)));
	}
      /* Fold ~X & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && INTEGRAL_TYPE_P (type)
	  && integer_onep (arg1))
	{
	  tree tem2;
	  tem = TREE_OPERAND (arg0, 0);
	  tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
	  tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
				  tem, tem2);
	  return fold_build2_loc (loc, EQ_EXPR, type, tem2,
				  build_zero_cst (TREE_TYPE (tem)));
	}
      /* Fold !X & 1 as X == 0.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && integer_onep (arg1))
	{
	  tem = TREE_OPERAND (arg0, 0);
	  return fold_build2_loc (loc, EQ_EXPR, type, tem,
				  build_zero_cst (TREE_TYPE (tem)));
	}

      /* Fold (X ^ Y) & Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg1));
	}
      /* Fold (X ^ Y) & X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg1));
	}
      /* Fold X & (X ^ Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_convert_loc (loc, type, arg0),
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
	}
      /* Fold X & (Y ^ X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg0));
	}

      /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
	 multiple of 1 << CST.  */
      if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  wide_int cst1 = arg1;
	  wide_int ncst1 = -cst1;
	  if ((cst1 & ncst1) == ncst1
	      && multiple_of_p (type, arg0,
				wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
	    return fold_convert_loc (loc, type, arg0);
	}

      /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
	 bits from CST2.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == MULT_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  wide_int warg1 = arg1;
	  wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));

	  if (masked == 0)
	    return omit_two_operands_loc (loc, type, build_zero_cst (type),
					  arg0, arg1);
	  else if (masked != warg1)
	    {
	      /* Avoid the transform if arg1 is a mask of some
		 mode which allows further optimizations.  */
	      int pop = wi::popcount (warg1);
	      if (!(pop >= BITS_PER_UNIT
		    && exact_log2 (pop) != -1
		    && wi::mask (pop, false, warg1.get_precision ()) == warg1))
		return fold_build2_loc (loc, code, type, op0,
					wide_int_to_tree (type, masked));
	    }
	}

      /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
	 ((A & N) + B) & M -> (A + B) & M
	 Similarly if (N & M) == 0,
	 ((A | N) + B) & M -> (A + B) & M
	 and for - instead of + (or unary - instead of +)
	 and/or ^ instead of |.
	 If B is constant and (B & M) == 0, fold into A & M.  */
      if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  wide_int cst1 = arg1;
	  if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
	      && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	      && (TREE_CODE (arg0) == PLUS_EXPR
		  || TREE_CODE (arg0) == MINUS_EXPR
		  || TREE_CODE (arg0) == NEGATE_EXPR)
	      && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
		  || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
	    {
	      tree pmop[2];
	      int which = 0;
	      wide_int cst0;

	      /* Now we know that arg0 is (C + D) or (C - D) or
		 -C and arg1 (M) is == (1LL << cst) - 1.
		 Store C into PMOP[0] and D into PMOP[1].  */
	      pmop[0] = TREE_OPERAND (arg0, 0);
	      pmop[1] = NULL;
	      if (TREE_CODE (arg0) != NEGATE_EXPR)
		{
		  pmop[1] = TREE_OPERAND (arg0, 1);
		  which = 1;
		}

	      if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
		which = -1;

	      for (; which >= 0; which--)
		switch (TREE_CODE (pmop[which]))
		  {
		  case BIT_AND_EXPR:
		  case BIT_IOR_EXPR:
		  case BIT_XOR_EXPR:
		    if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
			!= INTEGER_CST)
		      break;
		    cst0 = TREE_OPERAND (pmop[which], 1);
		    cst0 &= cst1;
		    if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
		      {
			if (cst0 != cst1)
			  break;
		      }
		    else if (cst0 != 0)
		      break;
		    /* If C or D is of the form (A & N) where
		       (N & M) == M, or of the form (A | N) or
		       (A ^ N) where (N & M) == 0, replace it with A.  */
		    pmop[which] = TREE_OPERAND (pmop[which], 0);
		    break;
		  case INTEGER_CST:
		    /* If C or D is a N where (N & M) == 0, it can be
		       omitted (assumed 0).  */
		    if ((TREE_CODE (arg0) == PLUS_EXPR
			 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
			&& (cst1 & pmop[which]) == 0)
		      pmop[which] = NULL;
		    break;
		  default:
		    break;
		  }

	      /* Only build anything new if we optimized one or both arguments
		 above.  */
	      if (pmop[0] != TREE_OPERAND (arg0, 0)
		  || (TREE_CODE (arg0) != NEGATE_EXPR
		      && pmop[1] != TREE_OPERAND (arg0, 1)))
		{
		  tree utype = TREE_TYPE (arg0);
		  if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
		    {
		      /* Perform the operations in a type that has defined
			 overflow behavior.  */
		      utype = unsigned_type_for (TREE_TYPE (arg0));
		      if (pmop[0] != NULL)
			pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
		      if (pmop[1] != NULL)
			pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
		    }

		  if (TREE_CODE (arg0) == NEGATE_EXPR)
		    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
		  else if (TREE_CODE (arg0) == PLUS_EXPR)
		    {
		      if (pmop[0] != NULL && pmop[1] != NULL)
			tem = fold_build2_loc (loc, PLUS_EXPR, utype,
					       pmop[0], pmop[1]);
		      else if (pmop[0] != NULL)
			tem = pmop[0];
		      else if (pmop[1] != NULL)
			tem = pmop[1];
		      else
			return build_int_cst (type, 0);
		    }
		  else if (pmop[0] == NULL)
		    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
		  else
		    tem = fold_build2_loc (loc, MINUS_EXPR, utype,
					   pmop[0], pmop[1]);
		  /* TEM is now the new binary +, - or unary - replacement.  */
		  tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
					 fold_convert_loc (loc, utype, arg1));
		  return fold_convert_loc (loc, type, tem);
		}
	    }
	}
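      /* Worked instance of the case above, with M == 0xFF and N == 0x1FF:
	 since (N & M) == M, ((a & 0x1FF) + b) & 0xFF drops the inner mask
	 and becomes (a + b) & 0xFF; bits above M cannot influence the
	 masked sum.  */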
      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	{
	  prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));

	  wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
	  if (mask == -1)
	    return
	      fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	}

      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

	 This results in more efficient code for machines without a NOR
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NOR instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold_build1_loc (loc, BIT_NOT_EXPR, type,
				  build2 (BIT_IOR_EXPR, type,
					  fold_convert_loc (loc, type,
							    TREE_OPERAND (arg0, 0)),
					  fold_convert_loc (loc, type,
							    TREE_OPERAND (arg1, 0))));
	}

      /* If arg0 is derived from the address of an object or function, we may
	 be able to fold this expression using the object or function's
	 alignment.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
	{
	  unsigned HOST_WIDE_INT modulus, residue;
	  unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);

	  modulus = get_pointer_modulus_and_residue (arg0, &residue,
						     integer_onep (arg1));

	  /* This works because modulus is a power of 2.  If this weren't the
	     case, we'd have to replace it by its greatest power-of-2
	     divisor: modulus & -modulus.  */
	  if (low < modulus)
	    return build_int_cst (type, residue & low);
	}

      /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
	      (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
	 if the new mask might be further optimized.  */
      if ((TREE_CODE (arg0) == LSHIFT_EXPR
	   || TREE_CODE (arg0) == RSHIFT_EXPR)
	  && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
	  && TREE_CODE (arg1) == INTEGER_CST
	  && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
	  && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
	  && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
	      < TYPE_PRECISION (TREE_TYPE (arg0))))
	{
	  unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
	  unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
	  unsigned HOST_WIDE_INT newmask, zerobits = 0;
	  tree shift_type = TREE_TYPE (arg0);

	  if (TREE_CODE (arg0) == LSHIFT_EXPR)
	    zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
	  else if (TREE_CODE (arg0) == RSHIFT_EXPR
		   && TYPE_PRECISION (TREE_TYPE (arg0))
		      == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
	    {
	      prec = TYPE_PRECISION (TREE_TYPE (arg0));
	      tree arg00 = TREE_OPERAND (arg0, 0);
	      /* See if more bits can be proven as zero because of
		 zero extension.  */
	      if (TREE_CODE (arg00) == NOP_EXPR
		  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
		{
		  tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
		  if (TYPE_PRECISION (inner_type)
		      == GET_MODE_PRECISION (TYPE_MODE (inner_type))
		      && TYPE_PRECISION (inner_type) < prec)
		    {
		      prec = TYPE_PRECISION (inner_type);
		      /* See if we can shorten the right shift.  */
		      if (shiftc < prec)
			shift_type = inner_type;
		      /* Otherwise X >> C1 is all zeros, so we'll optimize
			 it into (X, 0) later on by making sure zerobits
			 is all ones.  */
		    }
		}
	      zerobits = ~(unsigned HOST_WIDE_INT) 0;
	      if (shiftc < prec)
		{
		  zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
		  zerobits <<= prec - shiftc;
		}
	      /* For arithmetic shift if sign bit could be set, zerobits
		 can contain actually sign bits, so no transformation is
		 possible, unless MASK masks them all away.  In that
		 case the shift needs to be converted into logical shift.  */
	      if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
		  && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
		{
		  if ((mask & zerobits) == 0)
		    shift_type = unsigned_type_for (TREE_TYPE (arg0));
		  else
		    zerobits = 0;
		}
	    }

	  /* ((X << 16) & 0xff00) is (X, 0).  */
	  if ((mask & zerobits) == mask)
	    return omit_one_operand_loc (loc, type,
					 build_int_cst (type, 0), arg0);

	  newmask = mask | zerobits;
	  if (newmask != mask && (newmask & (newmask + 1)) == 0)
	    {
	      /* Only do the transformation if NEWMASK is some integer
		 mode's mask.  */
	      for (prec = BITS_PER_UNIT;
		   prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
		if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
		  break;
	      if (prec < HOST_BITS_PER_WIDE_INT
		  || newmask == ~(unsigned HOST_WIDE_INT) 0)
		{
		  tree newmaskt;

		  if (shift_type != TREE_TYPE (arg0))
		    {
		      tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
					     fold_convert_loc (loc, shift_type,
							       TREE_OPERAND (arg0, 0)),
					     TREE_OPERAND (arg0, 1));
		      tem = fold_convert_loc (loc, type, tem);
		    }
		  else
		    tem = op0;
		  newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
		  if (!tree_int_cst_equal (newmaskt, arg1))
		    return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
		}
	    }
	}

      goto associate;
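      /* E.g. for unsigned 32-bit x, the upper 24 bits of x >> 24 are known
	 zero, so in (x >> 24) & 0xFF the mask widens to the all-ones mask
	 of the mode, which other folders then drop; ((x << 16) & 0xff00)
	 instead folds to plain 0.  */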
    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
	 of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
	  && real_zerop (arg1))
	return NULL_TREE;

      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				TREE_OPERAND (arg0, 0),
				negate_expr (arg1));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				negate_expr (arg0),
				TREE_OPERAND (arg1, 0));

      /* Convert A/B/C to A/(B*C).  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg0) == RDIV_EXPR)
	return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
				fold_build2_loc (loc, MULT_EXPR, type,
						 TREE_OPERAND (arg0, 1), arg1));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg1) == RDIV_EXPR)
	return fold_build2_loc (loc, MULT_EXPR, type,
				fold_build2_loc (loc, RDIV_EXPR, type, arg0,
						 TREE_OPERAND (arg1, 0)),
				TREE_OPERAND (arg1, 1));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg1) == MULT_EXPR
	  && TREE_CODE (arg0) == REAL_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
	{
	  tree tem = const_binop (RDIV_EXPR, arg0,
				  TREE_OPERAND (arg1, 1));
	  if (tem)
	    return fold_build2_loc (loc, RDIV_EXPR, type, tem,
				    TREE_OPERAND (arg1, 0));
	}
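      /* Under -freciprocal-math, e.g. a/b/c becomes a/(b*c) and a/(b/c)
	 becomes (a/b)*c, trading a division for a multiplication at the
	 cost of a possibly differently-rounded result.  */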
11499 if (flag_unsafe_math_optimizations
)
11501 enum built_in_function fcode0
= builtin_mathfn_code (arg0
);
11502 enum built_in_function fcode1
= builtin_mathfn_code (arg1
);
11504 /* Optimize sin(x)/cos(x) as tan(x). */
11505 if (((fcode0
== BUILT_IN_SIN
&& fcode1
== BUILT_IN_COS
)
11506 || (fcode0
== BUILT_IN_SINF
&& fcode1
== BUILT_IN_COSF
)
11507 || (fcode0
== BUILT_IN_SINL
&& fcode1
== BUILT_IN_COSL
))
11508 && operand_equal_p (CALL_EXPR_ARG (arg0
, 0),
11509 CALL_EXPR_ARG (arg1
, 0), 0))
11511 tree tanfn
= mathfn_built_in (type
, BUILT_IN_TAN
);
11513 if (tanfn
!= NULL_TREE
)
11514 return build_call_expr_loc (loc
, tanfn
, 1, CALL_EXPR_ARG (arg0
, 0));
11517 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11518 if (((fcode0
== BUILT_IN_COS
&& fcode1
== BUILT_IN_SIN
)
11519 || (fcode0
== BUILT_IN_COSF
&& fcode1
== BUILT_IN_SINF
)
11520 || (fcode0
== BUILT_IN_COSL
&& fcode1
== BUILT_IN_SINL
))
11521 && operand_equal_p (CALL_EXPR_ARG (arg0
, 0),
11522 CALL_EXPR_ARG (arg1
, 0), 0))
11524 tree tanfn
= mathfn_built_in (type
, BUILT_IN_TAN
);
11526 if (tanfn
!= NULL_TREE
)
11528 tree tmp
= build_call_expr_loc (loc
, tanfn
, 1,
11529 CALL_EXPR_ARG (arg0
, 0));
11530 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11531 build_real (type
, dconst1
), tmp
);
11535 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11536 NaNs or Infinities. */
11537 if (((fcode0
== BUILT_IN_SIN
&& fcode1
== BUILT_IN_TAN
)
11538 || (fcode0
== BUILT_IN_SINF
&& fcode1
== BUILT_IN_TANF
)
11539 || (fcode0
== BUILT_IN_SINL
&& fcode1
== BUILT_IN_TANL
)))
11541 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
11542 tree arg01
= CALL_EXPR_ARG (arg1
, 0);
11544 if (! HONOR_NANS (element_mode (arg00
))
11545 && ! HONOR_INFINITIES (element_mode (arg00
))
11546 && operand_equal_p (arg00
, arg01
, 0))
11548 tree cosfn
= mathfn_built_in (type
, BUILT_IN_COS
);
11550 if (cosfn
!= NULL_TREE
)
11551 return build_call_expr_loc (loc
, cosfn
, 1, arg00
);
11555 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11556 NaNs or Infinities. */
11557 if (((fcode0
== BUILT_IN_TAN
&& fcode1
== BUILT_IN_SIN
)
11558 || (fcode0
== BUILT_IN_TANF
&& fcode1
== BUILT_IN_SINF
)
11559 || (fcode0
== BUILT_IN_TANL
&& fcode1
== BUILT_IN_SINL
)))
11561 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
11562 tree arg01
= CALL_EXPR_ARG (arg1
, 0);
11564 if (! HONOR_NANS (element_mode (arg00
))
11565 && ! HONOR_INFINITIES (element_mode (arg00
))
11566 && operand_equal_p (arg00
, arg01
, 0))
11568 tree cosfn
= mathfn_built_in (type
, BUILT_IN_COS
);
11570 if (cosfn
!= NULL_TREE
)
11572 tree tmp
= build_call_expr_loc (loc
, cosfn
, 1, arg00
);
11573 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11574 build_real (type
, dconst1
),
11580 /* Optimize pow(x,c)/x as pow(x,c-1). */
11581 if (fcode0
== BUILT_IN_POW
11582 || fcode0
== BUILT_IN_POWF
11583 || fcode0
== BUILT_IN_POWL
)
11585 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
11586 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
11587 if (TREE_CODE (arg01
) == REAL_CST
11588 && !TREE_OVERFLOW (arg01
)
11589 && operand_equal_p (arg1
, arg00
, 0))
11591 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
11595 c
= TREE_REAL_CST (arg01
);
11596 real_arithmetic (&c
, MINUS_EXPR
, &c
, &dconst1
);
11597 arg
= build_real (type
, c
);
11598 return build_call_expr_loc (loc
, powfn
, 2, arg1
, arg
);
11602 /* Optimize a/root(b/c) into a*root(c/b). */
11603 if (BUILTIN_ROOT_P (fcode1
))
11605 tree rootarg
= CALL_EXPR_ARG (arg1
, 0);
11607 if (TREE_CODE (rootarg
) == RDIV_EXPR
)
11609 tree rootfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
11610 tree b
= TREE_OPERAND (rootarg
, 0);
11611 tree c
= TREE_OPERAND (rootarg
, 1);
11613 tree tmp
= fold_build2_loc (loc
, RDIV_EXPR
, type
, c
, b
);
11615 tmp
= build_call_expr_loc (loc
, rootfn
, 1, tmp
);
11616 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, tmp
);
	  /* Optimize x/expN(y) into x*expN(-y).  */
	  if (BUILTIN_EXPONENT_P (fcode1))
	    {
	      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
	      tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
	      arg1 = build_call_expr_loc (loc, expfn, 1,
					  fold_convert_loc (loc, type, arg));
	      return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
	    }
	  /* Optimize x/pow(y,z) into x*pow(y,-z).  */
	  if (fcode1 == BUILT_IN_POW
	      || fcode1 == BUILT_IN_POWF
	      || fcode1 == BUILT_IN_POWL)
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
	      tree arg10 = CALL_EXPR_ARG (arg1, 0);
	      tree arg11 = CALL_EXPR_ARG (arg1, 1);
	      tree neg11 = fold_convert_loc (loc, type,
					     negate_expr (arg11));
	      arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
	      return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
	    }
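	  /* Both folds above trade a division for a multiplication:
	     x / exp (y) becomes x * exp (-y), and x / pow (y, z)
	     becomes x * pow (y, -z).  */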
	}
      return NULL_TREE;

    case TRUNC_DIV_EXPR:
      /* Optimize (X & (-A)) / A where A is a power of 2,
	 to X >> log2(A).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
	  && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
	{
	  tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
				      arg1, TREE_OPERAND (arg0, 1));
	  if (sum && integer_zerop (sum))
	    {
	      tree pow2 = build_int_cst (integer_type_node,
					 wi::exact_log2 (arg1));
	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
				      TREE_OPERAND (arg0, 0), pow2);
	    }
	}
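      /* For example, (X & -8) / 8 becomes X >> 3: the mask guarantees the
	 dividend is an exact multiple of 8, so the arithmetic shift is
	 equivalent.  */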
      /* Fall through */

    case FLOOR_DIV_EXPR:
      /* Simplify A / (B << N) where A and B are positive and B is
	 a power of 2, to A >> (N + log2(B)).  */
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == LSHIFT_EXPR
	  && (TYPE_UNSIGNED (type)
	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
	{
	  tree sval = TREE_OPERAND (arg1, 0);
	  if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
	    {
	      tree sh_cnt = TREE_OPERAND (arg1, 1);
	      tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
					 wi::exact_log2 (sval));

	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying A / (B << N)"),
				       WARN_STRICT_OVERFLOW_MISC);

	      sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
					sh_cnt, pow2);
	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
				      fold_convert_loc (loc, type, arg0),
				      sh_cnt);
	    }
	}
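      /* For example, unsigned A / (4 << N) becomes A >> (N + 2).  */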
      /* Fall through */

    case ROUND_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_zerop (arg1))
	return NULL_TREE;

      /* Convert -A / -B to A / B when the type is signed and overflow is
	 undefined.  */
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	  && TREE_CODE (arg0) == NEGATE_EXPR
	  && negate_expr_p (arg1))
	{
	  if (INTEGRAL_TYPE_P (type))
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when distributing negation across "
				    "division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, type,
						    TREE_OPERAND (arg0, 0)),
				  fold_convert_loc (loc, type,
						    negate_expr (arg1)));
	}
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && negate_expr_p (arg0))
	{
	  if (INTEGRAL_TYPE_P (type))
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when distributing negation across "
				    "division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, type,
						    negate_expr (arg0)),
				  fold_convert_loc (loc, type,
						    TREE_OPERAND (arg1, 0)));
	}

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
	 operation, EXACT_DIV_EXPR.

	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
	 At one time others generated faster code, but it's not clear whether
	 they still do after the last round of changes to the DIV code in
	 expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
	  && multiple_of_p (type, arg0, arg1))
	return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					 &strict_overflow_p)))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_convert_loc (loc, type, tem);
	}

      return NULL_TREE;
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* X % -Y is the same as X % Y.  */
      if (code == TRUNC_MOD_EXPR
	  && !TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && !TYPE_OVERFLOW_TRAPS (type))
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					 &strict_overflow_p)))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying modulus"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_convert_loc (loc, type, tem);
	}

      /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
	 i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
      if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
	  && (TYPE_UNSIGNED (type)
	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
	{
	  tree c = arg1;
	  /* Also optimize A % (C << N) where C is a power of 2,
	     to A & ((C << N) - 1).  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR)
	    c = TREE_OPERAND (arg1, 0);

	  if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
	    {
	      tree mask
		= fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
				   build_int_cst (TREE_TYPE (arg1), 1));
	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying "
					"X % (power of two)"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_convert_loc (loc, type, arg0),
				      fold_convert_loc (loc, type, mask));
	    }
	}
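      /* For example, unsigned X % 16 becomes X & 15, and
	 X % (4 << N) becomes X & ((4 << N) - 1).  */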
      return NULL_TREE;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      /* Since negative shift count is not well-defined,
	 don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
	return NULL_TREE;

      prec = element_precision (type);

      /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
      if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
	  && tree_to_uhwi (arg1) < prec
	  && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
	  && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
	{
	  unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
			      + tree_to_uhwi (arg1));

	  /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
	     being well defined.  */
	  if (low >= prec)
	    {
	      if (code == LROTATE_EXPR || code == RROTATE_EXPR)
		low = low % prec;
	      else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
		return omit_one_operand_loc (loc, type, build_zero_cst (type),
					     TREE_OPERAND (arg0, 0));
	      else
		low = prec - 1;
	    }

	  return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				  build_int_cst (TREE_TYPE (arg1), low));
	}
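      /* For example, (a >> 3) >> 5 becomes a >> 8.  If the combined count
	 reaches the precision, a logical shift folds to zero while an
	 arithmetic right shift is clamped to prec - 1.  */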
      /* Transform (x >> c) << c into x & (-1 << c), or transform
	 (x << c) >> c into x & ((unsigned) -1 >> c) for unsigned types.  */
      if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
	   || (TYPE_UNSIGNED (type)
	       && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
	  && tree_fits_uhwi_p (arg1)
	  && tree_to_uhwi (arg1) < prec
	  && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
	  && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
	{
	  HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
	  HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
	  tree lshift;
	  tree arg00;

	  if (low0 == low1)
	    {
	      arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));

	      lshift = build_minus_one_cst (type);
	      lshift = const_binop (code, lshift, arg1);

	      return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
	    }
	}
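      /* For example, for a 32-bit unsigned x, (x << 8) >> 8 becomes
	 x & 0x00ffffff.  */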
      /* If we have a rotate of a bit operation with the rotate count and
	 the second operand of the bit operation both constant,
	 permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == BIT_AND_EXPR
	      || TREE_CODE (arg0) == BIT_IOR_EXPR
	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, TREE_CODE (arg0), type,
				fold_build2_loc (loc, code, type,
						 TREE_OPERAND (arg0, 0), arg1),
				fold_build2_loc (loc, code, type,
						 TREE_OPERAND (arg0, 1), arg1));

      /* Two consecutive rotates adding up to some integer multiple of
	 the precision of the type can be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == RROTATE_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
			     prec) == 0)
	return TREE_OPERAND (arg0, 0);
      /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
	      (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
	 if the latter can be further optimized.  */
      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree mask = fold_build2_loc (loc, code, type,
				       fold_convert_loc (loc, type,
							 TREE_OPERAND (arg0, 1)),
				       arg1);
	  tree shift = fold_build2_loc (loc, code, type,
					fold_convert_loc (loc, type,
							  TREE_OPERAND (arg0, 0)),
					arg1);
	  tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
	  if (tem)
	    return tem;
	}

      return NULL_TREE;

    case MIN_EXPR:
      tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
      if (tem)
	return tem;
      goto associate;

    case MAX_EXPR:
      tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
      if (tem)
	return tem;
      goto associate;
    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
	return fold_convert_loc (loc, type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
	 must be evaluated.  */
      if (integer_zerop (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
	 case will be handled here.  */
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
	 means A >= Y && A != MAX, but in this case we know that
	 A < X <= MAX.  */

      if (!TREE_SIDE_EFFECTS (arg0)
	  && !TREE_SIDE_EFFECTS (arg1))
	{
	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
	  if (tem && !operand_equal_p (tem, arg0, 0))
	    return fold_build2_loc (loc, code, type, tem, arg1);

	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
	  if (tem && !operand_equal_p (tem, arg1, 0))
	    return fold_build2_loc (loc, code, type, arg0, tem);
	}

      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
	  != NULL_TREE)
	return tem;
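      /* For example, "f () && 1" drops the constant and yields "f ()",
	 while "f () && 0" still evaluates f: it becomes (f (), 0).  */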
      return NULL_TREE;

    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or true.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return fold_convert_loc (loc, type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant true, result is true, but we must
	 evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
	 TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X || X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
      /* X || !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* (X && !Y) || (!X && Y) is X ^ Y */
      if (TREE_CODE (arg0) == TRUTH_AND_EXPR
	  && TREE_CODE (arg1) == TRUTH_AND_EXPR)
	{
	  tree a0, a1, l0, l1, n0, n1;

	  a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));

	  l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));

	  n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
	  n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);

	  if ((operand_equal_p (n0, a0, 0)
	       && operand_equal_p (n1, a1, 0))
	      || (operand_equal_p (n0, a1, 0)
		  && operand_equal_p (n1, a0, 0)))
	    return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
	}

      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
	  != NULL_TREE)
	return tem;

      return NULL_TREE;
    case TRUTH_XOR_EXPR:
      /* If the second arg is constant zero, drop it.  */
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If the second arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg1))
	{
	  tem = invert_truthvalue_loc (loc, arg0);
	  return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	}
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* !X ^ X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg1);

      /* X ^ !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
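      /* For example, "b ^ 1" for a truth value b is rewritten as "!b"
	 by the inversion fold above.  */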
      return NULL_TREE;

    case EQ_EXPR:
    case NE_EXPR:
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;

      /* bool_var != 0 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	  && code == NE_EXPR)
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var == 1 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	  && code == EQ_EXPR)
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var != 1 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	  && code == NE_EXPR)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
						  TREE_TYPE (arg0), arg0));

      /* bool_var == 0 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	  && code == EQ_EXPR)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
						  TREE_TYPE (arg0), arg0));
      /* !exp != 0 becomes !exp.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
	  && code == NE_EXPR)
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* If this is an equality comparison of the address of two non-weak,
	 unaliased symbols neither of which are extern (since we do not
	 have access to attributes for externs), then we know the result.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
	{
	  /* We know that we're looking at the address of two
	     non-weak, unaliased, static _DECL nodes.

	     It is both wasteful and incorrect to call operand_equal_p
	     to compare the two ADDR_EXPR nodes.  It is wasteful in that
	     all we need to do is test pointer equality for the arguments
	     to the two ADDR_EXPR nodes.  It is incorrect to use
	     operand_equal_p as that function is NOT equivalent to a
	     C equality test.  It can in fact return false for two
	     objects which would test as equal using the C equality
	     operator.  */
	  bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
	  return constant_boolean_node (equal
					? code == EQ_EXPR : code != EQ_EXPR,
					type);
	}

      /* Similarly for a NEGATE_EXPR.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
							arg1)))
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
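      /* For example, -x == 5 becomes x == -5, provided negating the
	 constant does not overflow.  */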
      /* Similarly for a BIT_XOR_EXPR;  X ^ C1 == C2 is X == (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				fold_build2_loc (loc, BIT_XOR_EXPR,
						 TREE_TYPE (arg0),
						 fold_convert_loc (loc,
								   TREE_TYPE (arg0),
								   arg1),
						 TREE_OPERAND (arg0, 1)));

      /* Transform comparisons of the form X +- Y CMP X to Y CMP 0.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR
	   || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	   || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
									0)),
			      arg1, 0)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	      || POINTER_TYPE_P (TREE_TYPE (arg0))))
	{
	  tree val = TREE_OPERAND (arg0, 1);
	  return omit_two_operands_loc (loc, type,
					fold_build2_loc (loc, code, type,
							 val,
							 build_int_cst (TREE_TYPE (val),
									0)),
					TREE_OPERAND (arg0, 0), arg1);
	}

      /* Transform comparisons of the form C - X CMP X if C % 2 == 1.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
									1)),
			      arg1, 0)
	  && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
	return omit_two_operands_loc (loc, type,
				      code == NE_EXPR
				      ? boolean_true_node : boolean_false_node,
				      TREE_OPERAND (arg0, 1), arg1);
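      /* For example, x + y == x becomes y == 0; omit_two_operands keeps
	 any side effects of the operands that drop out.  */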
      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
      if (TREE_CODE (arg0) == ABS_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1)))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);

      /* If this is an EQ or NE comparison with zero and ARG0 is
	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
	 two operations, but the latter can be done in one less insn
	 on machines that have only two-operand insns or on which a
	 constant cannot be the first operand.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_zerop (arg1))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  if (TREE_CODE (arg00) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg00, 0)))
	    {
	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
					  arg01, TREE_OPERAND (arg00, 1));
	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				     build_int_cst (TREE_TYPE (arg0), 1));
	      return fold_build2_loc (loc, code, type,
				      fold_convert_loc (loc, TREE_TYPE (arg1),
							tem),
				      arg1);
	    }
	  else if (TREE_CODE (arg01) == LSHIFT_EXPR
		   && integer_onep (TREE_OPERAND (arg01, 0)))
	    {
	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
					  arg00, TREE_OPERAND (arg01, 1));
	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				     build_int_cst (TREE_TYPE (arg0), 1));
	      return fold_build2_loc (loc, code, type,
				      fold_convert_loc (loc, TREE_TYPE (arg1),
							tem),
				      arg1);
	    }
	}
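      /* For example, ((1 << n) & bar) == 0 is rewritten as
	 ((bar >> n) & 1) == 0.  */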
      /* If this is an NE or EQ comparison of zero against the result of a
	 signed MOD operation whose second operand is a power of 2, make
	 the MOD operation unsigned since it is simpler and equivalent.  */
      if (integer_zerop (arg1)
	  && !TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tree newtype = unsigned_type_for (TREE_TYPE (arg0));
	  tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
					 fold_convert_loc (loc, newtype,
							   TREE_OPERAND (arg0, 0)),
					 fold_convert_loc (loc, newtype,
							   TREE_OPERAND (arg0, 1)));

	  return fold_build2_loc (loc, code, type, newmod,
				  fold_convert_loc (loc, newtype, arg1));
	}
      /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
	 C1 is a valid shift constant, and C2 is a power of two, i.e.
	 a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
	     == INTEGER_CST
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && integer_zerop (arg1))
	{
	  tree itype = TREE_TYPE (arg0);
	  tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
	  prec = TYPE_PRECISION (itype);

	  /* Check for a valid shift count.  */
	  if (wi::ltu_p (arg001, prec))
	    {
	      tree arg01 = TREE_OPERAND (arg0, 1);
	      tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	      unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
	      /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
		 can be rewritten as (X & (C2 << C1)) != 0.  */
	      if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
		{
		  tem = fold_build2_loc (loc, LSHIFT_EXPR, itype,
					 arg01, arg001);
		  tem = fold_build2_loc (loc, BIT_AND_EXPR, itype,
					 arg000, tem);
		  return fold_build2_loc (loc, code, type, tem,
					  fold_convert_loc (loc, itype, arg1));
		}
	      /* Otherwise, for signed (arithmetic) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
		 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
	      else if (!TYPE_UNSIGNED (itype))
		return fold_build2_loc (loc,
					code == EQ_EXPR ? GE_EXPR : LT_EXPR,
					type, arg000,
					build_int_cst (itype, 0));
	      /* Otherwise, for unsigned (logical) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
		 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
	      else
		return omit_one_operand_loc (loc, type,
					     code == EQ_EXPR
					     ? integer_one_node
					     : integer_zero_node,
					     arg000);
	    }
	}
      /* If we have (A & C) == C where C is a power of 2, convert this into
	 (A & C) != 0.  Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
				arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
							integer_zero_node));

      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
	 bit, then fold the expression into A < 0 or A >= 0.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
      if (tem)
	return tem;
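      /* For example, for a 32-bit int A, (A & 0x80000000) != 0 tests the
	 sign bit and becomes A < 0.  */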
      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
				       TREE_TYPE (TREE_OPERAND (arg0, 1)),
				       TREE_OPERAND (arg0, 1));
	  tree dandnotc
	    = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
			       fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
			       notc);
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (dandnotc))
	    return omit_one_operand_loc (loc, type, rslt, arg0);
	}

      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
				       arg1);
	  tree candnotd
	    = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
			       TREE_OPERAND (arg0, 1),
			       fold_convert_loc (loc, TREE_TYPE (arg0), notd));
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (candnotd))
	    return omit_one_operand_loc (loc, type, rslt, arg0);
	}
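      /* For example, (A & 4) == 3 can never hold, since 3 & ~4 != 0, so
	 it folds to constant false.  */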
      /* If this is a comparison of a field, we may be able to simplify it.  */
      if ((TREE_CODE (arg0) == COMPONENT_REF
	   || TREE_CODE (arg0) == BIT_FIELD_REF)
	  /* Handle the constant case even without -O
	     to make sure the warnings are given.  */
	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
	{
	  t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
	  if (t1)
	    return t1;
	}

      /* Optimize comparisons of strlen vs zero to a compare of the
	 first character of the string vs zero.  To wit,
		strlen(ptr) == 0   =>  *ptr == 0
		strlen(ptr) != 0   =>  *ptr != 0
	 Other cases should reduce to one of these two (or a constant)
	 due to the return value of strlen being unsigned.  */
      if (TREE_CODE (arg0) == CALL_EXPR
	  && integer_zerop (arg1))
	{
	  tree fndecl = get_callee_fndecl (arg0);

	  if (fndecl
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
	      && call_expr_nargs (arg0) == 1
	      && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
		 == POINTER_TYPE)
	    {
	      tree iref = build_fold_indirect_ref_loc (loc,
						       CALL_EXPR_ARG (arg0, 0));
	      return fold_build2_loc (loc, code, type, iref,
				      build_int_cst (TREE_TYPE (iref), 0));
	    }
	}
      /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
	 of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
      if (TREE_CODE (arg0) == RSHIFT_EXPR
	  && integer_zerop (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree itype = TREE_TYPE (arg00);
	  if (wi::eq_p (arg01, element_precision (itype) - 1))
	    {
	      if (TYPE_UNSIGNED (itype))
		{
		  itype = signed_type_for (itype);
		  arg00 = fold_convert_loc (loc, itype, arg00);
		}
	      return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
				      type, arg00, build_zero_cst (itype));
	    }
	}
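      /* For example, for a 32-bit int x, (x >> 31) != 0 becomes x < 0 and
	 (x >> 31) == 0 becomes x >= 0.  */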
      /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
      if (integer_zerop (arg1)
	  && TREE_CODE (arg0) == BIT_XOR_EXPR)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg0, 1));

      /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				build_zero_cst (TREE_TYPE (arg0)));
      /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
				build_zero_cst (TREE_TYPE (arg0)));

      /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				fold_build2_loc (loc, BIT_XOR_EXPR,
						 TREE_TYPE (arg1),
						 TREE_OPERAND (arg0, 1),
						 arg1));
      /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
	 (X & C) == 0 when C is a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
				 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
				 TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type, tem,
				  fold_convert_loc (loc, TREE_TYPE (arg0),
						    arg1));
	}

      /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
	 constant C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type, arg00,
				  build_int_cst (TREE_TYPE (arg00), 0));
	}

      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
	 when C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
				 arg000, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type, tem,
				  build_int_cst (TREE_TYPE (tem), 0));
	}
      if (integer_zerop (arg1)
	  && tree_expr_nonzero_p (arg0))
	{
	  tree res = constant_boolean_node (code == NE_EXPR, type);
	  return omit_one_operand_loc (loc, type, res, arg0);
	}

      /* Fold -X op -Y as X op Y, where op is eq/ne.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, code, type,
				TREE_OPERAND (arg0, 0),
				fold_convert_loc (loc, TREE_TYPE (arg0),
						  TREE_OPERAND (arg1, 0)));
      /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0, and symmetries.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
							BIT_XOR_EXPR, itype,
							arg00, arg10),
						     arg01),
				    build_zero_cst (itype));

	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
							BIT_XOR_EXPR, itype,
							arg00, arg11),
						     arg01),
				    build_zero_cst (itype));

	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
							BIT_XOR_EXPR, itype,
							arg01, arg10),
						     arg00),
				    build_zero_cst (itype));

	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
							BIT_XOR_EXPR, itype,
							arg01, arg11),
						     arg00),
				    build_zero_cst (itype));
	}
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == BIT_XOR_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
	     operand_equal_p guarantees no side-effects so we don't need
	     to use omit_one_operand on Z.  */
	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2_loc (loc, code, type, arg00,
				    fold_convert_loc (loc, TREE_TYPE (arg00),
						      arg10));
	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2_loc (loc, code, type, arg00,
				    fold_convert_loc (loc, TREE_TYPE (arg00),
						      arg11));
	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2_loc (loc, code, type, arg01,
				    fold_convert_loc (loc, TREE_TYPE (arg01),
						      arg10));
	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2_loc (loc, code, type, arg01,
				    fold_convert_loc (loc, TREE_TYPE (arg01),
						      arg11));

	  /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
	  if (TREE_CODE (arg01) == INTEGER_CST
	      && TREE_CODE (arg11) == INTEGER_CST)
	    {
	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
				     fold_convert_loc (loc, itype, arg11));
	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
	      return fold_build2_loc (loc, code, type, tem,
				      fold_convert_loc (loc, itype, arg10));
	    }
	}
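      /* For example, (x ^ z) == (y ^ z) simplifies to x == y, and
	 (x ^ 3) == (y ^ 5) to (x ^ 6) == y.  */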
      /* Attempt to simplify equality/inequality comparisons of complex
	 values.  Only lower the comparison if the result is known or
	 can be simplified to a single scalar comparison.  */
      if ((TREE_CODE (arg0) == COMPLEX_EXPR
	   || TREE_CODE (arg0) == COMPLEX_CST)
	  && (TREE_CODE (arg1) == COMPLEX_EXPR
	      || TREE_CODE (arg1) == COMPLEX_CST))
	{
	  tree real0, imag0, real1, imag1;
	  tree rcond, icond;

	  if (TREE_CODE (arg0) == COMPLEX_EXPR)
	    {
	      real0 = TREE_OPERAND (arg0, 0);
	      imag0 = TREE_OPERAND (arg0, 1);
	    }
	  else
	    {
	      real0 = TREE_REALPART (arg0);
	      imag0 = TREE_IMAGPART (arg0);
	    }

	  if (TREE_CODE (arg1) == COMPLEX_EXPR)
	    {
	      real1 = TREE_OPERAND (arg1, 0);
	      imag1 = TREE_OPERAND (arg1, 1);
	    }
	  else
	    {
	      real1 = TREE_REALPART (arg1);
	      imag1 = TREE_IMAGPART (arg1);
	    }

	  rcond = fold_binary_loc (loc, code, type, real0, real1);
	  if (rcond && TREE_CODE (rcond) == INTEGER_CST)
	    {
	      if (integer_zerop (rcond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_false_node,
						  imag0, imag1);
		  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_true_node,
						  imag0, imag1);
		  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
		}
	    }

	  icond = fold_binary_loc (loc, code, type, imag0, imag1);
	  if (icond && TREE_CODE (icond) == INTEGER_CST)
	    {
	      if (integer_zerop (icond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_false_node,
						  real0, real1);
		  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_true_node,
						  real0, real1);
		  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
		}
	    }
	}

      return NULL_TREE;
    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;

      /* Transform comparisons of the form X +- C CMP X.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	       && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
	      || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  enum tree_code code0 = TREE_CODE (arg0);
	  int is_positive;

	  if (TREE_CODE (arg01) == REAL_CST)
	    is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
	  else
	    is_positive = tree_int_cst_sgn (arg01);

	  /* (X - c) > X becomes false.  */
	  if (code == GT_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that (X - c) > X "
					"is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Likewise (X + c) < X becomes false.  */
	  if (code == LT_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) < X is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Convert (X - c) <= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == LE_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X - c) <= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  /* Convert (X + c) >= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == GE_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) >= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }
	  if (TREE_CODE (arg01) == INTEGER_CST)
	    {
	      /* Convert X + c > X and X - c < X to true for integers.  */
	      if (code == GT_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) > X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      if (code == LT_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) < X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      /* Convert X + c <= X and X - c >= X to false for integers.  */
	      if (code == LE_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) <= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}

	      if (code == GE_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) >= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}
	    }
	}
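      /* For example, for signed x with undefined overflow, x + 1 > x
	 folds to true and x + 1 <= x to false, after emitting the
	 strict-overflow warning when requested.  */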
      /* Comparisons with the highest or lowest possible integer of
	 the specified precision will have known values.  */
      {
	tree arg1_type = TREE_TYPE (arg1);
	unsigned int prec = TYPE_PRECISION (arg1_type);

	if (TREE_CODE (arg1) == INTEGER_CST
	    && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
	  {
	    wide_int max = wi::max_value (arg1_type);
	    wide_int signed_max = wi::max_value (prec, SIGNED);
	    wide_int min = wi::min_value (arg1_type);

	    if (wi::eq_p (arg1, max))
	      switch (code)
		{
		case GT_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_zero_node, arg0);

		case GE_EXPR:
		  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

		case LE_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_one_node, arg0);

		case LT_EXPR:
		  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

		/* The GE_EXPR and LT_EXPR cases above are not normally
		   reached because of previous transformations.  */

		default:
		  break;
		}

	    else if (wi::eq_p (arg1, max - 1))
	      switch (code)
		{
		case GT_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1));
		  return fold_build2_loc (loc, EQ_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		case LE_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1));
		  return fold_build2_loc (loc, NE_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		default:
		  break;
		}

	    else if (wi::eq_p (arg1, min))
	      switch (code)
		{
		case LT_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_zero_node, arg0);

		case LE_EXPR:
		  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

		case GE_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_one_node, arg0);

		case GT_EXPR:
		  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

		default:
		  break;
		}

	    else if (wi::eq_p (arg1, min + 1))
	      switch (code)
		{
		case GE_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1));
		  return fold_build2_loc (loc, NE_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		case LT_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1));
		  return fold_build2_loc (loc, EQ_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		default:
		  break;
		}

	    else if (wi::eq_p (arg1, signed_max)
		     && TYPE_UNSIGNED (arg1_type)
		     /* We will flip the signedness of the comparison operator
			associated with the mode of arg1, so the sign bit is
			specified by this mode.  Check that arg1 is the signed
			max associated with this sign bit.  */
		     && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
		     /* signed_type does not work on pointer types.  */
		     && INTEGRAL_TYPE_P (arg1_type))
	      {
		/* The following case also applies to X < signed_max+1
		   and X >= signed_max+1 because of previous
		   transformations.  */
		if (code == LE_EXPR || code == GT_EXPR)
		  {
		    tree st = signed_type_for (arg1_type);
		    return fold_build2_loc (loc,
					    code == LE_EXPR
					    ? GE_EXPR : LT_EXPR,
					    type,
					    fold_convert_loc (loc, st, arg0),
					    build_int_cst (st, 0));
		  }
	      }
	  }
      }
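      /* For example, for unsigned char x: x > 255 folds to false,
	 x <= 255 to true, and x > 254 becomes x == 255.  */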
      /* If we are comparing an ABS_EXPR with a constant, we can
	 convert all the cases into explicit comparisons, but they may
	 well not be faster than doing the ABS and one comparison.
	 But ABS (X) <= C is a range comparison, which becomes a subtraction
	 and a comparison, and is probably faster.  */
      if (code == LE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == ABS_EXPR
	  && ! TREE_SIDE_EFFECTS (arg0)
	  && (0 != (tem = negate_expr (arg1)))
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				build2 (GE_EXPR, type,
					TREE_OPERAND (arg0, 0), tem),
				build2 (LE_EXPR, type,
					TREE_OPERAND (arg0, 0), arg1));

      /* Convert ABS_EXPR<x> >= 0 to true.  */
      strict_overflow_p = false;
      if (code == GE_EXPR
	  && (integer_zerop (arg1)
	      || (! HONOR_NANS (element_mode (arg0))
		  && real_zerop (arg1)))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand_loc (loc, type,
				       constant_boolean_node (true, type),
				       arg0);
	}

      /* Convert ABS_EXPR<x> < 0 to false.  */
      strict_overflow_p = false;
      if (code == LT_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand_loc (loc, type,
				       constant_boolean_node (false, type),
				       arg0);
	}
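      /* For example, abs (x) <= 7 becomes x >= -7 && x <= 7.  */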
      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
	 and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TREE_CODE (arg1) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (arg1, 0)))
	return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
			   build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
				   TREE_OPERAND (arg1, 1)),
			   build_zero_cst (TREE_TYPE (arg0)));

      /* Similarly for X < (cast) (1 << Y).  But cast can't be narrowing,
	 otherwise Y might be >= # of bits in X's type and thus e.g.
	 (unsigned char) (1 << Y) for Y 15 might be 0.
	 If the cast is widening, then 1 << Y should have unsigned type,
	 otherwise if Y is number of bits in the signed shift type minus 1,
	 we can't optimize this.  E.g. (unsigned long long) (1 << Y) for Y
	 31 might be 0xffffffff80000000.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && CONVERT_EXPR_P (arg1)
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
	  && (element_precision (TREE_TYPE (arg1))
	      >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
	  && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
	      || (element_precision (TREE_TYPE (arg1))
		  == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
	  && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
	{
	  tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
			TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
	  return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
			     fold_convert_loc (loc, TREE_TYPE (arg0), tem),
			     build_zero_cst (TREE_TYPE (arg0)));
	}

      return NULL_TREE;
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
	{
	  t1 = fold_relational_const (code, type, arg0, arg1);
	  if (t1 != NULL_TREE)
	    return t1;
	}

      /* If the first operand is NaN, the result is constant.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand_loc (loc, type, t1, arg1);
	}

      /* If the second operand is NaN, the result is constant.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand_loc (loc, type, t1, arg0);
	}
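      /* For example, __builtin_isunordered (x, __builtin_nan ("")) folds
	 to 1 here, while a LTGT comparison against a NaN constant folds
	 to 0 (only when trapping math does not forbid it).  */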
      /* Simplify unordered comparison of something with itself.  */
      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (1, type);

      if (code == LTGT_EXPR
	  && !flag_trapping_math
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (0, type);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
	tree targ0 = strip_float_extensions (arg0);
	tree targ1 = strip_float_extensions (arg1);
	tree newtype = TREE_TYPE (targ0);

	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	  newtype = TREE_TYPE (targ1);

	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, newtype, targ0),
				  fold_convert_loc (loc, newtype, targ1));
      }
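      /* For example, (double) f1 < (double) f2 with float f1 and f2 is
	 compared directly at float precision.  */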
      return NULL_TREE;

    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
	 nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
	return NULL_TREE;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
				 : fold_convert_loc (loc, type, arg1);
      return pedantic_non_lvalue_loc (loc, tem);

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg0) == REAL_CST
	   && TREE_CODE (arg1) == REAL_CST)
	  || (TREE_CODE (arg0) == INTEGER_CST
	      && TREE_CODE (arg1) == INTEGER_CST))
	return build_complex (type, arg0, arg1);
      return NULL_TREE;

    case ASSERT_EXPR:
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
      gcc_unreachable ();
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
	if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg0, elts)
	    || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
	  return NULL_TREE;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
					  ? NOP_EXPR : FIX_TRUNC_EXPR,
					  TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
	unsigned int out, ofs, scale;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
	if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 4);
	if (!vec_cst_ctor_to_array (arg0, elts)
	    || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
	  return NULL_TREE;

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;
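	/* Illustrative example: VEC_WIDEN_MULT_EVEN_EXPR on the V4HI
	   constants {1,2,3,4} and {5,6,7,8} multiplies lanes 0 and 2 of
	   each input and yields the V2SI constant {5, 21}.  */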
	for (out = 0; out < nelts; out++)
	  {
	    unsigned int in1 = (out << scale) + ofs;
	    unsigned int in2 = in1 + nelts * 2;
	    tree t1, t2;

	    t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
	    t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    elts[out] = const_binop (MULT_EXPR, t1, t2);
	    if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
   a LABEL_EXPR; otherwise return NULL_TREE.  Do not check the subtrees
   of GOTO_EXPR.  */

static tree
contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      return *tp;

    case GOTO_EXPR:
      *walk_subtrees = 0;

      /* ... fall through ...  */

    default:
      return NULL_TREE;
    }
}

/* Return whether the sub-tree ST contains a label which is accessible from
   outside the sub-tree.  */

static bool
contains_label_p (tree st)
{
  return
   (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
}
/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_ternary_loc (location_t loc, enum tree_code code, tree type,
		  tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 3);

  /* If this is a commutative operation, and OP0 is a constant, move it
     to OP1 to reduce the number of tests below.  */
  if (commutative_ternary_tree_code (code)
      && tree_swap_operands_p (op0, op1, true))
    return fold_build3_loc (loc, code, type, op1, op0, op2);

  tem = generic_simplify (loc, code, type, op0, op1, op2);
  if (tem)
    return tem;

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any case,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */
  if (op0)
    {
      arg0 = op0;
      STRIP_NOPS (arg0);
    }

  if (op1)
    {
      arg1 = op1;
      STRIP_NOPS (arg1);
    }

  if (op2)
    {
      arg2 = op2;
      STRIP_NOPS (arg2);
    }

  switch (code)
    {
    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
	{
	  unsigned HOST_WIDE_INT idx;
	  tree field, value;
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx,
				    field, value)
	    if (field == arg1)
	      return value;
	}
      return NULL_TREE;
    case COND_EXPR:
    case VEC_COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
         so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
        {
          tree unused_op = integer_zerop (arg0) ? op1 : op2;
          tem = integer_zerop (arg0) ? op2 : op1;
          /* Only optimize constant conditions when the selected branch
             has the same type as the COND_EXPR.  This avoids optimizing
             away "c ? x : throw", where the throw has a void type.
             Avoid throwing away that operand which contains label.  */
          if ((!TREE_SIDE_EFFECTS (unused_op)
               || !contains_label_p (unused_op))
              && (! VOID_TYPE_P (TREE_TYPE (tem))
                  || VOID_TYPE_P (type)))
            return pedantic_non_lvalue_loc (loc, tem);
        }
      else if (TREE_CODE (arg0) == VECTOR_CST)
        {
          if ((TREE_CODE (arg1) == VECTOR_CST
               || TREE_CODE (arg1) == CONSTRUCTOR)
              && (TREE_CODE (arg2) == VECTOR_CST
                  || TREE_CODE (arg2) == CONSTRUCTOR))
            {
              unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
              unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
              gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
              for (i = 0; i < nelts; i++)
                {
                  tree val = VECTOR_CST_ELT (arg0, i);
                  if (integer_all_onesp (val))
                    sel[i] = i;
                  else if (integer_zerop (val))
                    sel[i] = nelts + i;
                  else /* Currently unreachable.  */
                    return NULL_TREE;
                }

              tree t = fold_vec_perm (type, arg1, arg2, sel);
              if (t != NULL_TREE)
                return t;
            }
        }
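      /* Example of the VECTOR_CST-condition fold above (a sketch,
         assuming four-element vectors; not from the original source):
         a constant mask of {-1, 0, -1, 0} becomes the selector
         {0, nelts+1, 2, nelts+3}, i.e. elements are taken alternately
         from the two operands, and the result is built by
         fold_vec_perm.  */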
      /* If we have A op B ? A : C, we may be able to convert this to a
         simpler expression, depending on the operation and the values
         of B and C.  Signed zeros prevent all of these transformations,
         for reasons given above each one.

         Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
          && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
                                             arg1, TREE_OPERAND (arg0, 1))
          && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
        {
          tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
          if (tem)
            return tem;
        }

      if (COMPARISON_CLASS_P (arg0)
          && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
                                             op2,
                                             TREE_OPERAND (arg0, 1))
          && !HONOR_SIGNED_ZEROS (element_mode (op2)))
        {
          location_t loc0 = expr_location_or (arg0, loc);
          tem = fold_invert_truthvalue (loc0, arg0);
          if (tem && COMPARISON_CLASS_P (tem))
            {
              tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
              if (tem)
                return tem;
            }
        }

      /* If the second operand is simpler than the third, swap them
         since that produces better jump optimization results.  */
      if (truth_value_p (TREE_CODE (arg0))
          && tree_swap_operands_p (op1, op2, false))
        {
          location_t loc0 = expr_location_or (arg0, loc);
          /* See if this can be inverted.  If it can't, possibly because
             it was a floating-point inequality comparison, don't do
             anything.  */
          tem = fold_invert_truthvalue (loc0, arg0);
          if (tem)
            return fold_build3_loc (loc, code, type, tem, op2, op1);
        }

      /* Convert A ? 1 : 0 to simply A.  */
      if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
                                 : (integer_onep (op1)
                                    && !VECTOR_TYPE_P (type)))
          && integer_zerop (op2)
          /* If we try to convert OP0 to our type, the
             call to fold will try to move the conversion inside
             a COND, which will recurse.  In that case, the COND_EXPR
             is probably the best choice, so leave it alone.  */
          && type == TREE_TYPE (arg0))
        return pedantic_non_lvalue_loc (loc, arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
         over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
          && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
                                    : (integer_onep (op2)
                                       && !VECTOR_TYPE_P (type)))
          && truth_value_p (TREE_CODE (arg0)))
        return pedantic_non_lvalue_loc (loc,
                                        fold_convert_loc (loc, type,
                                          invert_truthvalue_loc (loc, arg0)));
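      /* For illustration (not from the original source): the two
         rewrites above turn "(c != 0) ? 1 : 0" into "c != 0" converted
         to TYPE, and "(c != 0) ? 0 : 1" into the inverted test
         "c == 0".  */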
      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
      if (TREE_CODE (arg0) == LT_EXPR
          && integer_zerop (TREE_OPERAND (arg0, 1))
          && integer_zerop (op2)
          && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
        {
          /* sign_bit_p looks through both zero and sign extensions,
             but for this optimization only sign extensions are
             usable.  */
          tree tem2 = TREE_OPERAND (arg0, 0);
          while (tem != tem2)
            {
              if (TREE_CODE (tem2) != NOP_EXPR
                  || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
                {
                  tem = NULL_TREE;
                  break;
                }
              tem2 = TREE_OPERAND (tem2, 0);
            }
          /* sign_bit_p only checks ARG1 bits within A's precision.
             If <sign bit of A> has wider type than A, bits outside
             of A's precision in <sign bit of A> need to be checked.
             If they are all 0, this optimization needs to be done
             in unsigned A's type, if they are all 1 in signed A's type,
             otherwise this can't be done.  */
          if (tem
              && TYPE_PRECISION (TREE_TYPE (tem))
                 < TYPE_PRECISION (TREE_TYPE (arg1))
              && TYPE_PRECISION (TREE_TYPE (tem))
                 < TYPE_PRECISION (type))
            {
              int inner_width, outer_width;
              tree tem_type;

              inner_width = TYPE_PRECISION (TREE_TYPE (tem));
              outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
              if (outer_width > TYPE_PRECISION (type))
                outer_width = TYPE_PRECISION (type);

              wide_int mask = wi::shifted_mask
                (inner_width, outer_width - inner_width, false,
                 TYPE_PRECISION (TREE_TYPE (arg1)));

              wide_int common = mask & arg1;
              if (common == mask)
                {
                  tem_type = signed_type_for (TREE_TYPE (tem));
                  tem = fold_convert_loc (loc, tem_type, tem);
                }
              else if (common == 0)
                {
                  tem_type = unsigned_type_for (TREE_TYPE (tem));
                  tem = fold_convert_loc (loc, tem_type, tem);
                }
              else
                tem = NULL_TREE;
            }

          if (tem)
            return
              fold_convert_loc (loc, type,
                                fold_build2_loc (loc, BIT_AND_EXPR,
                                                 TREE_TYPE (tem), tem,
                                                 fold_convert_loc (loc,
                                                                   TREE_TYPE (tem),
                                                                   arg1)));
        }
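      /* Worked example for the sign-bit transformation above
         (illustrative, assuming 32-bit int): "x < 0 ? (x & INT_MIN) : 0"
         folds to "x & INT_MIN", since the sign bit is set exactly
         when x < 0.  */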
      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
         already handled above.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_zerop (op2)
          && integer_pow2p (arg1))
        {
          tree tem = TREE_OPERAND (arg0, 0);
          STRIP_NOPS (tem);
          if (TREE_CODE (tem) == RSHIFT_EXPR
              && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
              && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
                 tree_to_uhwi (TREE_OPERAND (tem, 1)))
            return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                    TREE_OPERAND (tem, 0), arg1);
        }
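      /* Example (illustrative): with N == 3,
         "((a >> 3) & 1) ? (1 << 3) : 0" folds to "a & (1 << 3)";
         the tested bit is masked directly out of A.  */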
      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
         is probably obsolete because the first operand should be a
         truth value (that's why we have the two cases above), but let's
         leave it in until we can confirm this for all front-ends.  */
      if (integer_zerop (op2)
          && TREE_CODE (arg0) == NE_EXPR
          && integer_zerop (TREE_OPERAND (arg0, 1))
          && integer_pow2p (arg1)
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              arg1, OEP_ONLY_CONST))
        return pedantic_non_lvalue_loc (loc,
                                        fold_convert_loc (loc, type,
                                          TREE_OPERAND (arg0, 0)));

      /* Disable the transformations below for vectors, since
         fold_binary_op_with_conditional_arg may undo them immediately,
         yielding an infinite loop.  */
      if (code == VEC_COND_EXPR)
        return NULL_TREE;

      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (op2)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (arg1))
          && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
        return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
                                                           : TRUTH_ANDIF_EXPR,
                                type, fold_convert_loc (loc, type, arg0), arg1);

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (arg1))
          && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
        {
          location_t loc0 = expr_location_or (arg0, loc);
          /* Only perform transformation if ARG0 is easily inverted.  */
          tem = fold_invert_truthvalue (loc0, arg0);
          if (tem)
            return fold_build2_loc (loc, code == VEC_COND_EXPR
                                         ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
                                    type, fold_convert_loc (loc, type, tem),
                                    arg1);
        }

      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
      if (integer_zerop (arg1)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (op2))
          && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
        {
          location_t loc0 = expr_location_or (arg0, loc);
          /* Only perform transformation if ARG0 is easily inverted.  */
          tem = fold_invert_truthvalue (loc0, arg0);
          if (tem)
            return fold_build2_loc (loc, code == VEC_COND_EXPR
                                         ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
                                    type, fold_convert_loc (loc, type, tem),
                                    op2);
        }

      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (op2))
          && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
        return fold_build2_loc (loc, code == VEC_COND_EXPR
                                     ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
                                type, fold_convert_loc (loc, type, arg0), op2);

      return NULL_TREE;
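      /* Summary of the four truth-value rewrites above for scalar
         types (illustrative):

           a ? b : 0  =>  a && b      a ? b : 1  =>  !a || b
           a ? 0 : b  =>  !a && b     a ? 1 : b  =>  a || b

         The VEC_COND_EXPR variants use the bitwise codes BIT_AND_EXPR
         and BIT_IOR_EXPR instead of the short-circuiting TRUTH_*
         codes.  */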
    case CALL_EXPR:
      /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
         of fold_ternary on them.  */
      gcc_unreachable ();

    case BIT_FIELD_REF:
      if ((TREE_CODE (arg0) == VECTOR_CST
           || (TREE_CODE (arg0) == CONSTRUCTOR
               && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
          && (type == TREE_TYPE (TREE_TYPE (arg0))
              || (TREE_CODE (type) == VECTOR_TYPE
                  && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
        {
          tree eltype = TREE_TYPE (TREE_TYPE (arg0));
          unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
          unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
          unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);

          if (n != 0
              && (idx % width) == 0
              && (n % width) == 0
              && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
            {
              idx = idx / width;
              n = n / width;

              if (TREE_CODE (arg0) == VECTOR_CST)
                {
                  if (n == 1)
                    return VECTOR_CST_ELT (arg0, idx);

                  tree *vals = XALLOCAVEC (tree, n);
                  for (unsigned i = 0; i < n; ++i)
                    vals[i] = VECTOR_CST_ELT (arg0, idx + i);
                  return build_vector (type, vals);
                }

              /* Constructor elements can be subvectors.  */
              unsigned HOST_WIDE_INT k = 1;
              if (CONSTRUCTOR_NELTS (arg0) != 0)
                {
                  tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
                  if (TREE_CODE (cons_elem) == VECTOR_TYPE)
                    k = TYPE_VECTOR_SUBPARTS (cons_elem);
                }

              /* We keep an exact subset of the constructor elements.  */
              if ((idx % k) == 0 && (n % k) == 0)
                {
                  if (CONSTRUCTOR_NELTS (arg0) == 0)
                    return build_constructor (type, NULL);
                  idx /= k;
                  n /= k;
                  if (n == 1)
                    {
                      if (idx < CONSTRUCTOR_NELTS (arg0))
                        return CONSTRUCTOR_ELT (arg0, idx)->value;
                      return build_zero_cst (type);
                    }

                  vec<constructor_elt, va_gc> *vals;
                  vec_alloc (vals, n);
                  for (unsigned i = 0;
                       i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
                       ++i)
                    CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
                                            CONSTRUCTOR_ELT
                                              (arg0, idx + i)->value);
                  return build_constructor (type, vals);
                }
              /* The bitfield references a single constructor element.  */
              else if (idx + n <= (idx / k + 1) * k)
                {
                  if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
                    return build_zero_cst (type);
                  else if (n == k)
                    return CONSTRUCTOR_ELT (arg0, idx / k)->value;
                  else
                    return fold_build3_loc (loc, code, type,
                      CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
                      build_int_cst (TREE_TYPE (op2), (idx % k) * width));
                }
            }
        }

      /* A bit-field-ref that referenced the full argument can be stripped.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
          && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
          && integer_zerop (op2))
        return fold_convert_loc (loc, type, arg0);

      /* On constants we can use native encode/interpret to constant
         fold (nearly) all BIT_FIELD_REFs.  */
      if (CONSTANT_CLASS_P (arg0)
          && can_native_interpret_type_p (type)
          && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
          /* This limitation should not be necessary, we just need to
             round this up to mode size.  */
          && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
          /* Need bit-shifting of the buffer to relax the following.  */
          && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
        {
          unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
          unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
          unsigned HOST_WIDE_INT clen;
          clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
          /* ??? We cannot tell native_encode_expr to start at
             some random byte only.  So limit us to a reasonable amount
             of work.  */
          if (clen <= 4096)
            {
              unsigned char *b = XALLOCAVEC (unsigned char, clen);
              unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
              if (len > 0
                  && len * BITS_PER_UNIT >= bitpos + bitsize)
                {
                  tree v = native_interpret_expr (type,
                                                  b + bitpos / BITS_PER_UNIT,
                                                  bitsize / BITS_PER_UNIT);
                  if (v)
                    return v;
                }
            }
        }

      return NULL_TREE;
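      /* Example of the native encode/interpret fold above
         (illustrative): on a little-endian target,
         BIT_FIELD_REF <0x11223344, 8, 8> encodes the constant as the
         bytes 44 33 22 11, re-interprets the byte at bit position 8,
         and yields 0x33.  */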
    case FMA_EXPR:
      /* For integers we can decompose the FMA if possible.  */
      if (TREE_CODE (arg0) == INTEGER_CST
          && TREE_CODE (arg1) == INTEGER_CST)
        return fold_build2_loc (loc, PLUS_EXPR, type,
                                const_binop (MULT_EXPR, arg0, arg1), arg2);
      if (integer_zerop (arg2))
        return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);

      return fold_fma (loc, type, arg0, arg1, arg2);
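      /* Examples for the FMA_EXPR folds above (illustrative):
         FMA_EXPR <4, 5, x> becomes 20 + x via const_binop, and
         FMA_EXPR <a, b, 0> degenerates to a * b.  */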
    case VEC_PERM_EXPR:
      if (TREE_CODE (arg2) == VECTOR_CST)
        {
          unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
          unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
          unsigned char *sel2 = sel + nelts;
          bool need_mask_canon = false;
          bool need_mask_canon2 = false;
          bool all_in_vec0 = true;
          bool all_in_vec1 = true;
          bool maybe_identity = true;
          bool single_arg = (op0 == op1);
          bool changed = false;

          mask2 = 2 * nelts - 1;
          mask = single_arg ? (nelts - 1) : mask2;
          gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
          for (i = 0; i < nelts; i++)
            {
              tree val = VECTOR_CST_ELT (arg2, i);
              if (TREE_CODE (val) != INTEGER_CST)
                return NULL_TREE;

              /* Make sure that the perm value is in an acceptable
                 range.  */
              wide_int t = val;
              need_mask_canon |= wi::gtu_p (t, mask);
              need_mask_canon2 |= wi::gtu_p (t, mask2);
              sel[i] = t.to_uhwi () & mask;
              sel2[i] = t.to_uhwi () & mask2;

              if (sel[i] < nelts)
                all_in_vec1 = false;
              else
                all_in_vec0 = false;

              if ((sel[i] & (nelts-1)) != i)
                maybe_identity = false;
            }

          if (maybe_identity)
            {
              if (all_in_vec0)
                return op0;
              if (all_in_vec1)
                return op1;
            }

          if (all_in_vec0)
            op1 = op0;
          else if (all_in_vec1)
            {
              op0 = op1;
              for (i = 0; i < nelts; i++)
                sel[i] -= nelts;
              need_mask_canon = true;
            }

          if ((TREE_CODE (op0) == VECTOR_CST
               || TREE_CODE (op0) == CONSTRUCTOR)
              && (TREE_CODE (op1) == VECTOR_CST
                  || TREE_CODE (op1) == CONSTRUCTOR))
            {
              tree t = fold_vec_perm (type, op0, op1, sel);
              if (t != NULL_TREE)
                return t;
            }

          if (op0 == op1 && !single_arg)
            changed = true;

          /* Some targets are deficient and fail to expand a single
             argument permutation while still allowing an equivalent
             2-argument version.  */
          if (need_mask_canon && arg2 == op2
              && !can_vec_perm_p (TYPE_MODE (type), false, sel)
              && can_vec_perm_p (TYPE_MODE (type), false, sel2))
            {
              need_mask_canon = need_mask_canon2;
              sel = sel2;
            }

          if (need_mask_canon && arg2 == op2)
            {
              tree *tsel = XALLOCAVEC (tree, nelts);
              tree eltype = TREE_TYPE (TREE_TYPE (arg2));
              for (i = 0; i < nelts; i++)
                tsel[i] = build_int_cst (eltype, sel[i]);
              op2 = build_vector (TREE_TYPE (arg2), tsel);
              changed = true;
            }

          if (changed)
            return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
        }
      return NULL_TREE;
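      /* Example for the VEC_PERM_EXPR folding above (a sketch,
         assuming four-element vectors; not from the original source):
         VEC_PERM_EXPR <a, a, {0, 1, 2, 3}> has an identity selector
         drawing every element from the first operand and folds to a;
         a constant selector drawing only from the second operand makes
         op0 unused and is canonicalized accordingly.  */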
    default:
      return NULL_TREE;
    } /* switch (code) */
}

/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;
  location_t loc = EXPR_LOCATION (expr);

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  /* CALL_EXPR-like objects with variable numbers of operands are
     treated specially.  */
  if (kind == tcc_vl_exp)
    {
      if (code == CALL_EXPR)
        {
          tem = fold_call_expr (loc, expr, false);
          return tem ? tem : expr;
        }
      return expr;
    }

  if (IS_EXPR_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);
      tree op0, op1, op2;

      switch (TREE_CODE_LENGTH (code))
        {
        case 1:
          op0 = TREE_OPERAND (t, 0);
          tem = fold_unary_loc (loc, code, type, op0);
          return tem ? tem : expr;
        case 2:
          op0 = TREE_OPERAND (t, 0);
          op1 = TREE_OPERAND (t, 1);
          tem = fold_binary_loc (loc, code, type, op0, op1);
          return tem ? tem : expr;
        case 3:
          op0 = TREE_OPERAND (t, 0);
          op1 = TREE_OPERAND (t, 1);
          op2 = TREE_OPERAND (t, 2);
          tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
          return tem ? tem : expr;
        default:
          break;
        }
    }

  switch (code)
    {
    case ARRAY_REF:
      {
        tree op0 = TREE_OPERAND (t, 0);
        tree op1 = TREE_OPERAND (t, 1);

        if (TREE_CODE (op1) == INTEGER_CST
            && TREE_CODE (op0) == CONSTRUCTOR
            && ! type_contains_placeholder_p (TREE_TYPE (op0)))
          {
            vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
            unsigned HOST_WIDE_INT end = vec_safe_length (elts);
            unsigned HOST_WIDE_INT begin = 0;

            /* Find a matching index by means of a binary search.  */
            while (begin != end)
              {
                unsigned HOST_WIDE_INT middle = (begin + end) / 2;
                tree index = (*elts)[middle].index;

                if (TREE_CODE (index) == INTEGER_CST
                    && tree_int_cst_lt (index, op1))
                  begin = middle + 1;
                else if (TREE_CODE (index) == INTEGER_CST
                         && tree_int_cst_lt (op1, index))
                  end = middle;
                else if (TREE_CODE (index) == RANGE_EXPR
                         && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
                  begin = middle + 1;
                else if (TREE_CODE (index) == RANGE_EXPR
                         && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
                  end = middle;
                else
                  return (*elts)[middle].value;
              }
          }

        return t;
      }
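      /* Example (illustrative): given a CONSTRUCTOR for
         "const int a[4] = { 10, 20, 30, 40 }", an ARRAY_REF with
         constant index 2 is resolved by the binary search above to
         the element value 30.  */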
    case CONSTRUCTOR:
      {
        /* Return a VECTOR_CST if possible.  */
        tree type = TREE_TYPE (t);
        if (TREE_CODE (type) != VECTOR_TYPE)
          return t;

        tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
        unsigned HOST_WIDE_INT idx, pos = 0;
        tree value;

        FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
          {
            if (!CONSTANT_CLASS_P (value))
              return t;
            if (TREE_CODE (value) == VECTOR_CST)
              {
                for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
                  vec[pos++] = VECTOR_CST_ELT (value, i);
              }
            else
              vec[pos++] = value;
          }
        for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
          vec[pos] = build_zero_cst (TREE_TYPE (type));

        return build_vector (type, vec);
      }

    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    default:
      return t;
    } /* switch (code) */
}
#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (const_tree, struct md5_ctx *,
                                hash_table<pointer_hash<const tree_node> > *);
static void fold_check_failed (const_tree, const_tree);
void print_fold_checksum (const_tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */

tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  hash_table<pointer_hash<const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before);
  ht.empty ();

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}

void
print_fold_checksum (const_tree expr)
{
  struct md5_ctx ctx;
  unsigned char checksum[16], cnt;
  hash_table<pointer_hash<const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum);
  for (cnt = 0; cnt < 16; ++cnt)
    fprintf (stderr, "%02x", checksum[cnt]);
  putc ('\n', stderr);
}

static void
fold_check_failed (const_tree expr ATTRIBUTE_UNUSED,
                   const_tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}
static void
fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
                    hash_table<pointer_hash<const tree_node> > *ht)
{
  const tree_node **slot;
  enum tree_code code;
  union tree_node buf;
  int i, len;

 recursive_label:
  if (expr == NULL)
    return;
  slot = ht->find_slot (expr, INSERT);
  if (*slot)
    return;
  *slot = expr;
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
      expr = (tree) &buf;
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
           && (TYPE_POINTER_TO (expr)
               || TYPE_REFERENCE_TO (expr)
               || TYPE_CACHED_VALUES_P (expr)
               || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
               || TYPE_NEXT_VARIANT (expr)))
    {
      /* Allow these fields to be modified.  */
      tree tmp;
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = tmp = (tree) &buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
      TYPE_POINTER_TO (tmp) = NULL;
      TYPE_REFERENCE_TO (tmp) = NULL;
      TYPE_NEXT_VARIANT (tmp) = NULL;
      if (TYPE_CACHED_VALUES_P (tmp))
        {
          TYPE_CACHED_VALUES_P (tmp) = 0;
          TYPE_CACHED_VALUES (tmp) = NULL;
        }
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST
      && code != SSA_NAME
      && CODE_CONTAINS_STRUCT (code, TS_COMMON))
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
        {
        case STRING_CST:
          md5_process_bytes (TREE_STRING_POINTER (expr),
                             TREE_STRING_LENGTH (expr), ctx);
          break;
        case COMPLEX_CST:
          fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
          fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
          break;
        case VECTOR_CST:
          for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
            fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
          break;
        default:
          break;
        }
      break;
    case tcc_exceptional:
      switch (code)
        {
        case TREE_LIST:
          fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
          fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
          expr = TREE_CHAIN (expr);
          goto recursive_label;
          break;
        case TREE_VEC:
          for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
            fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
          break;
        default:
          break;
        }
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
    case tcc_vl_exp:
      len = TREE_OPERAND_LENGTH (expr);
      for (i = 0; i < len; ++i)
        fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
        {
          fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
          fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
          fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
          fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
          fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
        }
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
        {
          if (TREE_CODE (expr) == FUNCTION_DECL)
            {
              fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
              fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
            }
          fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
        }
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
        fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
          || SCALAR_FLOAT_TYPE_P (expr))
        {
          fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
          fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
        }
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
          || TREE_CODE (expr) == UNION_TYPE
          || TREE_CODE (expr) == QUAL_UNION_TYPE)
        fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}
/* Helper function for outputting the checksum of a tree T.  When
   debugging with gdb, you can "define mynext" to be "next" followed
   by "call debug_fold_checksum (op0)", then just trace down till the
   outermost call and compare the checksums.  */

DEBUG_FUNCTION void
debug_fold_checksum (const_tree t)
{
  int i;
  unsigned char checksum[16];
  struct md5_ctx ctx;
  hash_table<pointer_hash<const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (t, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum);

  for (i = 0; i < 16; i++)
    fprintf (stderr, "%d ", checksum[i]);

  fprintf (stderr, "\n");
}

#endif
/* Fold a unary tree expression with code CODE of type TYPE with an
   operand OP0.  LOC is the location of the resulting expression.
   Return a folded expression if successful.  Otherwise, return a tree
   expression with code CODE of type TYPE with an operand OP0.  */

tree
fold_build1_stat_loc (location_t loc,
                      enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before[16], checksum_after[16];
  struct md5_ctx ctx;
  hash_table<pointer_hash<const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before);
  ht.empty ();
#endif

  tem = fold_unary_loc (loc, code, type, op0);
  if (!tem)
    tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (op0, tem);
#endif
  return tem;
}
/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting
   expression.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with operands
   OP0 and OP1.  */

tree
fold_build2_stat_loc (location_t loc,
                      enum tree_code code, tree type, tree op0, tree op1
                      MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_after_op0[16],
                checksum_after_op1[16];
  struct md5_ctx ctx;
  hash_table<pointer_hash<const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  ht.empty ();
#endif

  tem = fold_binary_loc (loc, code, type, op0, op1);
  if (!tem)
    tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  ht.empty ();

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op1);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
#endif
  return tem;
}
/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */

tree
fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
                      tree op0, tree op1, tree op2 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_before_op2[16],
                checksum_after_op0[16],
                checksum_after_op1[16],
                checksum_after_op2[16];
  struct md5_ctx ctx;
  hash_table<pointer_hash<const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  ht.empty ();
#endif

  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
  if (!tem)
    tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  ht.empty ();

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  ht.empty ();

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op2);

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}
/* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
   arguments in ARGARRAY, and a null static chain.
   Return a folded expression if successful.  Otherwise, return a CALL_EXPR
   of type TYPE from the given operands as constructed by build_call_array.  */

tree
fold_build_call_array_loc (location_t loc, tree type, tree fn,
                           int nargs, tree *argarray)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_fn[16],
                checksum_before_arglist[16],
                checksum_after_fn[16],
                checksum_after_arglist[16];
  struct md5_ctx ctx;
  hash_table<pointer_hash<const tree_node> > ht (32);
  int i;

  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_fn);
  ht.empty ();

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_arglist);
  ht.empty ();
#endif

  tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_fn);
  ht.empty ();

  if (memcmp (checksum_before_fn, checksum_after_fn, 16))
    fold_check_failed (fn, tem);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_arglist);

  if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
    fold_check_failed (NULL_TREE, tem);
#endif
  return tem;
}
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;

tree
fold_build1_initializer_loc (location_t loc, enum tree_code code,
                             tree type, tree op)
{
  tree result;

  START_FOLD_INIT;
  result = fold_build1_loc (loc, code, type, op);
  END_FOLD_INIT;

  return result;
}

tree
fold_build2_initializer_loc (location_t loc, enum tree_code code,
                             tree type, tree op0, tree op1)
{
  tree result;

  START_FOLD_INIT;
  result = fold_build2_loc (loc, code, type, op0, op1);
  END_FOLD_INIT;

  return result;
}

tree
fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
                                       int nargs, tree *argarray)
{
  tree result;

  START_FOLD_INIT;
  result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
  END_FOLD_INIT;

  return result;
}

#undef START_FOLD_INIT
#undef END_FOLD_INIT
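/* Usage note (illustrative): these wrappers let front ends fold a
   static initializer such as

     static const double d = 1.0 / 3.0;

   even under -frounding-math or -ftrapping-math, since run-time
   rounding modes and traps cannot apply to a translation-time
   initializer.  */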
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */

static int
multiple_of_p (tree type, const_tree top, const_tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
         a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
        return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
        {
          tree op1, t1;

          op1 = TREE_OPERAND (top, 1);
          /* const_binop may not detect overflow correctly,
             so check for it explicitly here.  */
          if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
              && 0 != (t1 = fold_convert (type,
                                          const_binop (LSHIFT_EXPR,
                                                       size_one_node,
                                                       op1)))
              && !TREE_OVERFLOW (t1))
            return multiple_of_p (type, t1, bottom);
        }
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
          || (TYPE_PRECISION (type)
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
        return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case COND_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
              && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
          || integer_zerop (bottom)
          || (TYPE_UNSIGNED (type)
              && (tree_int_cst_sgn (top) < 0
                  || tree_int_cst_sgn (bottom) < 0)))
        return 0;
      return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
                                SIGNED);

    default:
      return 0;
    }
}
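/* Example call (a sketch, with I some sizetype expression; not from
   the original source):

     multiple_of_p (sizetype,
                    size_binop (MULT_EXPR, i, size_int (8)),
                    size_int (4))

   returns nonzero via the MULT_EXPR case, because the constant
   factor 8 is itself a multiple of 4.  */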
/* Return true if CODE or TYPE is known to be non-negative.  */

static bool
tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
{
  if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
      && truth_value_p (code))
    /* Truth values evaluate to 0 or 1, which is nonnegative unless we
       have a signed:1 type (where the value is -1 and 0).  */
    return true;

  return false;
}
/* Return true if (CODE OP0) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
                                bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
         ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!INTEGRAL_TYPE_P (type))
        return true;
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          *strict_overflow_p = true;
          return true;
        }
      break;

    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
                                            strict_overflow_p);

    CASE_CONVERT:
      {
        tree inner_type = TREE_TYPE (op0);
        tree outer_type = type;

        if (TREE_CODE (outer_type) == REAL_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_warnv_p (op0,
                                                    strict_overflow_p);
            if (INTEGRAL_TYPE_P (inner_type))
              {
                if (TYPE_UNSIGNED (inner_type))
                  return true;
                return tree_expr_nonnegative_warnv_p (op0,
                                                      strict_overflow_p);
              }
          }
        else if (INTEGRAL_TYPE_P (outer_type))
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_warnv_p (op0,
                                                    strict_overflow_p);
            if (INTEGRAL_TYPE_P (inner_type))
              return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
                     && TYPE_UNSIGNED (inner_type);
          }
      }
      break;

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
                                 tree op1, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type))
        return (tree_expr_nonnegative_warnv_p (op0,
                                               strict_overflow_p)
                && tree_expr_nonnegative_warnv_p (op1,
                                                  strict_overflow_p));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
         both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
          && TREE_CODE (op0) == NOP_EXPR
          && TREE_CODE (op1) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
            {
              unsigned int prec = MAX (TYPE_PRECISION (inner1),
                                       TYPE_PRECISION (inner2)) + 1;
              return prec < TYPE_PRECISION (type);
            }
        }
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          /* x * x is always non-negative for floating point x
             or without overflow.  */
          if (operand_equal_p (op0, op1, 0)
              || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
                  && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
            {
              if (TYPE_OVERFLOW_UNDEFINED (type))
                *strict_overflow_p = true;
              return true;
            }
        }

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
         both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
          && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
          && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
        {
          tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
            ? TREE_TYPE (TREE_OPERAND (op0, 0))
            : TREE_TYPE (op0);
          tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
            ? TREE_TYPE (TREE_OPERAND (op1, 0))
            : TREE_TYPE (op1);

          bool unsigned0 = TYPE_UNSIGNED (inner0);
          bool unsigned1 = TYPE_UNSIGNED (inner1);

          if (TREE_CODE (op0) == INTEGER_CST)
            unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;

          if (TREE_CODE (op1) == INTEGER_CST)
            unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;

          if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
              && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
            {
              unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
                ? tree_int_cst_min_precision (op0, UNSIGNED)
                : TYPE_PRECISION (inner0);

              unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
                ? tree_int_cst_min_precision (op1, UNSIGNED)
                : TYPE_PRECISION (inner1);

              return precision0 + precision1 < TYPE_PRECISION (type);
            }
        }
      break;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
                                             strict_overflow_p)
              || tree_expr_nonnegative_warnv_p (op1,
                                                strict_overflow_p));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
                                             strict_overflow_p)
              && tree_expr_nonnegative_warnv_p (op1,
                                                strict_overflow_p));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
                                            strict_overflow_p);
    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case FIXED_CST:
      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));

    case COND_EXPR:
      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                             strict_overflow_p)
              && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
                                                strict_overflow_p));

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_call_nonnegative_warnv_p (tree type, tree fndecl,
                               tree arg0, tree arg1, bool *strict_overflow_p)
{
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
        CASE_FLT_FN (BUILT_IN_ACOS):
        CASE_FLT_FN (BUILT_IN_ACOSH):
        CASE_FLT_FN (BUILT_IN_CABS):
        CASE_FLT_FN (BUILT_IN_COSH):
        CASE_FLT_FN (BUILT_IN_ERFC):
        CASE_FLT_FN (BUILT_IN_EXP):
        CASE_FLT_FN (BUILT_IN_EXP10):
        CASE_FLT_FN (BUILT_IN_EXP2):
        CASE_FLT_FN (BUILT_IN_FABS):
        CASE_FLT_FN (BUILT_IN_FDIM):
        CASE_FLT_FN (BUILT_IN_HYPOT):
        CASE_FLT_FN (BUILT_IN_POW10):
        CASE_INT_FN (BUILT_IN_FFS):
        CASE_INT_FN (BUILT_IN_PARITY):
        CASE_INT_FN (BUILT_IN_POPCOUNT):
        CASE_INT_FN (BUILT_IN_CLZ):
        CASE_INT_FN (BUILT_IN_CLRSB):
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
        /* Always true.  */
        return true;

        CASE_FLT_FN (BUILT_IN_SQRT):
        /* sqrt(-0.0) is -0.0.  */
        if (!HONOR_SIGNED_ZEROS (element_mode (type)))
          return true;
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_ASINH):
        CASE_FLT_FN (BUILT_IN_ATAN):
        CASE_FLT_FN (BUILT_IN_ATANH):
        CASE_FLT_FN (BUILT_IN_CBRT):
        CASE_FLT_FN (BUILT_IN_CEIL):
        CASE_FLT_FN (BUILT_IN_ERF):
        CASE_FLT_FN (BUILT_IN_EXPM1):
        CASE_FLT_FN (BUILT_IN_FLOOR):
        CASE_FLT_FN (BUILT_IN_FMOD):
        CASE_FLT_FN (BUILT_IN_FREXP):
        CASE_FLT_FN (BUILT_IN_ICEIL):
        CASE_FLT_FN (BUILT_IN_IFLOOR):
        CASE_FLT_FN (BUILT_IN_IRINT):
        CASE_FLT_FN (BUILT_IN_IROUND):
        CASE_FLT_FN (BUILT_IN_LCEIL):
        CASE_FLT_FN (BUILT_IN_LDEXP):
        CASE_FLT_FN (BUILT_IN_LFLOOR):
        CASE_FLT_FN (BUILT_IN_LLCEIL):
        CASE_FLT_FN (BUILT_IN_LLFLOOR):
        CASE_FLT_FN (BUILT_IN_LLRINT):
        CASE_FLT_FN (BUILT_IN_LLROUND):
        CASE_FLT_FN (BUILT_IN_LRINT):
        CASE_FLT_FN (BUILT_IN_LROUND):
        CASE_FLT_FN (BUILT_IN_MODF):
        CASE_FLT_FN (BUILT_IN_NEARBYINT):
        CASE_FLT_FN (BUILT_IN_RINT):
        CASE_FLT_FN (BUILT_IN_ROUND):
        CASE_FLT_FN (BUILT_IN_SCALB):
        CASE_FLT_FN (BUILT_IN_SCALBLN):
        CASE_FLT_FN (BUILT_IN_SCALBN):
        CASE_FLT_FN (BUILT_IN_SIGNBIT):
        CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
        CASE_FLT_FN (BUILT_IN_SINH):
        CASE_FLT_FN (BUILT_IN_TANH):
        CASE_FLT_FN (BUILT_IN_TRUNC):
        /* True if the 1st argument is nonnegative.  */
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_FMAX):
        /* True if the 1st OR 2nd arguments are nonnegative.  */
        return (tree_expr_nonnegative_warnv_p (arg0,
                                               strict_overflow_p)
                || (tree_expr_nonnegative_warnv_p (arg1,
                                                   strict_overflow_p)));

        CASE_FLT_FN (BUILT_IN_FMIN):
        /* True if the 1st AND 2nd arguments are nonnegative.  */
        return (tree_expr_nonnegative_warnv_p (arg0,
                                               strict_overflow_p)
                && (tree_expr_nonnegative_warnv_p (arg1,
                                                   strict_overflow_p)));

        CASE_FLT_FN (BUILT_IN_COPYSIGN):
        /* True if the 2nd argument is nonnegative.  */
        return tree_expr_nonnegative_warnv_p (arg1,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_POWI):
        /* True if the 1st argument is nonnegative or the second
           argument is an even integer.  */
        if (TREE_CODE (arg1) == INTEGER_CST
            && (TREE_INT_CST_LOW (arg1) & 1) == 0)
          return true;
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_POW):
        /* True if the 1st argument is nonnegative or the second
           argument is an even integer valued real.  */
        if (TREE_CODE (arg1) == REAL_CST)
          {
            REAL_VALUE_TYPE c;
            HOST_WIDE_INT n;

            c = TREE_REAL_CST (arg1);
            n = real_to_integer (&c);
            if ((n & 1) == 0)
              {
                REAL_VALUE_TYPE cint;
                real_from_integer (&cint, VOIDmode, n, SIGNED);
                if (real_identical (&c, &cint))
                  return true;
              }
          }
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

      default:
        break;
      }
  return tree_simple_nonnegative_warnv_p (CALL_EXPR,
                                          type);
}
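/* Example (illustrative): pow (x, 2.0) is known non-negative because
   the exponent is an even integer valued real; pow (x, 3.0) is known
   non-negative only when x is.  */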
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

static bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code = TREE_CODE (t);
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (code)
    {
    case TARGET_EXPR:
      {
        tree temp = TARGET_EXPR_SLOT (t);
        t = TARGET_EXPR_INITIAL (t);

        /* If the initializer is non-void, then it's a normal expression
           that will be assigned to the slot.  */
        if (!VOID_TYPE_P (t))
          return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);

        /* Otherwise, the initializer sets the slot in some way.  One common
           way is an assignment statement at the end of the initializer.  */
        while (1)
          {
            if (TREE_CODE (t) == BIND_EXPR)
              t = expr_last (BIND_EXPR_BODY (t));
            else if (TREE_CODE (t) == TRY_FINALLY_EXPR
                     || TREE_CODE (t) == TRY_CATCH_EXPR)
              t = expr_last (TREE_OPERAND (t, 0));
            else if (TREE_CODE (t) == STATEMENT_LIST)
              t = expr_last (t);
            else
              break;
          }
        if (TREE_CODE (t) == MODIFY_EXPR
            && TREE_OPERAND (t, 0) == temp)
          return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                                strict_overflow_p);

        return false;
      }

    case CALL_EXPR:
      {
        tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
        tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;

        return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
                                              get_callee_fndecl (t),
                                              arg0,
                                              arg1,
                                              strict_overflow_p);
      }
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                            strict_overflow_p);
    case BIND_EXPR:
      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
                                            strict_overflow_p);
    case SAVE_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
                                            strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code;
  if (t == error_mark_node)
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              TREE_OPERAND (t, 1),
                                              strict_overflow_p);

    case tcc_unary:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
                                             TREE_TYPE (t),
                                             TREE_OPERAND (t, 0),
                                             strict_overflow_p);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              TREE_OPERAND (t, 1),
                                              strict_overflow_p);
    case TRUTH_NOT_EXPR:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
                                             TREE_TYPE (t),
                                             TREE_OPERAND (t, 0),
                                             strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
    }
}
/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */

bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
                            "determining that expression is always "
                            "non-negative"),
                           WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
/* Return true when (CODE OP0) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
                            bool *strict_overflow_p)
{
  switch (code)
    {
    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
                                        strict_overflow_p);

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (op0);
        tree outer_type = type;

        return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
                && tree_expr_nonzero_warnv_p (op0,
                                              strict_overflow_p));
      }
      break;

    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
                                        strict_overflow_p);

    default:
      break;
    }

  return false;
}
/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonzero_warnv_p (enum tree_code code,
                             tree type,
                             tree op0,
                             tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          /* With the presence of negative values it is hard
             to say something.  */
          sub_strict_overflow_p = false;
          if (!tree_expr_nonnegative_warnv_p (op0,
                                              &sub_strict_overflow_p)
              || !tree_expr_nonnegative_warnv_p (op1,
                                                 &sub_strict_overflow_p))
            return false;
          /* One of operands must be positive and the other non-negative.  */
          /* We don't set *STRICT_OVERFLOW_P here: even if this value
             overflows, on a twos-complement machine the sum of two
             nonnegative numbers can never be zero.  */
          return (tree_expr_nonzero_warnv_p (op0,
                                             strict_overflow_p)
                  || tree_expr_nonzero_warnv_p (op1,
                                                strict_overflow_p));
        }
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          if (tree_expr_nonzero_warnv_p (op0,
                                         strict_overflow_p)
              && tree_expr_nonzero_warnv_p (op1,
                                            strict_overflow_p))
            {
              *strict_overflow_p = true;
              return true;
            }
        }
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
                                     &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (op1,
                                        &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
                                     &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;

          /* When both operands are nonzero, then MAX must be too.  */
          if (tree_expr_nonzero_warnv_p (op1,
                                         strict_overflow_p))
            return true;

          /* MAX where operand 0 is positive is positive.  */
          return tree_expr_nonnegative_warnv_p (op0,
                                                strict_overflow_p);
        }
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1,
                                          &sub_strict_overflow_p)
               && tree_expr_nonnegative_warnv_p (op1,
                                                 &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (op1,
                                         strict_overflow_p)
              || tree_expr_nonzero_warnv_p (op0,
                                            strict_overflow_p));

    default:
      break;
    }

  return false;
}
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
        tree base = TREE_OPERAND (t, 0);

        if (!DECL_P (base))
          base = get_base_address (base);

        if (!base)
          return false;

        /* For objects in symbol table check if we know they are non-zero.
           Don't do anything for variables and functions before symtab is built;
           it is quite possible that they will be declared weak later.  */
        if (DECL_P (base) && decl_in_symtab_p (base))
          {
            struct symtab_node *symbol;

            symbol = symtab_node::get_create (base);
            if (symbol)
              return symbol->nonzero_address ();
            else
              return false;
          }

        /* Function local objects are never NULL.  */
        if (DECL_P (base)
            && (DECL_CONTEXT (base)
                && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
                && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
          return true;

        /* Constants are never weak.  */
        if (CONSTANT_CLASS_P (base))
          return true;

        break;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                     &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
                                        &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    default:
      break;
    }

  return false;
}
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;
      location_t loc = EXPR_LOCATION (exp);

      if (TREE_CODE (exp) == INDIRECT_REF)
        string = string_constant (exp1, &index);
      else
        {
          tree low_bound = array_ref_low_bound (exp);
          index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));

          /* Optimize the special-case of a zero lower bound.

             We convert the low_bound to sizetype to avoid some problems
             with constant folding.  (E.g. suppose the lower bound is 1,
             and its mode is QI.  Without the conversion, (ARRAY
             + (INDEX - (unsigned char) 1)) becomes
             ((ARRAY + (-(unsigned char) 1)) + INDEX), which becomes
             (ARRAY + 255 + INDEX).  Oops!)  */
          if (! integer_zerop (low_bound))
            index = size_diffop_loc (loc, index,
                                     fold_convert_loc (loc, sizetype,
                                                       low_bound));

          string = exp1;
        }

      if (string
          && TYPE_MODE (TREE_TYPE (exp))
             == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
          && TREE_CODE (string) == STRING_CST
          && TREE_CODE (index) == INTEGER_CST
          && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
          && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
              == MODE_INT)
          && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
        return build_int_cst_type (TREE_TYPE (exp),
                                   (TREE_STRING_POINTER (string)
                                    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}
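/* Illustrative sketch (not part of GCC): for a reference such as "abc"[1]
   built over a STRING_CST, the checks above all pass and the function
   returns the character constant 'b' in the type of EXP; an index at or
   past TREE_STRING_LENGTH, or a non-constant index, yields NULL.  */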
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        bool overflow;
        wide_int val = wi::neg (arg0, &overflow);
        t = force_fit_type (type, val, 1,
                            (overflow | TREE_OVERFLOW (arg0))
                            && !TYPE_UNSIGNED (type));
        break;
      }

    case REAL_CST:
      t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
        FIXED_VALUE_TYPE f;
        bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
                                            &(TREE_FIXED_CST (arg0)), NULL,
                                            TYPE_SATURATING (type));
        t = build_fixed (type, f);
        /* Propagate overflow flags.  */
        if (overflow_p | TREE_OVERFLOW (arg0))
          TREE_OVERFLOW (t) = 1;
        break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}
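/* Illustrative sketch (not part of GCC): negating INT_MIN in a signed
   32-bit type wraps back to INT_MIN, so wi::neg reports overflow and
   force_fit_type marks the result with TREE_OVERFLOW; negating an
   unsigned constant never sets the flag because of the
   !TYPE_UNSIGNED (type) guard above.  */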
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        /* If the value is unsigned or non-negative, then the absolute value
           is the same as the ordinary value.  */
        if (!wi::neg_p (arg0, TYPE_SIGN (type)))
          t = arg0;

        /* If the value is negative, then the absolute value is
           its negation.  */
        else
          {
            bool overflow;
            wide_int val = wi::neg (arg0, &overflow);
            t = force_fit_type (type, val, -1,
                                overflow | TREE_OVERFLOW (arg0));
          }
      }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
        t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      else
        t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
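/* Illustrative sketch (not part of GCC):

     tree m5 = build_int_cst (integer_type_node, -5);
     tree a = fold_abs_const (m5, integer_type_node);
     // a is the INTEGER_CST 5.  As with fold_negate_const, the absolute
     // value of INT_MIN overflows and carries TREE_OVERFLOW.  */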
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (const_tree arg0, tree type)
{
  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
}
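/* Illustrative sketch (not part of GCC): fold_not_const of the unsigned
   char constant 0x0f yields 0xf0, i.e. wi::bit_not followed by truncation
   to the 8-bit precision of the type in force_fit_type.  */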
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
        {
          switch (code)
            {
            case EQ_EXPR:
            case ORDERED_EXPR:
              result = 0;
              break;

            case NE_EXPR:
            case UNORDERED_EXPR:
            case UNLT_EXPR:
            case UNLE_EXPR:
            case UNGT_EXPR:
            case UNGE_EXPR:
            case UNEQ_EXPR:
              result = 1;
              break;

            case LT_EXPR:
            case LE_EXPR:
            case GT_EXPR:
            case GE_EXPR:
            case LTGT_EXPR:
              if (flag_trapping_math)
                return NULL_TREE;
              result = 0;
              break;

            default:
              gcc_unreachable ();
            }

          return constant_boolean_node (result, type);
        }

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
                                          TREE_REALPART (op0),
                                          TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
                                          TREE_IMAGPART (op0),
                                          TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
        return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
        return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
        return NULL_TREE;
    }

  if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
    {
      unsigned count = VECTOR_CST_NELTS (op0);
      tree *elts = XALLOCAVEC (tree, count);
      gcc_assert (VECTOR_CST_NELTS (op1) == count
                  && TYPE_VECTOR_SUBPARTS (type) == count);

      for (unsigned i = 0; i < count; i++)
        {
          tree elem_type = TREE_TYPE (type);
          tree elem0 = VECTOR_CST_ELT (op0, i);
          tree elem1 = VECTOR_CST_ELT (op1, i);

          tree tem = fold_relational_const (code, elem_type,
                                            elem0, elem1);

          if (tem == NULL_TREE)
            return NULL_TREE;

          elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
        }

      return build_vector (type, elts);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
        result = tree_int_cst_equal (op0, op1);
      else
        result = tree_int_cst_lt (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
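/* Illustrative sketch (not part of GCC):

     tree three = build_int_cst (integer_type_node, 3);
     tree five = build_int_cst (integer_type_node, 5);
     tree ge = fold_relational_const (GE_EXPR, boolean_type_node,
                                      three, five);
     // GE is rewritten to LT with the result inverted: 3 < 5 is 1,
     // so after inversion ge is boolean_false_node.  */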
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check whether the expression inside the
     return, or the right-hand side of the modify expression inside the
     return, has no side effects.  If either has none, we don't need to
     wrap the expression in a cleanup point expression.  Note we don't check
     the left-hand side of the modify because it should always be a return
     decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
        return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
        return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

static tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
        return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
        {
          tree fop = fold_read_from_constant_string (op);
          if (fop)
            return fop;
          else
            return op;
        }
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype)
               && (!in_gimple_form
                   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          if (in_gimple_form
              && TREE_CODE (min_val) != INTEGER_CST)
            return NULL_TREE;
          return build4_loc (loc, ARRAY_REF, type, op, min_val,
                             NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && type == TREE_TYPE (optype))
        return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && type == TREE_TYPE (optype))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
                                  index);
        }
    }

  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
        {
          tree op00type;
          op00 = TREE_OPERAND (op00, 0);
          op00type = TREE_TYPE (op00);

          /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
          if (TREE_CODE (op00type) == VECTOR_TYPE
              && type == TREE_TYPE (op00type))
            {
              HOST_WIDE_INT offset = tree_to_shwi (op01);
              tree part_width = TYPE_SIZE (type);
              unsigned HOST_WIDE_INT part_widthi
                = tree_to_shwi (part_width) / BITS_PER_UNIT;
              unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
              tree index = bitsize_int (indexi);

              if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
                return fold_build3_loc (loc,
                                        BIT_FIELD_REF, type, op00,
                                        part_width, index);
            }
          /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
          else if (TREE_CODE (op00type) == COMPLEX_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree size = TYPE_SIZE_UNIT (type);
              if (tree_int_cst_equal (size, op01))
                return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
            }
          /* ((foo *)&fooarray)[1] => fooarray[1] */
          else if (TREE_CODE (op00type) == ARRAY_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree type_domain = TYPE_DOMAIN (op00type);
              tree min_val = size_zero_node;
              if (type_domain && TYPE_MIN_VALUE (type_domain))
                min_val = TYPE_MIN_VALUE (type_domain);
              op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
                                     TYPE_SIZE_UNIT (type));
              op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
              return build4_loc (loc, ARRAY_REF, type, op00, op01,
                                 NULL_TREE, NULL_TREE);
            }
        }
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
          || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
          && TREE_CODE (min_val) != INTEGER_CST)
        return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
                         NULL_TREE);
    }

  return NULL_TREE;
}
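/* Illustrative sketch (not part of GCC): given `int arr[4]', folding an
   indirection through (int *)&arr hits the "*(foo *)&fooarray" case above
   and produces arr[0] as an ARRAY_REF, leaving no indirection in the
   tree.  */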
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}
/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
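/* Illustrative sketch (not part of GCC): for a COMPOUND_EXPR such as
   (x++, y + 1) whose value is unused, the loop keeps operand 0 and
   discards the side-effect-free y + 1, returning just x++; a tree with
   no side effects at all collapses to integer_zero_node.  */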
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.  */

tree
round_up_loc (location_t loc, tree value, unsigned int divisor)
{
  tree div = NULL_TREE;

  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
        {
          wide_int val = value;
          bool overflow_p;

          if ((val & (divisor - 1)) == 0)
            return value;

          overflow_p = TREE_OVERFLOW (value);
          val &= ~(divisor - 1);
          val += divisor;
          if (val == 0)
            overflow_p = true;

          return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
        }
      else
        {
          tree t;

          t = build_int_cst (TREE_TYPE (value), divisor - 1);
          value = size_binop_loc (loc, PLUS_EXPR, value, t);
          t = build_int_cst (TREE_TYPE (value), -divisor);
          value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
        }
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
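/* Illustrative sketch (not part of GCC): with a power-of-two divisor the
   constant path is pure bit manipulation; e.g. rounding 13 up to a
   multiple of 8 computes (13 & ~7) + 8 = 16, while a non-constant VALUE
   becomes (VALUE + 7) & -8 via size_binop_loc.  */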
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
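/* Illustrative sketch (not part of GCC): rounding down needs no addition
   first, so a non-constant VALUE with divisor 8 folds to VALUE & -8, and
   13 rounds down to 8.  */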
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
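/* Illustrative sketch (not part of GCC): for `int a[10]',

     HOST_WIDE_INT diff;
     bool ok = ptr_difference_const (addr_a3, addr_a1, &diff);
     // addr_a3 and addr_a1 are hypothetical ADDR_EXPRs for &a[3], &a[1].
     // ok is true and diff is 2 * sizeof (int): both addresses share the
     // core `a' and differ only in constant bit positions.  */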
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (exp)))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
                                arg0 ? arg0 : TREE_OPERAND (exp, 0),
                                arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp),
                                arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3_loc (loc,
                                COND_EXPR, TREE_TYPE (exp),
                                TREE_OPERAND (exp, 0),
                                arg0 ? arg0 : TREE_OPERAND (exp, 1),
                                arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_COPYSIGN):
            /* Strip copysign function call, return the 1st argument.  */
            arg0 = CALL_EXPR_ARG (exp, 0);
            arg1 = CALL_EXPR_ARG (exp, 1);
            return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

          default:
            /* Strip sign ops from the argument of "odd" math functions.  */
            if (negate_mathfn_p (fcode))
              {
                arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
                if (arg0)
                  return build_call_expr_loc (loc, get_callee_fndecl (exp),
                                              1, arg0);