1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et. al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
28 /* The entry points in this file are fold, size_int_wide and size_binop.
30 fold takes a tree as argument and returns a simplified tree.
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
45 #include "coretypes.h"
49 #include "stor-layout.h"
51 #include "tree-iterator.h"
57 #include "diagnostic-core.h"
59 #include "langhooks.h"
66 #include "hard-reg-set.h"
69 #include "basic-block.h"
70 #include "tree-ssa-alias.h"
71 #include "internal-fn.h"
73 #include "gimple-expr.h"
78 #include "hash-table.h" /* Required for ENABLE_FOLD_CHECKING. */
81 #include "plugin-api.h"
84 #include "generic-match.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */

int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
112 static bool negate_mathfn_p (enum built_in_function
);
113 static bool negate_expr_p (tree
);
114 static tree
negate_expr (tree
);
115 static tree
split_tree (tree
, enum tree_code
, tree
*, tree
*, tree
*, int);
116 static tree
associate_trees (location_t
, tree
, tree
, enum tree_code
, tree
);
117 static tree
const_binop (enum tree_code
, tree
, tree
);
118 static enum comparison_code
comparison_to_compcode (enum tree_code
);
119 static enum tree_code
compcode_to_comparison (enum comparison_code
);
120 static int operand_equal_for_comparison_p (tree
, tree
, tree
);
121 static int twoval_comparison_p (tree
, tree
*, tree
*, int *);
122 static tree
eval_subst (location_t
, tree
, tree
, tree
, tree
, tree
);
123 static tree
pedantic_omit_one_operand_loc (location_t
, tree
, tree
, tree
);
124 static tree
distribute_bit_expr (location_t
, enum tree_code
, tree
, tree
, tree
);
125 static tree
make_bit_field_ref (location_t
, tree
, tree
,
126 HOST_WIDE_INT
, HOST_WIDE_INT
, int);
127 static tree
optimize_bit_field_compare (location_t
, enum tree_code
,
129 static tree
decode_field_reference (location_t
, tree
, HOST_WIDE_INT
*,
131 machine_mode
*, int *, int *,
133 static tree
sign_bit_p (tree
, const_tree
);
134 static int simple_operand_p (const_tree
);
135 static bool simple_operand_p_2 (tree
);
136 static tree
range_binop (enum tree_code
, tree
, tree
, int, tree
, int);
137 static tree
range_predecessor (tree
);
138 static tree
range_successor (tree
);
139 static tree
fold_range_test (location_t
, enum tree_code
, tree
, tree
, tree
);
140 static tree
fold_cond_expr_with_comparison (location_t
, tree
, tree
, tree
, tree
);
141 static tree
unextend (tree
, int, int, tree
);
142 static tree
optimize_minmax_comparison (location_t
, enum tree_code
,
144 static tree
extract_muldiv (tree
, tree
, enum tree_code
, tree
, bool *);
145 static tree
extract_muldiv_1 (tree
, tree
, enum tree_code
, tree
, bool *);
146 static tree
fold_binary_op_with_conditional_arg (location_t
,
147 enum tree_code
, tree
,
150 static tree
fold_mathfn_compare (location_t
,
151 enum built_in_function
, enum tree_code
,
153 static tree
fold_inf_compare (location_t
, enum tree_code
, tree
, tree
, tree
);
154 static tree
fold_div_compare (location_t
, enum tree_code
, tree
, tree
, tree
);
155 static bool reorder_operands_p (const_tree
, const_tree
);
156 static tree
fold_negate_const (tree
, tree
);
157 static tree
fold_not_const (const_tree
, tree
);
158 static tree
fold_relational_const (enum tree_code
, tree
, tree
, tree
);
159 static tree
fold_convert_const (enum tree_code
, tree
, tree
);
161 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
162 Otherwise, return LOC. */
165 expr_location_or (tree t
, location_t loc
)
167 location_t tloc
= EXPR_LOCATION (t
);
168 return tloc
== UNKNOWN_LOCATION
? loc
: tloc
;
171 /* Similar to protected_set_expr_location, but never modify x in place,
172 if location can and needs to be set, unshare it. */
175 protected_set_expr_location_unshare (tree x
, location_t loc
)
177 if (CAN_HAVE_LOCATION_P (x
)
178 && EXPR_LOCATION (x
) != loc
179 && !(TREE_CODE (x
) == SAVE_EXPR
180 || TREE_CODE (x
) == TARGET_EXPR
181 || TREE_CODE (x
) == BIND_EXPR
))
184 SET_EXPR_LOCATION (x
, loc
);
189 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
190 division and returns the quotient. Otherwise returns
194 div_if_zero_remainder (const_tree arg1
, const_tree arg2
)
198 if (wi::multiple_of_p (wi::to_widest (arg1
), wi::to_widest (arg2
),
200 return wide_int_to_tree (TREE_TYPE (arg1
), quo
);
205 /* This is nonzero if we should defer warnings about undefined
206 overflow. This facility exists because these warnings are a
207 special case. The code to estimate loop iterations does not want
208 to issue any warnings, since it works with expressions which do not
209 occur in user code. Various bits of cleanup code call fold(), but
210 only use the result if it has certain characteristics (e.g., is a
211 constant); that code only wants to issue a warning if the result is
214 static int fold_deferring_overflow_warnings
;
216 /* If a warning about undefined overflow is deferred, this is the
217 warning. Note that this may cause us to turn two warnings into
218 one, but that is fine since it is sufficient to only give one
219 warning per expression. */
221 static const char* fold_deferred_overflow_warning
;
223 /* If a warning about undefined overflow is deferred, this is the
224 level at which the warning should be emitted. */
226 static enum warn_strict_overflow_code fold_deferred_overflow_code
;
228 /* Start deferring overflow warnings. We could use a stack here to
229 permit nested calls, but at present it is not necessary. */
232 fold_defer_overflow_warnings (void)
234 ++fold_deferring_overflow_warnings
;
237 /* Stop deferring overflow warnings. If there is a pending warning,
238 and ISSUE is true, then issue the warning if appropriate. STMT is
239 the statement with which the warning should be associated (used for
240 location information); STMT may be NULL. CODE is the level of the
241 warning--a warn_strict_overflow_code value. This function will use
242 the smaller of CODE and the deferred code when deciding whether to
243 issue the warning. CODE may be zero to mean to always use the
247 fold_undefer_overflow_warnings (bool issue
, const_gimple stmt
, int code
)
252 gcc_assert (fold_deferring_overflow_warnings
> 0);
253 --fold_deferring_overflow_warnings
;
254 if (fold_deferring_overflow_warnings
> 0)
256 if (fold_deferred_overflow_warning
!= NULL
258 && code
< (int) fold_deferred_overflow_code
)
259 fold_deferred_overflow_code
= (enum warn_strict_overflow_code
) code
;
263 warnmsg
= fold_deferred_overflow_warning
;
264 fold_deferred_overflow_warning
= NULL
;
266 if (!issue
|| warnmsg
== NULL
)
269 if (gimple_no_warning_p (stmt
))
272 /* Use the smallest code level when deciding to issue the
274 if (code
== 0 || code
> (int) fold_deferred_overflow_code
)
275 code
= fold_deferred_overflow_code
;
277 if (!issue_strict_overflow_warning (code
))
281 locus
= input_location
;
283 locus
= gimple_location (stmt
);
284 warning_at (locus
, OPT_Wstrict_overflow
, "%s", warnmsg
);
287 /* Stop deferring overflow warnings, ignoring any deferred
291 fold_undefer_and_ignore_overflow_warnings (void)
293 fold_undefer_overflow_warnings (false, NULL
, 0);
296 /* Whether we are deferring overflow warnings. */
299 fold_deferring_overflow_warnings_p (void)
301 return fold_deferring_overflow_warnings
> 0;
304 /* This is called when we fold something based on the fact that signed
305 overflow is undefined. */
308 fold_overflow_warning (const char* gmsgid
, enum warn_strict_overflow_code wc
)
310 if (fold_deferring_overflow_warnings
> 0)
312 if (fold_deferred_overflow_warning
== NULL
313 || wc
< fold_deferred_overflow_code
)
315 fold_deferred_overflow_warning
= gmsgid
;
316 fold_deferred_overflow_code
= wc
;
319 else if (issue_strict_overflow_warning (wc
))
320 warning (OPT_Wstrict_overflow
, gmsgid
);
323 /* Return true if the built-in mathematical function specified by CODE
324 is odd, i.e. -f(x) == f(-x). */
327 negate_mathfn_p (enum built_in_function code
)
331 CASE_FLT_FN (BUILT_IN_ASIN
):
332 CASE_FLT_FN (BUILT_IN_ASINH
):
333 CASE_FLT_FN (BUILT_IN_ATAN
):
334 CASE_FLT_FN (BUILT_IN_ATANH
):
335 CASE_FLT_FN (BUILT_IN_CASIN
):
336 CASE_FLT_FN (BUILT_IN_CASINH
):
337 CASE_FLT_FN (BUILT_IN_CATAN
):
338 CASE_FLT_FN (BUILT_IN_CATANH
):
339 CASE_FLT_FN (BUILT_IN_CBRT
):
340 CASE_FLT_FN (BUILT_IN_CPROJ
):
341 CASE_FLT_FN (BUILT_IN_CSIN
):
342 CASE_FLT_FN (BUILT_IN_CSINH
):
343 CASE_FLT_FN (BUILT_IN_CTAN
):
344 CASE_FLT_FN (BUILT_IN_CTANH
):
345 CASE_FLT_FN (BUILT_IN_ERF
):
346 CASE_FLT_FN (BUILT_IN_LLROUND
):
347 CASE_FLT_FN (BUILT_IN_LROUND
):
348 CASE_FLT_FN (BUILT_IN_ROUND
):
349 CASE_FLT_FN (BUILT_IN_SIN
):
350 CASE_FLT_FN (BUILT_IN_SINH
):
351 CASE_FLT_FN (BUILT_IN_TAN
):
352 CASE_FLT_FN (BUILT_IN_TANH
):
353 CASE_FLT_FN (BUILT_IN_TRUNC
):
356 CASE_FLT_FN (BUILT_IN_LLRINT
):
357 CASE_FLT_FN (BUILT_IN_LRINT
):
358 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
359 CASE_FLT_FN (BUILT_IN_RINT
):
360 return !flag_rounding_math
;
368 /* Check whether we may negate an integer constant T without causing
372 may_negate_without_overflow_p (const_tree t
)
376 gcc_assert (TREE_CODE (t
) == INTEGER_CST
);
378 type
= TREE_TYPE (t
);
379 if (TYPE_UNSIGNED (type
))
382 return !wi::only_sign_bit_p (t
);
385 /* Determine whether an expression T can be cheaply negated using
386 the function negate_expr without introducing undefined overflow. */
389 negate_expr_p (tree t
)
396 type
= TREE_TYPE (t
);
399 switch (TREE_CODE (t
))
402 if (TYPE_OVERFLOW_WRAPS (type
))
405 /* Check that -CST will not overflow type. */
406 return may_negate_without_overflow_p (t
);
408 return (INTEGRAL_TYPE_P (type
)
409 && TYPE_OVERFLOW_WRAPS (type
));
416 /* We want to canonicalize to positive real constants. Pretend
417 that only negative ones can be easily negated. */
418 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t
));
421 return negate_expr_p (TREE_REALPART (t
))
422 && negate_expr_p (TREE_IMAGPART (t
));
426 if (FLOAT_TYPE_P (TREE_TYPE (type
)) || TYPE_OVERFLOW_WRAPS (type
))
429 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
431 for (i
= 0; i
< count
; i
++)
432 if (!negate_expr_p (VECTOR_CST_ELT (t
, i
)))
439 return negate_expr_p (TREE_OPERAND (t
, 0))
440 && negate_expr_p (TREE_OPERAND (t
, 1));
443 return negate_expr_p (TREE_OPERAND (t
, 0));
446 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
))
447 || HONOR_SIGNED_ZEROS (TYPE_MODE (type
)))
449 /* -(A + B) -> (-B) - A. */
450 if (negate_expr_p (TREE_OPERAND (t
, 1))
451 && reorder_operands_p (TREE_OPERAND (t
, 0),
452 TREE_OPERAND (t
, 1)))
454 /* -(A + B) -> (-A) - B. */
455 return negate_expr_p (TREE_OPERAND (t
, 0));
458 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
459 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
))
460 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type
))
461 && reorder_operands_p (TREE_OPERAND (t
, 0),
462 TREE_OPERAND (t
, 1));
465 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
471 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t
))))
472 return negate_expr_p (TREE_OPERAND (t
, 1))
473 || negate_expr_p (TREE_OPERAND (t
, 0));
479 /* In general we can't negate A / B, because if A is INT_MIN and
480 B is 1, we may turn this into INT_MIN / -1 which is undefined
481 and actually traps on some architectures. But if overflow is
482 undefined, we can negate, because - (INT_MIN / 1) is an
484 if (INTEGRAL_TYPE_P (TREE_TYPE (t
)))
486 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t
)))
488 /* If overflow is undefined then we have to be careful because
489 we ask whether it's ok to associate the negate with the
490 division which is not ok for example for
491 -((a - b) / c) where (-(a - b)) / c may invoke undefined
492 overflow because of negating INT_MIN. So do not use
493 negate_expr_p here but open-code the two important cases. */
494 if (TREE_CODE (TREE_OPERAND (t
, 0)) == NEGATE_EXPR
495 || (TREE_CODE (TREE_OPERAND (t
, 0)) == INTEGER_CST
496 && may_negate_without_overflow_p (TREE_OPERAND (t
, 0))))
499 else if (negate_expr_p (TREE_OPERAND (t
, 0)))
501 return negate_expr_p (TREE_OPERAND (t
, 1));
504 /* Negate -((double)float) as (double)(-float). */
505 if (TREE_CODE (type
) == REAL_TYPE
)
507 tree tem
= strip_float_extensions (t
);
509 return negate_expr_p (tem
);
514 /* Negate -f(x) as f(-x). */
515 if (negate_mathfn_p (builtin_mathfn_code (t
)))
516 return negate_expr_p (CALL_EXPR_ARG (t
, 0));
520 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
521 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
523 tree op1
= TREE_OPERAND (t
, 1);
524 if (wi::eq_p (op1
, TYPE_PRECISION (type
) - 1))
535 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
536 simplification is possible.
537 If negate_expr_p would return true for T, NULL_TREE will never be
541 fold_negate_expr (location_t loc
, tree t
)
543 tree type
= TREE_TYPE (t
);
546 switch (TREE_CODE (t
))
548 /* Convert - (~A) to A + 1. */
550 if (INTEGRAL_TYPE_P (type
))
551 return fold_build2_loc (loc
, PLUS_EXPR
, type
, TREE_OPERAND (t
, 0),
552 build_one_cst (type
));
556 tem
= fold_negate_const (t
, type
);
557 if (TREE_OVERFLOW (tem
) == TREE_OVERFLOW (t
)
558 || !TYPE_OVERFLOW_TRAPS (type
))
563 tem
= fold_negate_const (t
, type
);
564 /* Two's complement FP formats, such as c4x, may overflow. */
565 if (!TREE_OVERFLOW (tem
) || !flag_trapping_math
)
570 tem
= fold_negate_const (t
, type
);
575 tree rpart
= negate_expr (TREE_REALPART (t
));
576 tree ipart
= negate_expr (TREE_IMAGPART (t
));
578 if ((TREE_CODE (rpart
) == REAL_CST
579 && TREE_CODE (ipart
) == REAL_CST
)
580 || (TREE_CODE (rpart
) == INTEGER_CST
581 && TREE_CODE (ipart
) == INTEGER_CST
))
582 return build_complex (type
, rpart
, ipart
);
588 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
589 tree
*elts
= XALLOCAVEC (tree
, count
);
591 for (i
= 0; i
< count
; i
++)
593 elts
[i
] = fold_negate_expr (loc
, VECTOR_CST_ELT (t
, i
));
594 if (elts
[i
] == NULL_TREE
)
598 return build_vector (type
, elts
);
602 if (negate_expr_p (t
))
603 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
604 fold_negate_expr (loc
, TREE_OPERAND (t
, 0)),
605 fold_negate_expr (loc
, TREE_OPERAND (t
, 1)));
609 if (negate_expr_p (t
))
610 return fold_build1_loc (loc
, CONJ_EXPR
, type
,
611 fold_negate_expr (loc
, TREE_OPERAND (t
, 0)));
615 return TREE_OPERAND (t
, 0);
618 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
))
619 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type
)))
621 /* -(A + B) -> (-B) - A. */
622 if (negate_expr_p (TREE_OPERAND (t
, 1))
623 && reorder_operands_p (TREE_OPERAND (t
, 0),
624 TREE_OPERAND (t
, 1)))
626 tem
= negate_expr (TREE_OPERAND (t
, 1));
627 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
628 tem
, TREE_OPERAND (t
, 0));
631 /* -(A + B) -> (-A) - B. */
632 if (negate_expr_p (TREE_OPERAND (t
, 0)))
634 tem
= negate_expr (TREE_OPERAND (t
, 0));
635 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
636 tem
, TREE_OPERAND (t
, 1));
642 /* - (A - B) -> B - A */
643 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
))
644 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type
))
645 && reorder_operands_p (TREE_OPERAND (t
, 0), TREE_OPERAND (t
, 1)))
646 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
647 TREE_OPERAND (t
, 1), TREE_OPERAND (t
, 0));
651 if (TYPE_UNSIGNED (type
))
657 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
)))
659 tem
= TREE_OPERAND (t
, 1);
660 if (negate_expr_p (tem
))
661 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
662 TREE_OPERAND (t
, 0), negate_expr (tem
));
663 tem
= TREE_OPERAND (t
, 0);
664 if (negate_expr_p (tem
))
665 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
666 negate_expr (tem
), TREE_OPERAND (t
, 1));
673 /* In general we can't negate A / B, because if A is INT_MIN and
674 B is 1, we may turn this into INT_MIN / -1 which is undefined
675 and actually traps on some architectures. But if overflow is
676 undefined, we can negate, because - (INT_MIN / 1) is an
678 if (!INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
680 const char * const warnmsg
= G_("assuming signed overflow does not "
681 "occur when negating a division");
682 tem
= TREE_OPERAND (t
, 1);
683 if (negate_expr_p (tem
))
685 if (INTEGRAL_TYPE_P (type
)
686 && (TREE_CODE (tem
) != INTEGER_CST
687 || integer_onep (tem
)))
688 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MISC
);
689 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
690 TREE_OPERAND (t
, 0), negate_expr (tem
));
692 /* If overflow is undefined then we have to be careful because
693 we ask whether it's ok to associate the negate with the
694 division which is not ok for example for
695 -((a - b) / c) where (-(a - b)) / c may invoke undefined
696 overflow because of negating INT_MIN. So do not use
697 negate_expr_p here but open-code the two important cases. */
698 tem
= TREE_OPERAND (t
, 0);
699 if ((INTEGRAL_TYPE_P (type
)
700 && (TREE_CODE (tem
) == NEGATE_EXPR
701 || (TREE_CODE (tem
) == INTEGER_CST
702 && may_negate_without_overflow_p (tem
))))
703 || !INTEGRAL_TYPE_P (type
))
704 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
705 negate_expr (tem
), TREE_OPERAND (t
, 1));
710 /* Convert -((double)float) into (double)(-float). */
711 if (TREE_CODE (type
) == REAL_TYPE
)
713 tem
= strip_float_extensions (t
);
714 if (tem
!= t
&& negate_expr_p (tem
))
715 return fold_convert_loc (loc
, type
, negate_expr (tem
));
720 /* Negate -f(x) as f(-x). */
721 if (negate_mathfn_p (builtin_mathfn_code (t
))
722 && negate_expr_p (CALL_EXPR_ARG (t
, 0)))
726 fndecl
= get_callee_fndecl (t
);
727 arg
= negate_expr (CALL_EXPR_ARG (t
, 0));
728 return build_call_expr_loc (loc
, fndecl
, 1, arg
);
733 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
734 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
736 tree op1
= TREE_OPERAND (t
, 1);
737 if (wi::eq_p (op1
, TYPE_PRECISION (type
) - 1))
739 tree ntype
= TYPE_UNSIGNED (type
)
740 ? signed_type_for (type
)
741 : unsigned_type_for (type
);
742 tree temp
= fold_convert_loc (loc
, ntype
, TREE_OPERAND (t
, 0));
743 temp
= fold_build2_loc (loc
, RSHIFT_EXPR
, ntype
, temp
, op1
);
744 return fold_convert_loc (loc
, type
, temp
);
756 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
757 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
769 loc
= EXPR_LOCATION (t
);
770 type
= TREE_TYPE (t
);
773 tem
= fold_negate_expr (loc
, t
);
775 tem
= build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (t
), t
);
776 return fold_convert_loc (loc
, type
, tem
);
779 /* Split a tree IN into a constant, literal and variable parts that could be
780 combined with CODE to make IN. "constant" means an expression with
781 TREE_CONSTANT but that isn't an actual constant. CODE must be a
782 commutative arithmetic operation. Store the constant part into *CONP,
783 the literal in *LITP and return the variable part. If a part isn't
784 present, set it to null. If the tree does not decompose in this way,
785 return the entire tree as the variable part and the other parts as null.
787 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
788 case, we negate an operand that was subtracted. Except if it is a
789 literal for which we use *MINUS_LITP instead.
791 If NEGATE_P is true, we are negating all of IN, again except a literal
792 for which we use *MINUS_LITP instead.
794 If IN is itself a literal or constant, return it as appropriate.
796 Note that we do not guarantee that any of the three values will be the
797 same type as IN, but they will have the same signedness and mode. */
800 split_tree (tree in
, enum tree_code code
, tree
*conp
, tree
*litp
,
801 tree
*minus_litp
, int negate_p
)
809 /* Strip any conversions that don't change the machine mode or signedness. */
810 STRIP_SIGN_NOPS (in
);
812 if (TREE_CODE (in
) == INTEGER_CST
|| TREE_CODE (in
) == REAL_CST
813 || TREE_CODE (in
) == FIXED_CST
)
815 else if (TREE_CODE (in
) == code
816 || ((! FLOAT_TYPE_P (TREE_TYPE (in
)) || flag_associative_math
)
817 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in
))
818 /* We can associate addition and subtraction together (even
819 though the C standard doesn't say so) for integers because
820 the value is not affected. For reals, the value might be
821 affected, so we can't. */
822 && ((code
== PLUS_EXPR
&& TREE_CODE (in
) == MINUS_EXPR
)
823 || (code
== MINUS_EXPR
&& TREE_CODE (in
) == PLUS_EXPR
))))
825 tree op0
= TREE_OPERAND (in
, 0);
826 tree op1
= TREE_OPERAND (in
, 1);
827 int neg1_p
= TREE_CODE (in
) == MINUS_EXPR
;
828 int neg_litp_p
= 0, neg_conp_p
= 0, neg_var_p
= 0;
830 /* First see if either of the operands is a literal, then a constant. */
831 if (TREE_CODE (op0
) == INTEGER_CST
|| TREE_CODE (op0
) == REAL_CST
832 || TREE_CODE (op0
) == FIXED_CST
)
833 *litp
= op0
, op0
= 0;
834 else if (TREE_CODE (op1
) == INTEGER_CST
|| TREE_CODE (op1
) == REAL_CST
835 || TREE_CODE (op1
) == FIXED_CST
)
836 *litp
= op1
, neg_litp_p
= neg1_p
, op1
= 0;
838 if (op0
!= 0 && TREE_CONSTANT (op0
))
839 *conp
= op0
, op0
= 0;
840 else if (op1
!= 0 && TREE_CONSTANT (op1
))
841 *conp
= op1
, neg_conp_p
= neg1_p
, op1
= 0;
843 /* If we haven't dealt with either operand, this is not a case we can
844 decompose. Otherwise, VAR is either of the ones remaining, if any. */
845 if (op0
!= 0 && op1
!= 0)
850 var
= op1
, neg_var_p
= neg1_p
;
852 /* Now do any needed negations. */
854 *minus_litp
= *litp
, *litp
= 0;
856 *conp
= negate_expr (*conp
);
858 var
= negate_expr (var
);
860 else if (TREE_CODE (in
) == BIT_NOT_EXPR
861 && code
== PLUS_EXPR
)
863 /* -X - 1 is folded to ~X, undo that here. */
864 *minus_litp
= build_one_cst (TREE_TYPE (in
));
865 var
= negate_expr (TREE_OPERAND (in
, 0));
867 else if (TREE_CONSTANT (in
))
875 *minus_litp
= *litp
, *litp
= 0;
876 else if (*minus_litp
)
877 *litp
= *minus_litp
, *minus_litp
= 0;
878 *conp
= negate_expr (*conp
);
879 var
= negate_expr (var
);
885 /* Re-associate trees split by the above function. T1 and T2 are
886 either expressions to associate or null. Return the new
887 expression, if any. LOC is the location of the new expression. If
888 we build an operation, do it in TYPE and with CODE. */
891 associate_trees (location_t loc
, tree t1
, tree t2
, enum tree_code code
, tree type
)
898 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
899 try to fold this since we will have infinite recursion. But do
900 deal with any NEGATE_EXPRs. */
901 if (TREE_CODE (t1
) == code
|| TREE_CODE (t2
) == code
902 || TREE_CODE (t1
) == MINUS_EXPR
|| TREE_CODE (t2
) == MINUS_EXPR
)
904 if (code
== PLUS_EXPR
)
906 if (TREE_CODE (t1
) == NEGATE_EXPR
)
907 return build2_loc (loc
, MINUS_EXPR
, type
,
908 fold_convert_loc (loc
, type
, t2
),
909 fold_convert_loc (loc
, type
,
910 TREE_OPERAND (t1
, 0)));
911 else if (TREE_CODE (t2
) == NEGATE_EXPR
)
912 return build2_loc (loc
, MINUS_EXPR
, type
,
913 fold_convert_loc (loc
, type
, t1
),
914 fold_convert_loc (loc
, type
,
915 TREE_OPERAND (t2
, 0)));
916 else if (integer_zerop (t2
))
917 return fold_convert_loc (loc
, type
, t1
);
919 else if (code
== MINUS_EXPR
)
921 if (integer_zerop (t2
))
922 return fold_convert_loc (loc
, type
, t1
);
925 return build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, t1
),
926 fold_convert_loc (loc
, type
, t2
));
929 return fold_build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, t1
),
930 fold_convert_loc (loc
, type
, t2
));
933 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
934 for use in int_const_binop, size_binop and size_diffop. */
937 int_binop_types_match_p (enum tree_code code
, const_tree type1
, const_tree type2
)
939 if (!INTEGRAL_TYPE_P (type1
) && !POINTER_TYPE_P (type1
))
941 if (!INTEGRAL_TYPE_P (type2
) && !POINTER_TYPE_P (type2
))
956 return TYPE_UNSIGNED (type1
) == TYPE_UNSIGNED (type2
)
957 && TYPE_PRECISION (type1
) == TYPE_PRECISION (type2
)
958 && TYPE_MODE (type1
) == TYPE_MODE (type2
);
962 /* Combine two integer constants ARG1 and ARG2 under operation CODE
963 to produce a new constant. Return NULL_TREE if we don't know how
964 to evaluate CODE at compile-time. */
967 int_const_binop_1 (enum tree_code code
, const_tree arg1
, const_tree parg2
,
972 tree type
= TREE_TYPE (arg1
);
973 signop sign
= TYPE_SIGN (type
);
974 bool overflow
= false;
976 wide_int arg2
= wide_int::from (parg2
, TYPE_PRECISION (type
),
977 TYPE_SIGN (TREE_TYPE (parg2
)));
982 res
= wi::bit_or (arg1
, arg2
);
986 res
= wi::bit_xor (arg1
, arg2
);
990 res
= wi::bit_and (arg1
, arg2
);
995 if (wi::neg_p (arg2
))
998 if (code
== RSHIFT_EXPR
)
1004 if (code
== RSHIFT_EXPR
)
1005 /* It's unclear from the C standard whether shifts can overflow.
1006 The following code ignores overflow; perhaps a C standard
1007 interpretation ruling is needed. */
1008 res
= wi::rshift (arg1
, arg2
, sign
);
1010 res
= wi::lshift (arg1
, arg2
);
1015 if (wi::neg_p (arg2
))
1018 if (code
== RROTATE_EXPR
)
1019 code
= LROTATE_EXPR
;
1021 code
= RROTATE_EXPR
;
1024 if (code
== RROTATE_EXPR
)
1025 res
= wi::rrotate (arg1
, arg2
);
1027 res
= wi::lrotate (arg1
, arg2
);
1031 res
= wi::add (arg1
, arg2
, sign
, &overflow
);
1035 res
= wi::sub (arg1
, arg2
, sign
, &overflow
);
1039 res
= wi::mul (arg1
, arg2
, sign
, &overflow
);
1042 case MULT_HIGHPART_EXPR
:
1043 res
= wi::mul_high (arg1
, arg2
, sign
);
1046 case TRUNC_DIV_EXPR
:
1047 case EXACT_DIV_EXPR
:
1050 res
= wi::div_trunc (arg1
, arg2
, sign
, &overflow
);
1053 case FLOOR_DIV_EXPR
:
1056 res
= wi::div_floor (arg1
, arg2
, sign
, &overflow
);
1062 res
= wi::div_ceil (arg1
, arg2
, sign
, &overflow
);
1065 case ROUND_DIV_EXPR
:
1068 res
= wi::div_round (arg1
, arg2
, sign
, &overflow
);
1071 case TRUNC_MOD_EXPR
:
1074 res
= wi::mod_trunc (arg1
, arg2
, sign
, &overflow
);
1077 case FLOOR_MOD_EXPR
:
1080 res
= wi::mod_floor (arg1
, arg2
, sign
, &overflow
);
1086 res
= wi::mod_ceil (arg1
, arg2
, sign
, &overflow
);
1089 case ROUND_MOD_EXPR
:
1092 res
= wi::mod_round (arg1
, arg2
, sign
, &overflow
);
1096 res
= wi::min (arg1
, arg2
, sign
);
1100 res
= wi::max (arg1
, arg2
, sign
);
1107 t
= force_fit_type (type
, res
, overflowable
,
1108 (((sign
== SIGNED
|| overflowable
== -1)
1110 | TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (parg2
)));
1116 int_const_binop (enum tree_code code
, const_tree arg1
, const_tree arg2
)
1118 return int_const_binop_1 (code
, arg1
, arg2
, 1);
1121 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1122 constant. We assume ARG1 and ARG2 have the same data type, or at least
1123 are the same kind of constant and the same machine mode. Return zero if
1124 combining the constants is not allowed in the current operating mode. */
1127 const_binop (enum tree_code code
, tree arg1
, tree arg2
)
1129 /* Sanity check for the recursive cases. */
1136 if (TREE_CODE (arg1
) == INTEGER_CST
)
1137 return int_const_binop (code
, arg1
, arg2
);
1139 if (TREE_CODE (arg1
) == REAL_CST
)
1144 REAL_VALUE_TYPE value
;
1145 REAL_VALUE_TYPE result
;
1149 /* The following codes are handled by real_arithmetic. */
1164 d1
= TREE_REAL_CST (arg1
);
1165 d2
= TREE_REAL_CST (arg2
);
1167 type
= TREE_TYPE (arg1
);
1168 mode
= TYPE_MODE (type
);
1170 /* Don't perform operation if we honor signaling NaNs and
1171 either operand is a NaN. */
1172 if (HONOR_SNANS (mode
)
1173 && (REAL_VALUE_ISNAN (d1
) || REAL_VALUE_ISNAN (d2
)))
1176 /* Don't perform operation if it would raise a division
1177 by zero exception. */
1178 if (code
== RDIV_EXPR
1179 && REAL_VALUES_EQUAL (d2
, dconst0
)
1180 && (flag_trapping_math
|| ! MODE_HAS_INFINITIES (mode
)))
1183 /* If either operand is a NaN, just return it. Otherwise, set up
1184 for floating-point trap; we return an overflow. */
1185 if (REAL_VALUE_ISNAN (d1
))
1187 else if (REAL_VALUE_ISNAN (d2
))
1190 inexact
= real_arithmetic (&value
, code
, &d1
, &d2
);
1191 real_convert (&result
, mode
, &value
);
1193 /* Don't constant fold this floating point operation if
1194 the result has overflowed and flag_trapping_math. */
1195 if (flag_trapping_math
1196 && MODE_HAS_INFINITIES (mode
)
1197 && REAL_VALUE_ISINF (result
)
1198 && !REAL_VALUE_ISINF (d1
)
1199 && !REAL_VALUE_ISINF (d2
))
1202 /* Don't constant fold this floating point operation if the
1203 result may dependent upon the run-time rounding mode and
1204 flag_rounding_math is set, or if GCC's software emulation
1205 is unable to accurately represent the result. */
1206 if ((flag_rounding_math
1207 || (MODE_COMPOSITE_P (mode
) && !flag_unsafe_math_optimizations
))
1208 && (inexact
|| !real_identical (&result
, &value
)))
1211 t
= build_real (type
, result
);
1213 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
);
1217 if (TREE_CODE (arg1
) == FIXED_CST
)
1219 FIXED_VALUE_TYPE f1
;
1220 FIXED_VALUE_TYPE f2
;
1221 FIXED_VALUE_TYPE result
;
1226 /* The following codes are handled by fixed_arithmetic. */
1232 case TRUNC_DIV_EXPR
:
1233 f2
= TREE_FIXED_CST (arg2
);
1240 f2
.data
.high
= w2
.elt (1);
1241 f2
.data
.low
= w2
.elt (0);
1250 f1
= TREE_FIXED_CST (arg1
);
1251 type
= TREE_TYPE (arg1
);
1252 sat_p
= TYPE_SATURATING (type
);
1253 overflow_p
= fixed_arithmetic (&result
, code
, &f1
, &f2
, sat_p
);
1254 t
= build_fixed (type
, result
);
1255 /* Propagate overflow flags. */
1256 if (overflow_p
| TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
))
1257 TREE_OVERFLOW (t
) = 1;
1261 if (TREE_CODE (arg1
) == COMPLEX_CST
)
1263 tree type
= TREE_TYPE (arg1
);
1264 tree r1
= TREE_REALPART (arg1
);
1265 tree i1
= TREE_IMAGPART (arg1
);
1266 tree r2
= TREE_REALPART (arg2
);
1267 tree i2
= TREE_IMAGPART (arg2
);
1274 real
= const_binop (code
, r1
, r2
);
1275 imag
= const_binop (code
, i1
, i2
);
1279 if (COMPLEX_FLOAT_TYPE_P (type
))
1280 return do_mpc_arg2 (arg1
, arg2
, type
,
1281 /* do_nonfinite= */ folding_initializer
,
1284 real
= const_binop (MINUS_EXPR
,
1285 const_binop (MULT_EXPR
, r1
, r2
),
1286 const_binop (MULT_EXPR
, i1
, i2
));
1287 imag
= const_binop (PLUS_EXPR
,
1288 const_binop (MULT_EXPR
, r1
, i2
),
1289 const_binop (MULT_EXPR
, i1
, r2
));
1293 if (COMPLEX_FLOAT_TYPE_P (type
))
1294 return do_mpc_arg2 (arg1
, arg2
, type
,
1295 /* do_nonfinite= */ folding_initializer
,
1298 case TRUNC_DIV_EXPR
:
1300 case FLOOR_DIV_EXPR
:
1301 case ROUND_DIV_EXPR
:
1302 if (flag_complex_method
== 0)
1304 /* Keep this algorithm in sync with
1305 tree-complex.c:expand_complex_div_straight().
1307 Expand complex division to scalars, straightforward algorithm.
1308 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1312 = const_binop (PLUS_EXPR
,
1313 const_binop (MULT_EXPR
, r2
, r2
),
1314 const_binop (MULT_EXPR
, i2
, i2
));
1316 = const_binop (PLUS_EXPR
,
1317 const_binop (MULT_EXPR
, r1
, r2
),
1318 const_binop (MULT_EXPR
, i1
, i2
));
1320 = const_binop (MINUS_EXPR
,
1321 const_binop (MULT_EXPR
, i1
, r2
),
1322 const_binop (MULT_EXPR
, r1
, i2
));
1324 real
= const_binop (code
, t1
, magsquared
);
1325 imag
= const_binop (code
, t2
, magsquared
);
1329 /* Keep this algorithm in sync with
1330 tree-complex.c:expand_complex_div_wide().
1332 Expand complex division to scalars, modified algorithm to minimize
1333 overflow with wide input ranges. */
1334 tree compare
= fold_build2 (LT_EXPR
, boolean_type_node
,
1335 fold_abs_const (r2
, TREE_TYPE (type
)),
1336 fold_abs_const (i2
, TREE_TYPE (type
)));
1338 if (integer_nonzerop (compare
))
1340 /* In the TRUE branch, we compute
1342 div = (br * ratio) + bi;
1343 tr = (ar * ratio) + ai;
1344 ti = (ai * ratio) - ar;
1347 tree ratio
= const_binop (code
, r2
, i2
);
1348 tree div
= const_binop (PLUS_EXPR
, i2
,
1349 const_binop (MULT_EXPR
, r2
, ratio
));
1350 real
= const_binop (MULT_EXPR
, r1
, ratio
);
1351 real
= const_binop (PLUS_EXPR
, real
, i1
);
1352 real
= const_binop (code
, real
, div
);
1354 imag
= const_binop (MULT_EXPR
, i1
, ratio
);
1355 imag
= const_binop (MINUS_EXPR
, imag
, r1
);
1356 imag
= const_binop (code
, imag
, div
);
1360 /* In the FALSE branch, we compute
1362 divisor = (d * ratio) + c;
1363 tr = (b * ratio) + a;
1364 ti = b - (a * ratio);
1367 tree ratio
= const_binop (code
, i2
, r2
);
1368 tree div
= const_binop (PLUS_EXPR
, r2
,
1369 const_binop (MULT_EXPR
, i2
, ratio
));
1371 real
= const_binop (MULT_EXPR
, i1
, ratio
);
1372 real
= const_binop (PLUS_EXPR
, real
, r1
);
1373 real
= const_binop (code
, real
, div
);
1375 imag
= const_binop (MULT_EXPR
, r1
, ratio
);
1376 imag
= const_binop (MINUS_EXPR
, i1
, imag
);
1377 imag
= const_binop (code
, imag
, div
);
1387 return build_complex (type
, real
, imag
);
1390 if (TREE_CODE (arg1
) == VECTOR_CST
1391 && TREE_CODE (arg2
) == VECTOR_CST
)
1393 tree type
= TREE_TYPE (arg1
);
1394 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
1395 tree
*elts
= XALLOCAVEC (tree
, count
);
1397 for (i
= 0; i
< count
; i
++)
1399 tree elem1
= VECTOR_CST_ELT (arg1
, i
);
1400 tree elem2
= VECTOR_CST_ELT (arg2
, i
);
1402 elts
[i
] = const_binop (code
, elem1
, elem2
);
1404 /* It is possible that const_binop cannot handle the given
1405 code and return NULL_TREE */
1406 if (elts
[i
] == NULL_TREE
)
1410 return build_vector (type
, elts
);
1413 /* Shifts allow a scalar offset for a vector. */
1414 if (TREE_CODE (arg1
) == VECTOR_CST
1415 && TREE_CODE (arg2
) == INTEGER_CST
)
1417 tree type
= TREE_TYPE (arg1
);
1418 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
1419 tree
*elts
= XALLOCAVEC (tree
, count
);
1421 if (code
== VEC_RSHIFT_EXPR
)
1423 if (!tree_fits_uhwi_p (arg2
))
1426 unsigned HOST_WIDE_INT shiftc
= tree_to_uhwi (arg2
);
1427 unsigned HOST_WIDE_INT outerc
= tree_to_uhwi (TYPE_SIZE (type
));
1428 unsigned HOST_WIDE_INT innerc
1429 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type
)));
1430 if (shiftc
>= outerc
|| (shiftc
% innerc
) != 0)
1432 int offset
= shiftc
/ innerc
;
1433 /* The direction of VEC_RSHIFT_EXPR is endian dependent.
1434 For reductions, if !BYTES_BIG_ENDIAN then compiler picks first
1435 vector element, but last element if BYTES_BIG_ENDIAN. */
1436 if (BYTES_BIG_ENDIAN
)
1438 tree zero
= build_zero_cst (TREE_TYPE (type
));
1439 for (i
= 0; i
< count
; i
++)
1441 if (i
+ offset
< 0 || i
+ offset
>= count
)
1444 elts
[i
] = VECTOR_CST_ELT (arg1
, i
+ offset
);
1448 for (i
= 0; i
< count
; i
++)
1450 tree elem1
= VECTOR_CST_ELT (arg1
, i
);
1452 elts
[i
] = const_binop (code
, elem1
, arg2
);
1454 /* It is possible that const_binop cannot handle the given
1455 code and return NULL_TREE */
1456 if (elts
[i
] == NULL_TREE
)
1460 return build_vector (type
, elts
);
1465 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1466 indicates which particular sizetype to create. */
1469 size_int_kind (HOST_WIDE_INT number
, enum size_type_kind kind
)
1471 return build_int_cst (sizetype_tab
[(int) kind
], number
);
1474 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1475 is a tree code. The type of the result is taken from the operands.
1476 Both must be equivalent integer types, ala int_binop_types_match_p.
1477 If the operands are constant, so is the result. */
1480 size_binop_loc (location_t loc
, enum tree_code code
, tree arg0
, tree arg1
)
1482 tree type
= TREE_TYPE (arg0
);
1484 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
1485 return error_mark_node
;
1487 gcc_assert (int_binop_types_match_p (code
, TREE_TYPE (arg0
),
1490 /* Handle the special case of two integer constants faster. */
1491 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
1493 /* And some specific cases even faster than that. */
1494 if (code
== PLUS_EXPR
)
1496 if (integer_zerop (arg0
) && !TREE_OVERFLOW (arg0
))
1498 if (integer_zerop (arg1
) && !TREE_OVERFLOW (arg1
))
1501 else if (code
== MINUS_EXPR
)
1503 if (integer_zerop (arg1
) && !TREE_OVERFLOW (arg1
))
1506 else if (code
== MULT_EXPR
)
1508 if (integer_onep (arg0
) && !TREE_OVERFLOW (arg0
))
1512 /* Handle general case of two integer constants. For sizetype
1513 constant calculations we always want to know about overflow,
1514 even in the unsigned case. */
1515 return int_const_binop_1 (code
, arg0
, arg1
, -1);
1518 return fold_build2_loc (loc
, code
, type
, arg0
, arg1
);
1521 /* Given two values, either both of sizetype or both of bitsizetype,
1522 compute the difference between the two values. Return the value
1523 in signed type corresponding to the type of the operands. */
1526 size_diffop_loc (location_t loc
, tree arg0
, tree arg1
)
1528 tree type
= TREE_TYPE (arg0
);
1531 gcc_assert (int_binop_types_match_p (MINUS_EXPR
, TREE_TYPE (arg0
),
1534 /* If the type is already signed, just do the simple thing. */
1535 if (!TYPE_UNSIGNED (type
))
1536 return size_binop_loc (loc
, MINUS_EXPR
, arg0
, arg1
);
1538 if (type
== sizetype
)
1540 else if (type
== bitsizetype
)
1541 ctype
= sbitsizetype
;
1543 ctype
= signed_type_for (type
);
1545 /* If either operand is not a constant, do the conversions to the signed
1546 type and subtract. The hardware will do the right thing with any
1547 overflow in the subtraction. */
1548 if (TREE_CODE (arg0
) != INTEGER_CST
|| TREE_CODE (arg1
) != INTEGER_CST
)
1549 return size_binop_loc (loc
, MINUS_EXPR
,
1550 fold_convert_loc (loc
, ctype
, arg0
),
1551 fold_convert_loc (loc
, ctype
, arg1
));
1553 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1554 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1555 overflow) and negate (which can't either). Special-case a result
1556 of zero while we're here. */
1557 if (tree_int_cst_equal (arg0
, arg1
))
1558 return build_int_cst (ctype
, 0);
1559 else if (tree_int_cst_lt (arg1
, arg0
))
1560 return fold_convert_loc (loc
, ctype
,
1561 size_binop_loc (loc
, MINUS_EXPR
, arg0
, arg1
));
1563 return size_binop_loc (loc
, MINUS_EXPR
, build_int_cst (ctype
, 0),
1564 fold_convert_loc (loc
, ctype
,
1565 size_binop_loc (loc
,
1570 /* A subroutine of fold_convert_const handling conversions of an
1571 INTEGER_CST to another integer type. */
1574 fold_convert_const_int_from_int (tree type
, const_tree arg1
)
1576 /* Given an integer constant, make new constant with new type,
1577 appropriately sign-extended or truncated. Use widest_int
1578 so that any extension is done according ARG1's type. */
1579 return force_fit_type (type
, wi::to_widest (arg1
),
1580 !POINTER_TYPE_P (TREE_TYPE (arg1
)),
1581 TREE_OVERFLOW (arg1
));
1584 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1585 to an integer type. */
1588 fold_convert_const_int_from_real (enum tree_code code
, tree type
, const_tree arg1
)
1590 bool overflow
= false;
1593 /* The following code implements the floating point to integer
1594 conversion rules required by the Java Language Specification,
1595 that IEEE NaNs are mapped to zero and values that overflow
1596 the target precision saturate, i.e. values greater than
1597 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1598 are mapped to INT_MIN. These semantics are allowed by the
1599 C and C++ standards that simply state that the behavior of
1600 FP-to-integer conversion is unspecified upon overflow. */
1604 REAL_VALUE_TYPE x
= TREE_REAL_CST (arg1
);
1608 case FIX_TRUNC_EXPR
:
1609 real_trunc (&r
, VOIDmode
, &x
);
1616 /* If R is NaN, return zero and show we have an overflow. */
1617 if (REAL_VALUE_ISNAN (r
))
1620 val
= wi::zero (TYPE_PRECISION (type
));
1623 /* See if R is less than the lower bound or greater than the
1628 tree lt
= TYPE_MIN_VALUE (type
);
1629 REAL_VALUE_TYPE l
= real_value_from_int_cst (NULL_TREE
, lt
);
1630 if (REAL_VALUES_LESS (r
, l
))
1639 tree ut
= TYPE_MAX_VALUE (type
);
1642 REAL_VALUE_TYPE u
= real_value_from_int_cst (NULL_TREE
, ut
);
1643 if (REAL_VALUES_LESS (u
, r
))
1652 val
= real_to_integer (&r
, &overflow
, TYPE_PRECISION (type
));
1654 t
= force_fit_type (type
, val
, -1, overflow
| TREE_OVERFLOW (arg1
));
1658 /* A subroutine of fold_convert_const handling conversions of a
1659 FIXED_CST to an integer type. */
1662 fold_convert_const_int_from_fixed (tree type
, const_tree arg1
)
1665 double_int temp
, temp_trunc
;
1668 /* Right shift FIXED_CST to temp by fbit. */
1669 temp
= TREE_FIXED_CST (arg1
).data
;
1670 mode
= TREE_FIXED_CST (arg1
).mode
;
1671 if (GET_MODE_FBIT (mode
) < HOST_BITS_PER_DOUBLE_INT
)
1673 temp
= temp
.rshift (GET_MODE_FBIT (mode
),
1674 HOST_BITS_PER_DOUBLE_INT
,
1675 SIGNED_FIXED_POINT_MODE_P (mode
));
1677 /* Left shift temp to temp_trunc by fbit. */
1678 temp_trunc
= temp
.lshift (GET_MODE_FBIT (mode
),
1679 HOST_BITS_PER_DOUBLE_INT
,
1680 SIGNED_FIXED_POINT_MODE_P (mode
));
1684 temp
= double_int_zero
;
1685 temp_trunc
= double_int_zero
;
1688 /* If FIXED_CST is negative, we need to round the value toward 0.
1689 By checking if the fractional bits are not zero to add 1 to temp. */
1690 if (SIGNED_FIXED_POINT_MODE_P (mode
)
1691 && temp_trunc
.is_negative ()
1692 && TREE_FIXED_CST (arg1
).data
!= temp_trunc
)
1693 temp
+= double_int_one
;
1695 /* Given a fixed-point constant, make new constant with new type,
1696 appropriately sign-extended or truncated. */
1697 t
= force_fit_type (type
, temp
, -1,
1698 (temp
.is_negative ()
1699 && (TYPE_UNSIGNED (type
)
1700 < TYPE_UNSIGNED (TREE_TYPE (arg1
))))
1701 | TREE_OVERFLOW (arg1
));
1706 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1707 to another floating point type. */
1710 fold_convert_const_real_from_real (tree type
, const_tree arg1
)
1712 REAL_VALUE_TYPE value
;
1715 real_convert (&value
, TYPE_MODE (type
), &TREE_REAL_CST (arg1
));
1716 t
= build_real (type
, value
);
1718 /* If converting an infinity or NAN to a representation that doesn't
1719 have one, set the overflow bit so that we can produce some kind of
1720 error message at the appropriate point if necessary. It's not the
1721 most user-friendly message, but it's better than nothing. */
1722 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1
))
1723 && !MODE_HAS_INFINITIES (TYPE_MODE (type
)))
1724 TREE_OVERFLOW (t
) = 1;
1725 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1
))
1726 && !MODE_HAS_NANS (TYPE_MODE (type
)))
1727 TREE_OVERFLOW (t
) = 1;
1728 /* Regular overflow, conversion produced an infinity in a mode that
1729 can't represent them. */
1730 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type
))
1731 && REAL_VALUE_ISINF (value
)
1732 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1
)))
1733 TREE_OVERFLOW (t
) = 1;
1735 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
);
1739 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1740 to a floating point type. */
1743 fold_convert_const_real_from_fixed (tree type
, const_tree arg1
)
1745 REAL_VALUE_TYPE value
;
1748 real_convert_from_fixed (&value
, TYPE_MODE (type
), &TREE_FIXED_CST (arg1
));
1749 t
= build_real (type
, value
);
1751 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
);
1755 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1756 to another fixed-point type. */
1759 fold_convert_const_fixed_from_fixed (tree type
, const_tree arg1
)
1761 FIXED_VALUE_TYPE value
;
1765 overflow_p
= fixed_convert (&value
, TYPE_MODE (type
), &TREE_FIXED_CST (arg1
),
1766 TYPE_SATURATING (type
));
1767 t
= build_fixed (type
, value
);
1769 /* Propagate overflow flags. */
1770 if (overflow_p
| TREE_OVERFLOW (arg1
))
1771 TREE_OVERFLOW (t
) = 1;
1775 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
1776 to a fixed-point type. */
1779 fold_convert_const_fixed_from_int (tree type
, const_tree arg1
)
1781 FIXED_VALUE_TYPE value
;
1786 gcc_assert (TREE_INT_CST_NUNITS (arg1
) <= 2);
1788 di
.low
= TREE_INT_CST_ELT (arg1
, 0);
1789 if (TREE_INT_CST_NUNITS (arg1
) == 1)
1790 di
.high
= (HOST_WIDE_INT
) di
.low
< 0 ? (HOST_WIDE_INT
) -1 : 0;
1792 di
.high
= TREE_INT_CST_ELT (arg1
, 1);
1794 overflow_p
= fixed_convert_from_int (&value
, TYPE_MODE (type
), di
,
1795 TYPE_UNSIGNED (TREE_TYPE (arg1
)),
1796 TYPE_SATURATING (type
));
1797 t
= build_fixed (type
, value
);
1799 /* Propagate overflow flags. */
1800 if (overflow_p
| TREE_OVERFLOW (arg1
))
1801 TREE_OVERFLOW (t
) = 1;
1805 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1806 to a fixed-point type. */
1809 fold_convert_const_fixed_from_real (tree type
, const_tree arg1
)
1811 FIXED_VALUE_TYPE value
;
1815 overflow_p
= fixed_convert_from_real (&value
, TYPE_MODE (type
),
1816 &TREE_REAL_CST (arg1
),
1817 TYPE_SATURATING (type
));
1818 t
= build_fixed (type
, value
);
1820 /* Propagate overflow flags. */
1821 if (overflow_p
| TREE_OVERFLOW (arg1
))
1822 TREE_OVERFLOW (t
) = 1;
1826 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1827 type TYPE. If no simplification can be done return NULL_TREE. */
1830 fold_convert_const (enum tree_code code
, tree type
, tree arg1
)
1832 if (TREE_TYPE (arg1
) == type
)
1835 if (POINTER_TYPE_P (type
) || INTEGRAL_TYPE_P (type
)
1836 || TREE_CODE (type
) == OFFSET_TYPE
)
1838 if (TREE_CODE (arg1
) == INTEGER_CST
)
1839 return fold_convert_const_int_from_int (type
, arg1
);
1840 else if (TREE_CODE (arg1
) == REAL_CST
)
1841 return fold_convert_const_int_from_real (code
, type
, arg1
);
1842 else if (TREE_CODE (arg1
) == FIXED_CST
)
1843 return fold_convert_const_int_from_fixed (type
, arg1
);
1845 else if (TREE_CODE (type
) == REAL_TYPE
)
1847 if (TREE_CODE (arg1
) == INTEGER_CST
)
1848 return build_real_from_int_cst (type
, arg1
);
1849 else if (TREE_CODE (arg1
) == REAL_CST
)
1850 return fold_convert_const_real_from_real (type
, arg1
);
1851 else if (TREE_CODE (arg1
) == FIXED_CST
)
1852 return fold_convert_const_real_from_fixed (type
, arg1
);
1854 else if (TREE_CODE (type
) == FIXED_POINT_TYPE
)
1856 if (TREE_CODE (arg1
) == FIXED_CST
)
1857 return fold_convert_const_fixed_from_fixed (type
, arg1
);
1858 else if (TREE_CODE (arg1
) == INTEGER_CST
)
1859 return fold_convert_const_fixed_from_int (type
, arg1
);
1860 else if (TREE_CODE (arg1
) == REAL_CST
)
1861 return fold_convert_const_fixed_from_real (type
, arg1
);
1866 /* Construct a vector of zero elements of vector type TYPE. */
1869 build_zero_vector (tree type
)
1873 t
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), integer_zero_node
);
1874 return build_vector_from_val (type
, t
);
1877 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
1880 fold_convertible_p (const_tree type
, const_tree arg
)
1882 tree orig
= TREE_TYPE (arg
);
1887 if (TREE_CODE (arg
) == ERROR_MARK
1888 || TREE_CODE (type
) == ERROR_MARK
1889 || TREE_CODE (orig
) == ERROR_MARK
)
1892 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig
))
1895 switch (TREE_CODE (type
))
1897 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
1898 case POINTER_TYPE
: case REFERENCE_TYPE
:
1900 if (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
1901 || TREE_CODE (orig
) == OFFSET_TYPE
)
1903 return (TREE_CODE (orig
) == VECTOR_TYPE
1904 && tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
1907 case FIXED_POINT_TYPE
:
1911 return TREE_CODE (type
) == TREE_CODE (orig
);
1918 /* Convert expression ARG to type TYPE. Used by the middle-end for
1919 simple conversions in preference to calling the front-end's convert. */
1922 fold_convert_loc (location_t loc
, tree type
, tree arg
)
1924 tree orig
= TREE_TYPE (arg
);
1930 if (TREE_CODE (arg
) == ERROR_MARK
1931 || TREE_CODE (type
) == ERROR_MARK
1932 || TREE_CODE (orig
) == ERROR_MARK
)
1933 return error_mark_node
;
1935 switch (TREE_CODE (type
))
1938 case REFERENCE_TYPE
:
1939 /* Handle conversions between pointers to different address spaces. */
1940 if (POINTER_TYPE_P (orig
)
1941 && (TYPE_ADDR_SPACE (TREE_TYPE (type
))
1942 != TYPE_ADDR_SPACE (TREE_TYPE (orig
))))
1943 return fold_build1_loc (loc
, ADDR_SPACE_CONVERT_EXPR
, type
, arg
);
1946 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
1948 if (TREE_CODE (arg
) == INTEGER_CST
)
1950 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
1951 if (tem
!= NULL_TREE
)
1954 if (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
1955 || TREE_CODE (orig
) == OFFSET_TYPE
)
1956 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
1957 if (TREE_CODE (orig
) == COMPLEX_TYPE
)
1958 return fold_convert_loc (loc
, type
,
1959 fold_build1_loc (loc
, REALPART_EXPR
,
1960 TREE_TYPE (orig
), arg
));
1961 gcc_assert (TREE_CODE (orig
) == VECTOR_TYPE
1962 && tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
1963 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
1966 if (TREE_CODE (arg
) == INTEGER_CST
)
1968 tem
= fold_convert_const (FLOAT_EXPR
, type
, arg
);
1969 if (tem
!= NULL_TREE
)
1972 else if (TREE_CODE (arg
) == REAL_CST
)
1974 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
1975 if (tem
!= NULL_TREE
)
1978 else if (TREE_CODE (arg
) == FIXED_CST
)
1980 tem
= fold_convert_const (FIXED_CONVERT_EXPR
, type
, arg
);
1981 if (tem
!= NULL_TREE
)
1985 switch (TREE_CODE (orig
))
1988 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
1989 case POINTER_TYPE
: case REFERENCE_TYPE
:
1990 return fold_build1_loc (loc
, FLOAT_EXPR
, type
, arg
);
1993 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
1995 case FIXED_POINT_TYPE
:
1996 return fold_build1_loc (loc
, FIXED_CONVERT_EXPR
, type
, arg
);
1999 tem
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2000 return fold_convert_loc (loc
, type
, tem
);
2006 case FIXED_POINT_TYPE
:
2007 if (TREE_CODE (arg
) == FIXED_CST
|| TREE_CODE (arg
) == INTEGER_CST
2008 || TREE_CODE (arg
) == REAL_CST
)
2010 tem
= fold_convert_const (FIXED_CONVERT_EXPR
, type
, arg
);
2011 if (tem
!= NULL_TREE
)
2012 goto fold_convert_exit
;
2015 switch (TREE_CODE (orig
))
2017 case FIXED_POINT_TYPE
:
2022 return fold_build1_loc (loc
, FIXED_CONVERT_EXPR
, type
, arg
);
2025 tem
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2026 return fold_convert_loc (loc
, type
, tem
);
2033 switch (TREE_CODE (orig
))
2036 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
2037 case POINTER_TYPE
: case REFERENCE_TYPE
:
2039 case FIXED_POINT_TYPE
:
2040 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
2041 fold_convert_loc (loc
, TREE_TYPE (type
), arg
),
2042 fold_convert_loc (loc
, TREE_TYPE (type
),
2043 integer_zero_node
));
2048 if (TREE_CODE (arg
) == COMPLEX_EXPR
)
2050 rpart
= fold_convert_loc (loc
, TREE_TYPE (type
),
2051 TREE_OPERAND (arg
, 0));
2052 ipart
= fold_convert_loc (loc
, TREE_TYPE (type
),
2053 TREE_OPERAND (arg
, 1));
2054 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
, ipart
);
2057 arg
= save_expr (arg
);
2058 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2059 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, TREE_TYPE (orig
), arg
);
2060 rpart
= fold_convert_loc (loc
, TREE_TYPE (type
), rpart
);
2061 ipart
= fold_convert_loc (loc
, TREE_TYPE (type
), ipart
);
2062 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
, ipart
);
2070 if (integer_zerop (arg
))
2071 return build_zero_vector (type
);
2072 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
2073 gcc_assert (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2074 || TREE_CODE (orig
) == VECTOR_TYPE
);
2075 return fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, type
, arg
);
2078 tem
= fold_ignored_result (arg
);
2079 return fold_build1_loc (loc
, NOP_EXPR
, type
, tem
);
2082 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig
))
2083 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2087 protected_set_expr_location_unshare (tem
, loc
);
2091 /* Return false if expr can be assumed not to be an lvalue, true
2095 maybe_lvalue_p (const_tree x
)
2097 /* We only need to wrap lvalue tree codes. */
2098 switch (TREE_CODE (x
))
2111 case ARRAY_RANGE_REF
:
2117 case PREINCREMENT_EXPR
:
2118 case PREDECREMENT_EXPR
:
2120 case TRY_CATCH_EXPR
:
2121 case WITH_CLEANUP_EXPR
:
2130 /* Assume the worst for front-end tree codes. */
2131 if ((int)TREE_CODE (x
) >= NUM_TREE_CODES
)
2139 /* Return an expr equal to X but certainly not valid as an lvalue. */
2142 non_lvalue_loc (location_t loc
, tree x
)
2144 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2149 if (! maybe_lvalue_p (x
))
2151 return build1_loc (loc
, NON_LVALUE_EXPR
, TREE_TYPE (x
), x
);
2154 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2155 Zero means allow extended lvalues. */
2157 int pedantic_lvalues
;
2159 /* When pedantic, return an expr equal to X but certainly not valid as a
2160 pedantic lvalue. Otherwise, return X. */
2163 pedantic_non_lvalue_loc (location_t loc
, tree x
)
2165 if (pedantic_lvalues
)
2166 return non_lvalue_loc (loc
, x
);
2168 return protected_set_expr_location_unshare (x
, loc
);
2171 /* Given a tree comparison code, return the code that is the logical inverse.
2172 It is generally not safe to do this for floating-point comparisons, except
2173 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2174 ERROR_MARK in this case. */
2177 invert_tree_comparison (enum tree_code code
, bool honor_nans
)
2179 if (honor_nans
&& flag_trapping_math
&& code
!= EQ_EXPR
&& code
!= NE_EXPR
2180 && code
!= ORDERED_EXPR
&& code
!= UNORDERED_EXPR
)
2190 return honor_nans
? UNLE_EXPR
: LE_EXPR
;
2192 return honor_nans
? UNLT_EXPR
: LT_EXPR
;
2194 return honor_nans
? UNGE_EXPR
: GE_EXPR
;
2196 return honor_nans
? UNGT_EXPR
: GT_EXPR
;
2210 return UNORDERED_EXPR
;
2211 case UNORDERED_EXPR
:
2212 return ORDERED_EXPR
;
2218 /* Similar, but return the comparison that results if the operands are
2219 swapped. This is safe for floating-point. */
2222 swap_tree_comparison (enum tree_code code
)
2229 case UNORDERED_EXPR
:
2255 /* Convert a comparison tree code from an enum tree_code representation
2256 into a compcode bit-based encoding. This function is the inverse of
2257 compcode_to_comparison. */
2259 static enum comparison_code
2260 comparison_to_compcode (enum tree_code code
)
2277 return COMPCODE_ORD
;
2278 case UNORDERED_EXPR
:
2279 return COMPCODE_UNORD
;
2281 return COMPCODE_UNLT
;
2283 return COMPCODE_UNEQ
;
2285 return COMPCODE_UNLE
;
2287 return COMPCODE_UNGT
;
2289 return COMPCODE_LTGT
;
2291 return COMPCODE_UNGE
;
2297 /* Convert a compcode bit-based encoding of a comparison operator back
2298 to GCC's enum tree_code representation. This function is the
2299 inverse of comparison_to_compcode. */
2301 static enum tree_code
2302 compcode_to_comparison (enum comparison_code code
)
2319 return ORDERED_EXPR
;
2320 case COMPCODE_UNORD
:
2321 return UNORDERED_EXPR
;
2339 /* Return a tree for the comparison which is the combination of
2340 doing the AND or OR (depending on CODE) of the two operations LCODE
2341 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2342 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2343 if this makes the transformation invalid. */
2346 combine_comparisons (location_t loc
,
2347 enum tree_code code
, enum tree_code lcode
,
2348 enum tree_code rcode
, tree truth_type
,
2349 tree ll_arg
, tree lr_arg
)
2351 bool honor_nans
= HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg
)));
2352 enum comparison_code lcompcode
= comparison_to_compcode (lcode
);
2353 enum comparison_code rcompcode
= comparison_to_compcode (rcode
);
2358 case TRUTH_AND_EXPR
: case TRUTH_ANDIF_EXPR
:
2359 compcode
= lcompcode
& rcompcode
;
2362 case TRUTH_OR_EXPR
: case TRUTH_ORIF_EXPR
:
2363 compcode
= lcompcode
| rcompcode
;
2372 /* Eliminate unordered comparisons, as well as LTGT and ORD
2373 which are not used unless the mode has NaNs. */
2374 compcode
&= ~COMPCODE_UNORD
;
2375 if (compcode
== COMPCODE_LTGT
)
2376 compcode
= COMPCODE_NE
;
2377 else if (compcode
== COMPCODE_ORD
)
2378 compcode
= COMPCODE_TRUE
;
2380 else if (flag_trapping_math
)
2382 /* Check that the original operation and the optimized ones will trap
2383 under the same condition. */
2384 bool ltrap
= (lcompcode
& COMPCODE_UNORD
) == 0
2385 && (lcompcode
!= COMPCODE_EQ
)
2386 && (lcompcode
!= COMPCODE_ORD
);
2387 bool rtrap
= (rcompcode
& COMPCODE_UNORD
) == 0
2388 && (rcompcode
!= COMPCODE_EQ
)
2389 && (rcompcode
!= COMPCODE_ORD
);
2390 bool trap
= (compcode
& COMPCODE_UNORD
) == 0
2391 && (compcode
!= COMPCODE_EQ
)
2392 && (compcode
!= COMPCODE_ORD
);
2394 /* In a short-circuited boolean expression the LHS might be
2395 such that the RHS, if evaluated, will never trap. For
2396 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2397 if neither x nor y is NaN. (This is a mixed blessing: for
2398 example, the expression above will never trap, hence
2399 optimizing it to x < y would be invalid). */
2400 if ((code
== TRUTH_ORIF_EXPR
&& (lcompcode
& COMPCODE_UNORD
))
2401 || (code
== TRUTH_ANDIF_EXPR
&& !(lcompcode
& COMPCODE_UNORD
)))
2404 /* If the comparison was short-circuited, and only the RHS
2405 trapped, we may now generate a spurious trap. */
2407 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
2410 /* If we changed the conditions that cause a trap, we lose. */
2411 if ((ltrap
|| rtrap
) != trap
)
2415 if (compcode
== COMPCODE_TRUE
)
2416 return constant_boolean_node (true, truth_type
);
2417 else if (compcode
== COMPCODE_FALSE
)
2418 return constant_boolean_node (false, truth_type
);
2421 enum tree_code tcode
;
2423 tcode
= compcode_to_comparison ((enum comparison_code
) compcode
);
2424 return fold_build2_loc (loc
, tcode
, truth_type
, ll_arg
, lr_arg
);
2428 /* Return nonzero if two operands (typically of the same tree node)
2429 are necessarily equal. If either argument has side-effects this
2430 function returns zero. FLAGS modifies behavior as follows:
2432 If OEP_ONLY_CONST is set, only return nonzero for constants.
2433 This function tests whether the operands are indistinguishable;
2434 it does not test whether they are equal using C's == operation.
2435 The distinction is important for IEEE floating point, because
2436 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2437 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2439 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2440 even though it may hold multiple values during a function.
2441 This is because a GCC tree node guarantees that nothing else is
2442 executed between the evaluation of its "operands" (which may often
2443 be evaluated in arbitrary order). Hence if the operands themselves
2444 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2445 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2446 unset means assuming isochronic (or instantaneous) tree equivalence.
2447 Unless comparing arbitrary expression trees, such as from different
2448 statements, this flag can usually be left unset.
2450 If OEP_PURE_SAME is set, then pure functions with identical arguments
2451 are considered the same. It is used when the caller has other ways
2452 to ensure that global memory is unchanged in between. */
2455 operand_equal_p (const_tree arg0
, const_tree arg1
, unsigned int flags
)
2457 /* If either is ERROR_MARK, they aren't equal. */
2458 if (TREE_CODE (arg0
) == ERROR_MARK
|| TREE_CODE (arg1
) == ERROR_MARK
2459 || TREE_TYPE (arg0
) == error_mark_node
2460 || TREE_TYPE (arg1
) == error_mark_node
)
2463 /* Similar, if either does not have a type (like a released SSA name),
2464 they aren't equal. */
2465 if (!TREE_TYPE (arg0
) || !TREE_TYPE (arg1
))
2468 /* Check equality of integer constants before bailing out due to
2469 precision differences. */
2470 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
2471 return tree_int_cst_equal (arg0
, arg1
);
2473 /* If both types don't have the same signedness, then we can't consider
2474 them equal. We must check this before the STRIP_NOPS calls
2475 because they may change the signedness of the arguments. As pointers
2476 strictly don't have a signedness, require either two pointers or
2477 two non-pointers as well. */
2478 if (TYPE_UNSIGNED (TREE_TYPE (arg0
)) != TYPE_UNSIGNED (TREE_TYPE (arg1
))
2479 || POINTER_TYPE_P (TREE_TYPE (arg0
)) != POINTER_TYPE_P (TREE_TYPE (arg1
)))
2482 /* We cannot consider pointers to different address space equal. */
2483 if (POINTER_TYPE_P (TREE_TYPE (arg0
)) && POINTER_TYPE_P (TREE_TYPE (arg1
))
2484 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0
)))
2485 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1
)))))
2488 /* If both types don't have the same precision, then it is not safe
2490 if (element_precision (TREE_TYPE (arg0
))
2491 != element_precision (TREE_TYPE (arg1
)))
2497 /* In case both args are comparisons but with different comparison
2498 code, try to swap the comparison operands of one arg to produce
2499 a match and compare that variant. */
2500 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
2501 && COMPARISON_CLASS_P (arg0
)
2502 && COMPARISON_CLASS_P (arg1
))
2504 enum tree_code swap_code
= swap_tree_comparison (TREE_CODE (arg1
));
2506 if (TREE_CODE (arg0
) == swap_code
)
2507 return operand_equal_p (TREE_OPERAND (arg0
, 0),
2508 TREE_OPERAND (arg1
, 1), flags
)
2509 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2510 TREE_OPERAND (arg1
, 0), flags
);
2513 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
2514 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2515 && !(CONVERT_EXPR_P (arg0
) && CONVERT_EXPR_P (arg1
)))
2518 /* This is needed for conversions and for COMPONENT_REF.
2519 Might as well play it safe and always test this. */
2520 if (TREE_CODE (TREE_TYPE (arg0
)) == ERROR_MARK
2521 || TREE_CODE (TREE_TYPE (arg1
)) == ERROR_MARK
2522 || TYPE_MODE (TREE_TYPE (arg0
)) != TYPE_MODE (TREE_TYPE (arg1
)))
2525 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2526 We don't care about side effects in that case because the SAVE_EXPR
2527 takes care of that for us. In all other cases, two expressions are
2528 equal if they have no side effects. If we have two identical
2529 expressions with side effects that should be treated the same due
2530 to the only side effects being identical SAVE_EXPR's, that will
2531 be detected in the recursive calls below.
2532 If we are taking an invariant address of two identical objects
2533 they are necessarily equal as well. */
2534 if (arg0
== arg1
&& ! (flags
& OEP_ONLY_CONST
)
2535 && (TREE_CODE (arg0
) == SAVE_EXPR
2536 || (flags
& OEP_CONSTANT_ADDRESS_OF
)
2537 || (! TREE_SIDE_EFFECTS (arg0
) && ! TREE_SIDE_EFFECTS (arg1
))))
2540 /* Next handle constant cases, those for which we can return 1 even
2541 if ONLY_CONST is set. */
2542 if (TREE_CONSTANT (arg0
) && TREE_CONSTANT (arg1
))
2543 switch (TREE_CODE (arg0
))
2546 return tree_int_cst_equal (arg0
, arg1
);
2549 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0
),
2550 TREE_FIXED_CST (arg1
));
2553 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0
),
2554 TREE_REAL_CST (arg1
)))
2558 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
))))
2560 /* If we do not distinguish between signed and unsigned zero,
2561 consider them equal. */
2562 if (real_zerop (arg0
) && real_zerop (arg1
))
2571 if (VECTOR_CST_NELTS (arg0
) != VECTOR_CST_NELTS (arg1
))
2574 for (i
= 0; i
< VECTOR_CST_NELTS (arg0
); ++i
)
2576 if (!operand_equal_p (VECTOR_CST_ELT (arg0
, i
),
2577 VECTOR_CST_ELT (arg1
, i
), flags
))
2584 return (operand_equal_p (TREE_REALPART (arg0
), TREE_REALPART (arg1
),
2586 && operand_equal_p (TREE_IMAGPART (arg0
), TREE_IMAGPART (arg1
),
2590 return (TREE_STRING_LENGTH (arg0
) == TREE_STRING_LENGTH (arg1
)
2591 && ! memcmp (TREE_STRING_POINTER (arg0
),
2592 TREE_STRING_POINTER (arg1
),
2593 TREE_STRING_LENGTH (arg0
)));
2596 return operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0),
2597 TREE_CONSTANT (arg0
) && TREE_CONSTANT (arg1
)
2598 ? OEP_CONSTANT_ADDRESS_OF
: 0);
2603 if (flags
& OEP_ONLY_CONST
)
2606 /* Define macros to test an operand from arg0 and arg1 for equality and a
2607 variant that allows null and views null as being different from any
2608 non-null value. In the latter case, if either is null, the both
2609 must be; otherwise, do the normal comparison. */
2610 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2611 TREE_OPERAND (arg1, N), flags)
2613 #define OP_SAME_WITH_NULL(N) \
2614 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2615 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2617 switch (TREE_CODE_CLASS (TREE_CODE (arg0
)))
2620 /* Two conversions are equal only if signedness and modes match. */
2621 switch (TREE_CODE (arg0
))
2624 case FIX_TRUNC_EXPR
:
2625 if (TYPE_UNSIGNED (TREE_TYPE (arg0
))
2626 != TYPE_UNSIGNED (TREE_TYPE (arg1
)))
2636 case tcc_comparison
:
2638 if (OP_SAME (0) && OP_SAME (1))
2641 /* For commutative ops, allow the other order. */
2642 return (commutative_tree_code (TREE_CODE (arg0
))
2643 && operand_equal_p (TREE_OPERAND (arg0
, 0),
2644 TREE_OPERAND (arg1
, 1), flags
)
2645 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2646 TREE_OPERAND (arg1
, 0), flags
));
2649 /* If either of the pointer (or reference) expressions we are
2650 dereferencing contain a side effect, these cannot be equal,
2651 but their addresses can be. */
2652 if ((flags
& OEP_CONSTANT_ADDRESS_OF
) == 0
2653 && (TREE_SIDE_EFFECTS (arg0
)
2654 || TREE_SIDE_EFFECTS (arg1
)))
2657 switch (TREE_CODE (arg0
))
2660 flags
&= ~OEP_CONSTANT_ADDRESS_OF
;
2667 case TARGET_MEM_REF
:
2668 flags
&= ~OEP_CONSTANT_ADDRESS_OF
;
2669 /* Require equal extra operands and then fall through to MEM_REF
2670 handling of the two common operands. */
2671 if (!OP_SAME_WITH_NULL (2)
2672 || !OP_SAME_WITH_NULL (3)
2673 || !OP_SAME_WITH_NULL (4))
2677 flags
&= ~OEP_CONSTANT_ADDRESS_OF
;
2678 /* Require equal access sizes, and similar pointer types.
2679 We can have incomplete types for array references of
2680 variable-sized arrays from the Fortran frontend
2681 though. Also verify the types are compatible. */
2682 return ((TYPE_SIZE (TREE_TYPE (arg0
)) == TYPE_SIZE (TREE_TYPE (arg1
))
2683 || (TYPE_SIZE (TREE_TYPE (arg0
))
2684 && TYPE_SIZE (TREE_TYPE (arg1
))
2685 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0
)),
2686 TYPE_SIZE (TREE_TYPE (arg1
)), flags
)))
2687 && types_compatible_p (TREE_TYPE (arg0
), TREE_TYPE (arg1
))
2688 && alias_ptr_types_compatible_p
2689 (TREE_TYPE (TREE_OPERAND (arg0
, 1)),
2690 TREE_TYPE (TREE_OPERAND (arg1
, 1)))
2691 && OP_SAME (0) && OP_SAME (1));
2694 case ARRAY_RANGE_REF
:
2695 /* Operands 2 and 3 may be null.
2696 Compare the array index by value if it is constant first as we
2697 may have different types but same value here. */
2700 flags
&= ~OEP_CONSTANT_ADDRESS_OF
;
2701 return ((tree_int_cst_equal (TREE_OPERAND (arg0
, 1),
2702 TREE_OPERAND (arg1
, 1))
2704 && OP_SAME_WITH_NULL (2)
2705 && OP_SAME_WITH_NULL (3));
2708 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2709 may be NULL when we're called to compare MEM_EXPRs. */
2710 if (!OP_SAME_WITH_NULL (0)
2713 flags
&= ~OEP_CONSTANT_ADDRESS_OF
;
2714 return OP_SAME_WITH_NULL (2);
2719 flags
&= ~OEP_CONSTANT_ADDRESS_OF
;
2720 return OP_SAME (1) && OP_SAME (2);
2726 case tcc_expression
:
2727 switch (TREE_CODE (arg0
))
2730 case TRUTH_NOT_EXPR
:
2733 case TRUTH_ANDIF_EXPR
:
2734 case TRUTH_ORIF_EXPR
:
2735 return OP_SAME (0) && OP_SAME (1);
2738 case WIDEN_MULT_PLUS_EXPR
:
2739 case WIDEN_MULT_MINUS_EXPR
:
2742 /* The multiplication operands are commutative. */
2745 case TRUTH_AND_EXPR
:
2747 case TRUTH_XOR_EXPR
:
2748 if (OP_SAME (0) && OP_SAME (1))
2751 /* Otherwise take into account this is a commutative operation. */
2752 return (operand_equal_p (TREE_OPERAND (arg0
, 0),
2753 TREE_OPERAND (arg1
, 1), flags
)
2754 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2755 TREE_OPERAND (arg1
, 0), flags
));
2760 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2767 switch (TREE_CODE (arg0
))
2770 /* If the CALL_EXPRs call different functions, then they
2771 clearly can not be equal. */
2772 if (! operand_equal_p (CALL_EXPR_FN (arg0
), CALL_EXPR_FN (arg1
),
2777 unsigned int cef
= call_expr_flags (arg0
);
2778 if (flags
& OEP_PURE_SAME
)
2779 cef
&= ECF_CONST
| ECF_PURE
;
2786 /* Now see if all the arguments are the same. */
2788 const_call_expr_arg_iterator iter0
, iter1
;
2790 for (a0
= first_const_call_expr_arg (arg0
, &iter0
),
2791 a1
= first_const_call_expr_arg (arg1
, &iter1
);
2793 a0
= next_const_call_expr_arg (&iter0
),
2794 a1
= next_const_call_expr_arg (&iter1
))
2795 if (! operand_equal_p (a0
, a1
, flags
))
2798 /* If we get here and both argument lists are exhausted
2799 then the CALL_EXPRs are equal. */
2800 return ! (a0
|| a1
);
2806 case tcc_declaration
:
2807 /* Consider __builtin_sqrt equal to sqrt. */
2808 return (TREE_CODE (arg0
) == FUNCTION_DECL
2809 && DECL_BUILT_IN (arg0
) && DECL_BUILT_IN (arg1
)
2810 && DECL_BUILT_IN_CLASS (arg0
) == DECL_BUILT_IN_CLASS (arg1
)
2811 && DECL_FUNCTION_CODE (arg0
) == DECL_FUNCTION_CODE (arg1
));
2818 #undef OP_SAME_WITH_NULL
2821 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2822 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2824 When in doubt, return 0. */
2827 operand_equal_for_comparison_p (tree arg0
, tree arg1
, tree other
)
2829 int unsignedp1
, unsignedpo
;
2830 tree primarg0
, primarg1
, primother
;
2831 unsigned int correct_width
;
2833 if (operand_equal_p (arg0
, arg1
, 0))
2836 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
2837 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1
)))
2840 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2841 and see if the inner values are the same. This removes any
2842 signedness comparison, which doesn't matter here. */
2843 primarg0
= arg0
, primarg1
= arg1
;
2844 STRIP_NOPS (primarg0
);
2845 STRIP_NOPS (primarg1
);
2846 if (operand_equal_p (primarg0
, primarg1
, 0))
2849 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2850 actual comparison operand, ARG0.
2852 First throw away any conversions to wider types
2853 already present in the operands. */
2855 primarg1
= get_narrower (arg1
, &unsignedp1
);
2856 primother
= get_narrower (other
, &unsignedpo
);
2858 correct_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
2859 if (unsignedp1
== unsignedpo
2860 && TYPE_PRECISION (TREE_TYPE (primarg1
)) < correct_width
2861 && TYPE_PRECISION (TREE_TYPE (primother
)) < correct_width
)
2863 tree type
= TREE_TYPE (arg0
);
2865 /* Make sure shorter operand is extended the right way
2866 to match the longer operand. */
2867 primarg1
= fold_convert (signed_or_unsigned_type_for
2868 (unsignedp1
, TREE_TYPE (primarg1
)), primarg1
);
2870 if (operand_equal_p (arg0
, fold_convert (type
, primarg1
), 0))
2877 /* See if ARG is an expression that is either a comparison or is performing
2878 arithmetic on comparisons. The comparisons must only be comparing
2879 two different values, which will be stored in *CVAL1 and *CVAL2; if
2880 they are nonzero it means that some operands have already been found.
2881 No variables may be used anywhere else in the expression except in the
2882 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2883 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2885 If this is true, return 1. Otherwise, return zero. */
2888 twoval_comparison_p (tree arg
, tree
*cval1
, tree
*cval2
, int *save_p
)
2890 enum tree_code code
= TREE_CODE (arg
);
2891 enum tree_code_class tclass
= TREE_CODE_CLASS (code
);
2893 /* We can handle some of the tcc_expression cases here. */
2894 if (tclass
== tcc_expression
&& code
== TRUTH_NOT_EXPR
)
2896 else if (tclass
== tcc_expression
2897 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
2898 || code
== COMPOUND_EXPR
))
2899 tclass
= tcc_binary
;
2901 else if (tclass
== tcc_expression
&& code
== SAVE_EXPR
2902 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg
, 0)))
2904 /* If we've already found a CVAL1 or CVAL2, this expression is
2905 two complex to handle. */
2906 if (*cval1
|| *cval2
)
2916 return twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
);
2919 return (twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
)
2920 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
2921 cval1
, cval2
, save_p
));
2926 case tcc_expression
:
2927 if (code
== COND_EXPR
)
2928 return (twoval_comparison_p (TREE_OPERAND (arg
, 0),
2929 cval1
, cval2
, save_p
)
2930 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
2931 cval1
, cval2
, save_p
)
2932 && twoval_comparison_p (TREE_OPERAND (arg
, 2),
2933 cval1
, cval2
, save_p
));
2936 case tcc_comparison
:
2937 /* First see if we can handle the first operand, then the second. For
2938 the second operand, we know *CVAL1 can't be zero. It must be that
2939 one side of the comparison is each of the values; test for the
2940 case where this isn't true by failing if the two operands
2943 if (operand_equal_p (TREE_OPERAND (arg
, 0),
2944 TREE_OPERAND (arg
, 1), 0))
2948 *cval1
= TREE_OPERAND (arg
, 0);
2949 else if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 0), 0))
2951 else if (*cval2
== 0)
2952 *cval2
= TREE_OPERAND (arg
, 0);
2953 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 0), 0))
2958 if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 1), 0))
2960 else if (*cval2
== 0)
2961 *cval2
= TREE_OPERAND (arg
, 1);
2962 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 1), 0))
2974 /* ARG is a tree that is known to contain just arithmetic operations and
2975 comparisons. Evaluate the operations in the tree substituting NEW0 for
2976 any occurrence of OLD0 as an operand of a comparison and likewise for
2980 eval_subst (location_t loc
, tree arg
, tree old0
, tree new0
,
2981 tree old1
, tree new1
)
2983 tree type
= TREE_TYPE (arg
);
2984 enum tree_code code
= TREE_CODE (arg
);
2985 enum tree_code_class tclass
= TREE_CODE_CLASS (code
);
2987 /* We can handle some of the tcc_expression cases here. */
2988 if (tclass
== tcc_expression
&& code
== TRUTH_NOT_EXPR
)
2990 else if (tclass
== tcc_expression
2991 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
2992 tclass
= tcc_binary
;
2997 return fold_build1_loc (loc
, code
, type
,
2998 eval_subst (loc
, TREE_OPERAND (arg
, 0),
2999 old0
, new0
, old1
, new1
));
3002 return fold_build2_loc (loc
, code
, type
,
3003 eval_subst (loc
, TREE_OPERAND (arg
, 0),
3004 old0
, new0
, old1
, new1
),
3005 eval_subst (loc
, TREE_OPERAND (arg
, 1),
3006 old0
, new0
, old1
, new1
));
3008 case tcc_expression
:
3012 return eval_subst (loc
, TREE_OPERAND (arg
, 0), old0
, new0
,
3016 return eval_subst (loc
, TREE_OPERAND (arg
, 1), old0
, new0
,
3020 return fold_build3_loc (loc
, code
, type
,
3021 eval_subst (loc
, TREE_OPERAND (arg
, 0),
3022 old0
, new0
, old1
, new1
),
3023 eval_subst (loc
, TREE_OPERAND (arg
, 1),
3024 old0
, new0
, old1
, new1
),
3025 eval_subst (loc
, TREE_OPERAND (arg
, 2),
3026 old0
, new0
, old1
, new1
));
3030 /* Fall through - ??? */
3032 case tcc_comparison
:
3034 tree arg0
= TREE_OPERAND (arg
, 0);
3035 tree arg1
= TREE_OPERAND (arg
, 1);
3037 /* We need to check both for exact equality and tree equality. The
3038 former will be true if the operand has a side-effect. In that
3039 case, we know the operand occurred exactly once. */
3041 if (arg0
== old0
|| operand_equal_p (arg0
, old0
, 0))
3043 else if (arg0
== old1
|| operand_equal_p (arg0
, old1
, 0))
3046 if (arg1
== old0
|| operand_equal_p (arg1
, old0
, 0))
3048 else if (arg1
== old1
|| operand_equal_p (arg1
, old1
, 0))
3051 return fold_build2_loc (loc
, code
, type
, arg0
, arg1
);
3059 /* Return a tree for the case when the result of an expression is RESULT
3060 converted to TYPE and OMITTED was previously an operand of the expression
3061 but is now not needed (e.g., we folded OMITTED * 0).
3063 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3064 the conversion of RESULT to TYPE. */
3067 omit_one_operand_loc (location_t loc
, tree type
, tree result
, tree omitted
)
3069 tree t
= fold_convert_loc (loc
, type
, result
);
3071 /* If the resulting operand is an empty statement, just return the omitted
3072 statement casted to void. */
3073 if (IS_EMPTY_STMT (t
) && TREE_SIDE_EFFECTS (omitted
))
3074 return build1_loc (loc
, NOP_EXPR
, void_type_node
,
3075 fold_ignored_result (omitted
));
3077 if (TREE_SIDE_EFFECTS (omitted
))
3078 return build2_loc (loc
, COMPOUND_EXPR
, type
,
3079 fold_ignored_result (omitted
), t
);
3081 return non_lvalue_loc (loc
, t
);
3084 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3087 pedantic_omit_one_operand_loc (location_t loc
, tree type
, tree result
,
3090 tree t
= fold_convert_loc (loc
, type
, result
);
3092 /* If the resulting operand is an empty statement, just return the omitted
3093 statement casted to void. */
3094 if (IS_EMPTY_STMT (t
) && TREE_SIDE_EFFECTS (omitted
))
3095 return build1_loc (loc
, NOP_EXPR
, void_type_node
,
3096 fold_ignored_result (omitted
));
3098 if (TREE_SIDE_EFFECTS (omitted
))
3099 return build2_loc (loc
, COMPOUND_EXPR
, type
,
3100 fold_ignored_result (omitted
), t
);
3102 return pedantic_non_lvalue_loc (loc
, t
);
3105 /* Return a tree for the case when the result of an expression is RESULT
3106 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3107 of the expression but are now not needed.
3109 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3110 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3111 evaluated before OMITTED2. Otherwise, if neither has side effects,
3112 just do the conversion of RESULT to TYPE. */
3115 omit_two_operands_loc (location_t loc
, tree type
, tree result
,
3116 tree omitted1
, tree omitted2
)
3118 tree t
= fold_convert_loc (loc
, type
, result
);
3120 if (TREE_SIDE_EFFECTS (omitted2
))
3121 t
= build2_loc (loc
, COMPOUND_EXPR
, type
, omitted2
, t
);
3122 if (TREE_SIDE_EFFECTS (omitted1
))
3123 t
= build2_loc (loc
, COMPOUND_EXPR
, type
, omitted1
, t
);
3125 return TREE_CODE (t
) != COMPOUND_EXPR
? non_lvalue_loc (loc
, t
) : t
;
3129 /* Return a simplified tree node for the truth-negation of ARG. This
3130 never alters ARG itself. We assume that ARG is an operation that
3131 returns a truth value (0 or 1).
3133 FIXME: one would think we would fold the result, but it causes
3134 problems with the dominator optimizer. */
3137 fold_truth_not_expr (location_t loc
, tree arg
)
3139 tree type
= TREE_TYPE (arg
);
3140 enum tree_code code
= TREE_CODE (arg
);
3141 location_t loc1
, loc2
;
3143 /* If this is a comparison, we can simply invert it, except for
3144 floating-point non-equality comparisons, in which case we just
3145 enclose a TRUTH_NOT_EXPR around what we have. */
3147 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
3149 tree op_type
= TREE_TYPE (TREE_OPERAND (arg
, 0));
3150 if (FLOAT_TYPE_P (op_type
)
3151 && flag_trapping_math
3152 && code
!= ORDERED_EXPR
&& code
!= UNORDERED_EXPR
3153 && code
!= NE_EXPR
&& code
!= EQ_EXPR
)
3156 code
= invert_tree_comparison (code
, HONOR_NANS (TYPE_MODE (op_type
)));
3157 if (code
== ERROR_MARK
)
3160 return build2_loc (loc
, code
, type
, TREE_OPERAND (arg
, 0),
3161 TREE_OPERAND (arg
, 1));
3167 return constant_boolean_node (integer_zerop (arg
), type
);
3169 case TRUTH_AND_EXPR
:
3170 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3171 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3172 return build2_loc (loc
, TRUTH_OR_EXPR
, type
,
3173 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3174 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3177 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3178 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3179 return build2_loc (loc
, TRUTH_AND_EXPR
, type
,
3180 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3181 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3183 case TRUTH_XOR_EXPR
:
3184 /* Here we can invert either operand. We invert the first operand
3185 unless the second operand is a TRUTH_NOT_EXPR in which case our
3186 result is the XOR of the first operand with the inside of the
3187 negation of the second operand. */
3189 if (TREE_CODE (TREE_OPERAND (arg
, 1)) == TRUTH_NOT_EXPR
)
3190 return build2_loc (loc
, TRUTH_XOR_EXPR
, type
, TREE_OPERAND (arg
, 0),
3191 TREE_OPERAND (TREE_OPERAND (arg
, 1), 0));
3193 return build2_loc (loc
, TRUTH_XOR_EXPR
, type
,
3194 invert_truthvalue_loc (loc
, TREE_OPERAND (arg
, 0)),
3195 TREE_OPERAND (arg
, 1));
3197 case TRUTH_ANDIF_EXPR
:
3198 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3199 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3200 return build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
3201 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3202 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3204 case TRUTH_ORIF_EXPR
:
3205 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3206 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3207 return build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
3208 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3209 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3211 case TRUTH_NOT_EXPR
:
3212 return TREE_OPERAND (arg
, 0);
3216 tree arg1
= TREE_OPERAND (arg
, 1);
3217 tree arg2
= TREE_OPERAND (arg
, 2);
3219 loc1
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3220 loc2
= expr_location_or (TREE_OPERAND (arg
, 2), loc
);
3222 /* A COND_EXPR may have a throw as one operand, which
3223 then has void type. Just leave void operands
3225 return build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg
, 0),
3226 VOID_TYPE_P (TREE_TYPE (arg1
))
3227 ? arg1
: invert_truthvalue_loc (loc1
, arg1
),
3228 VOID_TYPE_P (TREE_TYPE (arg2
))
3229 ? arg2
: invert_truthvalue_loc (loc2
, arg2
));
3233 loc1
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3234 return build2_loc (loc
, COMPOUND_EXPR
, type
,
3235 TREE_OPERAND (arg
, 0),
3236 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 1)));
3238 case NON_LVALUE_EXPR
:
3239 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3240 return invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0));
3243 if (TREE_CODE (TREE_TYPE (arg
)) == BOOLEAN_TYPE
)
3244 return build1_loc (loc
, TRUTH_NOT_EXPR
, type
, arg
);
3246 /* ... fall through ... */
3249 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3250 return build1_loc (loc
, TREE_CODE (arg
), type
,
3251 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)));
3254 if (!integer_onep (TREE_OPERAND (arg
, 1)))
3256 return build2_loc (loc
, EQ_EXPR
, type
, arg
, build_int_cst (type
, 0));
3259 return build1_loc (loc
, TRUTH_NOT_EXPR
, type
, arg
);
3261 case CLEANUP_POINT_EXPR
:
3262 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3263 return build1_loc (loc
, CLEANUP_POINT_EXPR
, type
,
3264 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)));
3271 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3272 assume that ARG is an operation that returns a truth value (0 or 1
3273 for scalars, 0 or -1 for vectors). Return the folded expression if
3274 folding is successful. Otherwise, return NULL_TREE. */
3277 fold_invert_truthvalue (location_t loc
, tree arg
)
3279 tree type
= TREE_TYPE (arg
);
3280 return fold_unary_loc (loc
, VECTOR_TYPE_P (type
)
3286 /* Return a simplified tree node for the truth-negation of ARG. This
3287 never alters ARG itself. We assume that ARG is an operation that
3288 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3291 invert_truthvalue_loc (location_t loc
, tree arg
)
3293 if (TREE_CODE (arg
) == ERROR_MARK
)
3296 tree type
= TREE_TYPE (arg
);
3297 return fold_build1_loc (loc
, VECTOR_TYPE_P (type
)
3303 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3304 operands are another bit-wise operation with a common input. If so,
3305 distribute the bit operations to save an operation and possibly two if
3306 constants are involved. For example, convert
3307 (A | B) & (A | C) into A | (B & C)
3308 Further simplification will occur if B and C are constants.
3310 If this optimization cannot be done, 0 will be returned. */
3313 distribute_bit_expr (location_t loc
, enum tree_code code
, tree type
,
3314 tree arg0
, tree arg1
)
3319 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
3320 || TREE_CODE (arg0
) == code
3321 || (TREE_CODE (arg0
) != BIT_AND_EXPR
3322 && TREE_CODE (arg0
) != BIT_IOR_EXPR
))
3325 if (operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0), 0))
3327 common
= TREE_OPERAND (arg0
, 0);
3328 left
= TREE_OPERAND (arg0
, 1);
3329 right
= TREE_OPERAND (arg1
, 1);
3331 else if (operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 1), 0))
3333 common
= TREE_OPERAND (arg0
, 0);
3334 left
= TREE_OPERAND (arg0
, 1);
3335 right
= TREE_OPERAND (arg1
, 0);
3337 else if (operand_equal_p (TREE_OPERAND (arg0
, 1), TREE_OPERAND (arg1
, 0), 0))
3339 common
= TREE_OPERAND (arg0
, 1);
3340 left
= TREE_OPERAND (arg0
, 0);
3341 right
= TREE_OPERAND (arg1
, 1);
3343 else if (operand_equal_p (TREE_OPERAND (arg0
, 1), TREE_OPERAND (arg1
, 1), 0))
3345 common
= TREE_OPERAND (arg0
, 1);
3346 left
= TREE_OPERAND (arg0
, 0);
3347 right
= TREE_OPERAND (arg1
, 0);
3352 common
= fold_convert_loc (loc
, type
, common
);
3353 left
= fold_convert_loc (loc
, type
, left
);
3354 right
= fold_convert_loc (loc
, type
, right
);
3355 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, common
,
3356 fold_build2_loc (loc
, code
, type
, left
, right
));
3359 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3360 with code CODE. This optimization is unsafe. */
3362 distribute_real_division (location_t loc
, enum tree_code code
, tree type
,
3363 tree arg0
, tree arg1
)
3365 bool mul0
= TREE_CODE (arg0
) == MULT_EXPR
;
3366 bool mul1
= TREE_CODE (arg1
) == MULT_EXPR
;
3368 /* (A / C) +- (B / C) -> (A +- B) / C. */
3370 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3371 TREE_OPERAND (arg1
, 1), 0))
3372 return fold_build2_loc (loc
, mul0
? MULT_EXPR
: RDIV_EXPR
, type
,
3373 fold_build2_loc (loc
, code
, type
,
3374 TREE_OPERAND (arg0
, 0),
3375 TREE_OPERAND (arg1
, 0)),
3376 TREE_OPERAND (arg0
, 1));
3378 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3379 if (operand_equal_p (TREE_OPERAND (arg0
, 0),
3380 TREE_OPERAND (arg1
, 0), 0)
3381 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
3382 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == REAL_CST
)
3384 REAL_VALUE_TYPE r0
, r1
;
3385 r0
= TREE_REAL_CST (TREE_OPERAND (arg0
, 1));
3386 r1
= TREE_REAL_CST (TREE_OPERAND (arg1
, 1));
3388 real_arithmetic (&r0
, RDIV_EXPR
, &dconst1
, &r0
);
3390 real_arithmetic (&r1
, RDIV_EXPR
, &dconst1
, &r1
);
3391 real_arithmetic (&r0
, code
, &r0
, &r1
);
3392 return fold_build2_loc (loc
, MULT_EXPR
, type
,
3393 TREE_OPERAND (arg0
, 0),
3394 build_real (type
, r0
));
3400 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3401 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3404 make_bit_field_ref (location_t loc
, tree inner
, tree type
,
3405 HOST_WIDE_INT bitsize
, HOST_WIDE_INT bitpos
, int unsignedp
)
3407 tree result
, bftype
;
3411 tree size
= TYPE_SIZE (TREE_TYPE (inner
));
3412 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner
))
3413 || POINTER_TYPE_P (TREE_TYPE (inner
)))
3414 && tree_fits_shwi_p (size
)
3415 && tree_to_shwi (size
) == bitsize
)
3416 return fold_convert_loc (loc
, type
, inner
);
3420 if (TYPE_PRECISION (bftype
) != bitsize
3421 || TYPE_UNSIGNED (bftype
) == !unsignedp
)
3422 bftype
= build_nonstandard_integer_type (bitsize
, 0);
3424 result
= build3_loc (loc
, BIT_FIELD_REF
, bftype
, inner
,
3425 size_int (bitsize
), bitsize_int (bitpos
));
3428 result
= fold_convert_loc (loc
, type
, result
);
3433 /* Optimize a bit-field compare.
3435 There are two cases: First is a compare against a constant and the
3436 second is a comparison of two items where the fields are at the same
3437 bit position relative to the start of a chunk (byte, halfword, word)
3438 large enough to contain it. In these cases we can avoid the shift
3439 implicit in bitfield extractions.
3441 For constants, we emit a compare of the shifted constant with the
3442 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3443 compared. For two fields at the same position, we do the ANDs with the
3444 similar mask and compare the result of the ANDs.
3446 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3447 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3448 are the left and right operands of the comparison, respectively.
3450 If the optimization described above can be done, we return the resulting
3451 tree. Otherwise we return zero. */
3454 optimize_bit_field_compare (location_t loc
, enum tree_code code
,
3455 tree compare_type
, tree lhs
, tree rhs
)
3457 HOST_WIDE_INT lbitpos
, lbitsize
, rbitpos
, rbitsize
, nbitpos
, nbitsize
;
3458 tree type
= TREE_TYPE (lhs
);
3460 int const_p
= TREE_CODE (rhs
) == INTEGER_CST
;
3461 machine_mode lmode
, rmode
, nmode
;
3462 int lunsignedp
, runsignedp
;
3463 int lvolatilep
= 0, rvolatilep
= 0;
3464 tree linner
, rinner
= NULL_TREE
;
3468 /* Get all the information about the extractions being done. If the bit size
3469 if the same as the size of the underlying object, we aren't doing an
3470 extraction at all and so can do nothing. We also don't want to
3471 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3472 then will no longer be able to replace it. */
3473 linner
= get_inner_reference (lhs
, &lbitsize
, &lbitpos
, &offset
, &lmode
,
3474 &lunsignedp
, &lvolatilep
, false);
3475 if (linner
== lhs
|| lbitsize
== GET_MODE_BITSIZE (lmode
) || lbitsize
< 0
3476 || offset
!= 0 || TREE_CODE (linner
) == PLACEHOLDER_EXPR
|| lvolatilep
)
3481 /* If this is not a constant, we can only do something if bit positions,
3482 sizes, and signedness are the same. */
3483 rinner
= get_inner_reference (rhs
, &rbitsize
, &rbitpos
, &offset
, &rmode
,
3484 &runsignedp
, &rvolatilep
, false);
3486 if (rinner
== rhs
|| lbitpos
!= rbitpos
|| lbitsize
!= rbitsize
3487 || lunsignedp
!= runsignedp
|| offset
!= 0
3488 || TREE_CODE (rinner
) == PLACEHOLDER_EXPR
|| rvolatilep
)
3492 /* See if we can find a mode to refer to this field. We should be able to,
3493 but fail if we can't. */
3494 nmode
= get_best_mode (lbitsize
, lbitpos
, 0, 0,
3495 const_p
? TYPE_ALIGN (TREE_TYPE (linner
))
3496 : MIN (TYPE_ALIGN (TREE_TYPE (linner
)),
3497 TYPE_ALIGN (TREE_TYPE (rinner
))),
3499 if (nmode
== VOIDmode
)
3502 /* Set signed and unsigned types of the precision of this mode for the
3504 unsigned_type
= lang_hooks
.types
.type_for_mode (nmode
, 1);
3506 /* Compute the bit position and size for the new reference and our offset
3507 within it. If the new reference is the same size as the original, we
3508 won't optimize anything, so return zero. */
3509 nbitsize
= GET_MODE_BITSIZE (nmode
);
3510 nbitpos
= lbitpos
& ~ (nbitsize
- 1);
3512 if (nbitsize
== lbitsize
)
3515 if (BYTES_BIG_ENDIAN
)
3516 lbitpos
= nbitsize
- lbitsize
- lbitpos
;
3518 /* Make the mask to be used against the extracted field. */
3519 mask
= build_int_cst_type (unsigned_type
, -1);
3520 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (nbitsize
- lbitsize
));
3521 mask
= const_binop (RSHIFT_EXPR
, mask
,
3522 size_int (nbitsize
- lbitsize
- lbitpos
));
3525 /* If not comparing with constant, just rework the comparison
3527 return fold_build2_loc (loc
, code
, compare_type
,
3528 fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
3529 make_bit_field_ref (loc
, linner
,
3534 fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
3535 make_bit_field_ref (loc
, rinner
,
3541 /* Otherwise, we are handling the constant case. See if the constant is too
3542 big for the field. Warn and return a tree of for 0 (false) if so. We do
3543 this not only for its own sake, but to avoid having to test for this
3544 error case below. If we didn't, we might generate wrong code.
3546 For unsigned fields, the constant shifted right by the field length should
3547 be all zero. For signed fields, the high-order bits should agree with
3552 if (wi::lrshift (rhs
, lbitsize
) != 0)
3554 warning (0, "comparison is always %d due to width of bit-field",
3556 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
3561 wide_int tem
= wi::arshift (rhs
, lbitsize
- 1);
3562 if (tem
!= 0 && tem
!= -1)
3564 warning (0, "comparison is always %d due to width of bit-field",
3566 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
3570 /* Single-bit compares should always be against zero. */
3571 if (lbitsize
== 1 && ! integer_zerop (rhs
))
3573 code
= code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
;
3574 rhs
= build_int_cst (type
, 0);
3577 /* Make a new bitfield reference, shift the constant over the
3578 appropriate number of bits and mask it with the computed mask
3579 (in case this was a signed field). If we changed it, make a new one. */
3580 lhs
= make_bit_field_ref (loc
, linner
, unsigned_type
, nbitsize
, nbitpos
, 1);
3582 rhs
= const_binop (BIT_AND_EXPR
,
3583 const_binop (LSHIFT_EXPR
,
3584 fold_convert_loc (loc
, unsigned_type
, rhs
),
3585 size_int (lbitpos
)),
3588 lhs
= build2_loc (loc
, code
, compare_type
,
3589 build2 (BIT_AND_EXPR
, unsigned_type
, lhs
, mask
), rhs
);
3593 /* Subroutine for fold_truth_andor_1: decode a field reference.
3595 If EXP is a comparison reference, we return the innermost reference.
3597 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3598 set to the starting bit number.
3600 If the innermost field can be completely contained in a mode-sized
3601 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3603 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3604 otherwise it is not changed.
3606 *PUNSIGNEDP is set to the signedness of the field.
3608 *PMASK is set to the mask used. This is either contained in a
3609 BIT_AND_EXPR or derived from the width of the field.
3611 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3613 Return 0 if this is not a component reference or is one that we can't
3614 do anything with. */
3617 decode_field_reference (location_t loc
, tree exp
, HOST_WIDE_INT
*pbitsize
,
3618 HOST_WIDE_INT
*pbitpos
, machine_mode
*pmode
,
3619 int *punsignedp
, int *pvolatilep
,
3620 tree
*pmask
, tree
*pand_mask
)
3622 tree outer_type
= 0;
3624 tree mask
, inner
, offset
;
3626 unsigned int precision
;
3628 /* All the optimizations using this function assume integer fields.
3629 There are problems with FP fields since the type_for_size call
3630 below can fail for, e.g., XFmode. */
3631 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp
)))
3634 /* We are interested in the bare arrangement of bits, so strip everything
3635 that doesn't affect the machine mode. However, record the type of the
3636 outermost expression if it may matter below. */
3637 if (CONVERT_EXPR_P (exp
)
3638 || TREE_CODE (exp
) == NON_LVALUE_EXPR
)
3639 outer_type
= TREE_TYPE (exp
);
3642 if (TREE_CODE (exp
) == BIT_AND_EXPR
)
3644 and_mask
= TREE_OPERAND (exp
, 1);
3645 exp
= TREE_OPERAND (exp
, 0);
3646 STRIP_NOPS (exp
); STRIP_NOPS (and_mask
);
3647 if (TREE_CODE (and_mask
) != INTEGER_CST
)
3651 inner
= get_inner_reference (exp
, pbitsize
, pbitpos
, &offset
, pmode
,
3652 punsignedp
, pvolatilep
, false);
3653 if ((inner
== exp
&& and_mask
== 0)
3654 || *pbitsize
< 0 || offset
!= 0
3655 || TREE_CODE (inner
) == PLACEHOLDER_EXPR
)
3658 /* If the number of bits in the reference is the same as the bitsize of
3659 the outer type, then the outer type gives the signedness. Otherwise
3660 (in case of a small bitfield) the signedness is unchanged. */
3661 if (outer_type
&& *pbitsize
== TYPE_PRECISION (outer_type
))
3662 *punsignedp
= TYPE_UNSIGNED (outer_type
);
3664 /* Compute the mask to access the bitfield. */
3665 unsigned_type
= lang_hooks
.types
.type_for_size (*pbitsize
, 1);
3666 precision
= TYPE_PRECISION (unsigned_type
);
3668 mask
= build_int_cst_type (unsigned_type
, -1);
3670 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
));
3671 mask
= const_binop (RSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
));
3673 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3675 mask
= fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
3676 fold_convert_loc (loc
, unsigned_type
, and_mask
), mask
);
3679 *pand_mask
= and_mask
;
3683 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3684 bit positions and MASK is SIGNED. */
3687 all_ones_mask_p (const_tree mask
, unsigned int size
)
3689 tree type
= TREE_TYPE (mask
);
3690 unsigned int precision
= TYPE_PRECISION (type
);
3692 /* If this function returns true when the type of the mask is
3693 UNSIGNED, then there will be errors. In particular see
3694 gcc.c-torture/execute/990326-1.c. There does not appear to be
3695 any documentation paper trail as to why this is so. But the pre
3696 wide-int worked with that restriction and it has been preserved
3698 if (size
> precision
|| TYPE_SIGN (type
) == UNSIGNED
)
3701 return wi::mask (size
, false, precision
) == mask
;
3704 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3705 represents the sign bit of EXP's type. If EXP represents a sign
3706 or zero extension, also test VAL against the unextended type.
3707 The return value is the (sub)expression whose sign bit is VAL,
3708 or NULL_TREE otherwise. */
3711 sign_bit_p (tree exp
, const_tree val
)
3716 /* Tree EXP must have an integral type. */
3717 t
= TREE_TYPE (exp
);
3718 if (! INTEGRAL_TYPE_P (t
))
3721 /* Tree VAL must be an integer constant. */
3722 if (TREE_CODE (val
) != INTEGER_CST
3723 || TREE_OVERFLOW (val
))
3726 width
= TYPE_PRECISION (t
);
3727 if (wi::only_sign_bit_p (val
, width
))
3730 /* Handle extension from a narrower type. */
3731 if (TREE_CODE (exp
) == NOP_EXPR
3732 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))) < width
)
3733 return sign_bit_p (TREE_OPERAND (exp
, 0), val
);
3738 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3739 to be evaluated unconditionally. */
3742 simple_operand_p (const_tree exp
)
3744 /* Strip any conversions that don't change the machine mode. */
3747 return (CONSTANT_CLASS_P (exp
)
3748 || TREE_CODE (exp
) == SSA_NAME
3750 && ! TREE_ADDRESSABLE (exp
)
3751 && ! TREE_THIS_VOLATILE (exp
)
3752 && ! DECL_NONLOCAL (exp
)
3753 /* Don't regard global variables as simple. They may be
3754 allocated in ways unknown to the compiler (shared memory,
3755 #pragma weak, etc). */
3756 && ! TREE_PUBLIC (exp
)
3757 && ! DECL_EXTERNAL (exp
)
3758 /* Weakrefs are not safe to be read, since they can be NULL.
3759 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3760 have DECL_WEAK flag set. */
3761 && (! VAR_OR_FUNCTION_DECL_P (exp
) || ! DECL_WEAK (exp
))
3762 /* Loading a static variable is unduly expensive, but global
3763 registers aren't expensive. */
3764 && (! TREE_STATIC (exp
) || DECL_REGISTER (exp
))));
3767 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3768 to be evaluated unconditionally.
3769 I addition to simple_operand_p, we assume that comparisons, conversions,
3770 and logic-not operations are simple, if their operands are simple, too. */
3773 simple_operand_p_2 (tree exp
)
3775 enum tree_code code
;
3777 if (TREE_SIDE_EFFECTS (exp
)
3778 || tree_could_trap_p (exp
))
3781 while (CONVERT_EXPR_P (exp
))
3782 exp
= TREE_OPERAND (exp
, 0);
3784 code
= TREE_CODE (exp
);
3786 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
3787 return (simple_operand_p (TREE_OPERAND (exp
, 0))
3788 && simple_operand_p (TREE_OPERAND (exp
, 1)));
3790 if (code
== TRUTH_NOT_EXPR
)
3791 return simple_operand_p_2 (TREE_OPERAND (exp
, 0));
3793 return simple_operand_p (exp
);
3797 /* The following functions are subroutines to fold_range_test and allow it to
3798 try to change a logical combination of comparisons into a range test.
3801 X == 2 || X == 3 || X == 4 || X == 5
3805 (unsigned) (X - 2) <= 3
3807 We describe each set of comparisons as being either inside or outside
3808 a range, using a variable named like IN_P, and then describe the
3809 range with a lower and upper bound. If one of the bounds is omitted,
3810 it represents either the highest or lowest value of the type.
3812 In the comments below, we represent a range by two numbers in brackets
3813 preceded by a "+" to designate being inside that range, or a "-" to
3814 designate being outside that range, so the condition can be inverted by
3815 flipping the prefix. An omitted bound is represented by a "-". For
3816 example, "- [-, 10]" means being outside the range starting at the lowest
3817 possible value and ending at 10, in other words, being greater than 10.
3818 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3821 We set up things so that the missing bounds are handled in a consistent
3822 manner so neither a missing bound nor "true" and "false" need to be
3823 handled using a special case. */
3825 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3826 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3827 and UPPER1_P are nonzero if the respective argument is an upper bound
3828 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3829 must be specified for a comparison. ARG1 will be converted to ARG0's
3830 type if both are specified. */
3833 range_binop (enum tree_code code
, tree type
, tree arg0
, int upper0_p
,
3834 tree arg1
, int upper1_p
)
3840 /* If neither arg represents infinity, do the normal operation.
3841 Else, if not a comparison, return infinity. Else handle the special
3842 comparison rules. Note that most of the cases below won't occur, but
3843 are handled for consistency. */
3845 if (arg0
!= 0 && arg1
!= 0)
3847 tem
= fold_build2 (code
, type
!= 0 ? type
: TREE_TYPE (arg0
),
3848 arg0
, fold_convert (TREE_TYPE (arg0
), arg1
));
3850 return TREE_CODE (tem
) == INTEGER_CST
? tem
: 0;
3853 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
3856 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3857 for neither. In real maths, we cannot assume open ended ranges are
3858 the same. But, this is computer arithmetic, where numbers are finite.
3859 We can therefore make the transformation of any unbounded range with
3860 the value Z, Z being greater than any representable number. This permits
3861 us to treat unbounded ranges as equal. */
3862 sgn0
= arg0
!= 0 ? 0 : (upper0_p
? 1 : -1);
3863 sgn1
= arg1
!= 0 ? 0 : (upper1_p
? 1 : -1);
3867 result
= sgn0
== sgn1
;
3870 result
= sgn0
!= sgn1
;
3873 result
= sgn0
< sgn1
;
3876 result
= sgn0
<= sgn1
;
3879 result
= sgn0
> sgn1
;
3882 result
= sgn0
>= sgn1
;
3888 return constant_boolean_node (result
, type
);
3891 /* Helper routine for make_range. Perform one step for it, return
3892 new expression if the loop should continue or NULL_TREE if it should
3896 make_range_step (location_t loc
, enum tree_code code
, tree arg0
, tree arg1
,
3897 tree exp_type
, tree
*p_low
, tree
*p_high
, int *p_in_p
,
3898 bool *strict_overflow_p
)
3900 tree arg0_type
= TREE_TYPE (arg0
);
3901 tree n_low
, n_high
, low
= *p_low
, high
= *p_high
;
3902 int in_p
= *p_in_p
, n_in_p
;
3906 case TRUTH_NOT_EXPR
:
3907 /* We can only do something if the range is testing for zero. */
3908 if (low
== NULL_TREE
|| high
== NULL_TREE
3909 || ! integer_zerop (low
) || ! integer_zerop (high
))
3914 case EQ_EXPR
: case NE_EXPR
:
3915 case LT_EXPR
: case LE_EXPR
: case GE_EXPR
: case GT_EXPR
:
3916 /* We can only do something if the range is testing for zero
3917 and if the second operand is an integer constant. Note that
3918 saying something is "in" the range we make is done by
3919 complementing IN_P since it will set in the initial case of
3920 being not equal to zero; "out" is leaving it alone. */
3921 if (low
== NULL_TREE
|| high
== NULL_TREE
3922 || ! integer_zerop (low
) || ! integer_zerop (high
)
3923 || TREE_CODE (arg1
) != INTEGER_CST
)
3928 case NE_EXPR
: /* - [c, c] */
3931 case EQ_EXPR
: /* + [c, c] */
3932 in_p
= ! in_p
, low
= high
= arg1
;
3934 case GT_EXPR
: /* - [-, c] */
3935 low
= 0, high
= arg1
;
3937 case GE_EXPR
: /* + [c, -] */
3938 in_p
= ! in_p
, low
= arg1
, high
= 0;
3940 case LT_EXPR
: /* - [c, -] */
3941 low
= arg1
, high
= 0;
3943 case LE_EXPR
: /* + [-, c] */
3944 in_p
= ! in_p
, low
= 0, high
= arg1
;
3950 /* If this is an unsigned comparison, we also know that EXP is
3951 greater than or equal to zero. We base the range tests we make
3952 on that fact, so we record it here so we can parse existing
3953 range tests. We test arg0_type since often the return type
3954 of, e.g. EQ_EXPR, is boolean. */
3955 if (TYPE_UNSIGNED (arg0_type
) && (low
== 0 || high
== 0))
3957 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
,
3959 build_int_cst (arg0_type
, 0),
3963 in_p
= n_in_p
, low
= n_low
, high
= n_high
;
3965 /* If the high bound is missing, but we have a nonzero low
3966 bound, reverse the range so it goes from zero to the low bound
3968 if (high
== 0 && low
&& ! integer_zerop (low
))
3971 high
= range_binop (MINUS_EXPR
, NULL_TREE
, low
, 0,
3972 build_int_cst (TREE_TYPE (low
), 1), 0);
3973 low
= build_int_cst (arg0_type
, 0);
3983 /* If flag_wrapv and ARG0_TYPE is signed, make sure
3984 low and high are non-NULL, then normalize will DTRT. */
3985 if (!TYPE_UNSIGNED (arg0_type
)
3986 && !TYPE_OVERFLOW_UNDEFINED (arg0_type
))
3988 if (low
== NULL_TREE
)
3989 low
= TYPE_MIN_VALUE (arg0_type
);
3990 if (high
== NULL_TREE
)
3991 high
= TYPE_MAX_VALUE (arg0_type
);
3994 /* (-x) IN [a,b] -> x in [-b, -a] */
3995 n_low
= range_binop (MINUS_EXPR
, exp_type
,
3996 build_int_cst (exp_type
, 0),
3998 n_high
= range_binop (MINUS_EXPR
, exp_type
,
3999 build_int_cst (exp_type
, 0),
4001 if (n_high
!= 0 && TREE_OVERFLOW (n_high
))
4007 return build2_loc (loc
, MINUS_EXPR
, exp_type
, negate_expr (arg0
),
4008 build_int_cst (exp_type
, 1));
4012 if (TREE_CODE (arg1
) != INTEGER_CST
)
4015 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4016 move a constant to the other side. */
4017 if (!TYPE_UNSIGNED (arg0_type
)
4018 && !TYPE_OVERFLOW_UNDEFINED (arg0_type
))
4021 /* If EXP is signed, any overflow in the computation is undefined,
4022 so we don't worry about it so long as our computations on
4023 the bounds don't overflow. For unsigned, overflow is defined
4024 and this is exactly the right thing. */
4025 n_low
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
4026 arg0_type
, low
, 0, arg1
, 0);
4027 n_high
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
4028 arg0_type
, high
, 1, arg1
, 0);
4029 if ((n_low
!= 0 && TREE_OVERFLOW (n_low
))
4030 || (n_high
!= 0 && TREE_OVERFLOW (n_high
)))
4033 if (TYPE_OVERFLOW_UNDEFINED (arg0_type
))
4034 *strict_overflow_p
= true;
4037 /* Check for an unsigned range which has wrapped around the maximum
4038 value thus making n_high < n_low, and normalize it. */
4039 if (n_low
&& n_high
&& tree_int_cst_lt (n_high
, n_low
))
4041 low
= range_binop (PLUS_EXPR
, arg0_type
, n_high
, 0,
4042 build_int_cst (TREE_TYPE (n_high
), 1), 0);
4043 high
= range_binop (MINUS_EXPR
, arg0_type
, n_low
, 0,
4044 build_int_cst (TREE_TYPE (n_low
), 1), 0);
4046 /* If the range is of the form +/- [ x+1, x ], we won't
4047 be able to normalize it. But then, it represents the
4048 whole range or the empty set, so make it
4050 if (tree_int_cst_equal (n_low
, low
)
4051 && tree_int_cst_equal (n_high
, high
))
4057 low
= n_low
, high
= n_high
;
4065 case NON_LVALUE_EXPR
:
4066 if (TYPE_PRECISION (arg0_type
) > TYPE_PRECISION (exp_type
))
4069 if (! INTEGRAL_TYPE_P (arg0_type
)
4070 || (low
!= 0 && ! int_fits_type_p (low
, arg0_type
))
4071 || (high
!= 0 && ! int_fits_type_p (high
, arg0_type
)))
4074 n_low
= low
, n_high
= high
;
4077 n_low
= fold_convert_loc (loc
, arg0_type
, n_low
);
4080 n_high
= fold_convert_loc (loc
, arg0_type
, n_high
);
4082 /* If we're converting arg0 from an unsigned type, to exp,
4083 a signed type, we will be doing the comparison as unsigned.
4084 The tests above have already verified that LOW and HIGH
4087 So we have to ensure that we will handle large unsigned
4088 values the same way that the current signed bounds treat
4091 if (!TYPE_UNSIGNED (exp_type
) && TYPE_UNSIGNED (arg0_type
))
4095 /* For fixed-point modes, we need to pass the saturating flag
4096 as the 2nd parameter. */
4097 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type
)))
4099 = lang_hooks
.types
.type_for_mode (TYPE_MODE (arg0_type
),
4100 TYPE_SATURATING (arg0_type
));
4103 = lang_hooks
.types
.type_for_mode (TYPE_MODE (arg0_type
), 1);
4105 /* A range without an upper bound is, naturally, unbounded.
4106 Since convert would have cropped a very large value, use
4107 the max value for the destination type. */
4109 = TYPE_MAX_VALUE (equiv_type
) ? TYPE_MAX_VALUE (equiv_type
)
4110 : TYPE_MAX_VALUE (arg0_type
);
4112 if (TYPE_PRECISION (exp_type
) == TYPE_PRECISION (arg0_type
))
4113 high_positive
= fold_build2_loc (loc
, RSHIFT_EXPR
, arg0_type
,
4114 fold_convert_loc (loc
, arg0_type
,
4116 build_int_cst (arg0_type
, 1));
4118 /* If the low bound is specified, "and" the range with the
4119 range for which the original unsigned value will be
4123 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
, 1, n_low
, n_high
,
4124 1, fold_convert_loc (loc
, arg0_type
,
4129 in_p
= (n_in_p
== in_p
);
4133 /* Otherwise, "or" the range with the range of the input
4134 that will be interpreted as negative. */
4135 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
, 0, n_low
, n_high
,
4136 1, fold_convert_loc (loc
, arg0_type
,
4141 in_p
= (in_p
!= n_in_p
);
4155 /* Given EXP, a logical expression, set the range it is testing into
4156 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4157 actually being tested. *PLOW and *PHIGH will be made of the same
4158 type as the returned expression. If EXP is not a comparison, we
4159 will most likely not be returning a useful value and range. Set
4160 *STRICT_OVERFLOW_P to true if the return value is only valid
4161 because signed overflow is undefined; otherwise, do not change
4162 *STRICT_OVERFLOW_P. */
4165 make_range (tree exp
, int *pin_p
, tree
*plow
, tree
*phigh
,
4166 bool *strict_overflow_p
)
4168 enum tree_code code
;
4169 tree arg0
, arg1
= NULL_TREE
;
4170 tree exp_type
, nexp
;
4173 location_t loc
= EXPR_LOCATION (exp
);
4175 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4176 and see if we can refine the range. Some of the cases below may not
4177 happen, but it doesn't seem worth worrying about this. We "continue"
4178 the outer loop when we've changed something; otherwise we "break"
4179 the switch, which will "break" the while. */
4182 low
= high
= build_int_cst (TREE_TYPE (exp
), 0);
4186 code
= TREE_CODE (exp
);
4187 exp_type
= TREE_TYPE (exp
);
4190 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
)))
4192 if (TREE_OPERAND_LENGTH (exp
) > 0)
4193 arg0
= TREE_OPERAND (exp
, 0);
4194 if (TREE_CODE_CLASS (code
) == tcc_binary
4195 || TREE_CODE_CLASS (code
) == tcc_comparison
4196 || (TREE_CODE_CLASS (code
) == tcc_expression
4197 && TREE_OPERAND_LENGTH (exp
) > 1))
4198 arg1
= TREE_OPERAND (exp
, 1);
4200 if (arg0
== NULL_TREE
)
4203 nexp
= make_range_step (loc
, code
, arg0
, arg1
, exp_type
, &low
,
4204 &high
, &in_p
, strict_overflow_p
);
4205 if (nexp
== NULL_TREE
)
4210 /* If EXP is a constant, we can evaluate whether this is true or false. */
4211 if (TREE_CODE (exp
) == INTEGER_CST
)
4213 in_p
= in_p
== (integer_onep (range_binop (GE_EXPR
, integer_type_node
,
4215 && integer_onep (range_binop (LE_EXPR
, integer_type_node
,
4221 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
4225 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4226 type, TYPE, return an expression to test if EXP is in (or out of, depending
4227 on IN_P) the range. Return 0 if the test couldn't be created. */
4230 build_range_check (location_t loc
, tree type
, tree exp
, int in_p
,
4231 tree low
, tree high
)
4233 tree etype
= TREE_TYPE (exp
), value
;
4235 #ifdef HAVE_canonicalize_funcptr_for_compare
4236 /* Disable this optimization for function pointer expressions
4237 on targets that require function pointer canonicalization. */
4238 if (HAVE_canonicalize_funcptr_for_compare
4239 && TREE_CODE (etype
) == POINTER_TYPE
4240 && TREE_CODE (TREE_TYPE (etype
)) == FUNCTION_TYPE
)
4246 value
= build_range_check (loc
, type
, exp
, 1, low
, high
);
4248 return invert_truthvalue_loc (loc
, value
);
4253 if (low
== 0 && high
== 0)
4254 return omit_one_operand_loc (loc
, type
, build_int_cst (type
, 1), exp
);
4257 return fold_build2_loc (loc
, LE_EXPR
, type
, exp
,
4258 fold_convert_loc (loc
, etype
, high
));
4261 return fold_build2_loc (loc
, GE_EXPR
, type
, exp
,
4262 fold_convert_loc (loc
, etype
, low
));
4264 if (operand_equal_p (low
, high
, 0))
4265 return fold_build2_loc (loc
, EQ_EXPR
, type
, exp
,
4266 fold_convert_loc (loc
, etype
, low
));
4268 if (integer_zerop (low
))
4270 if (! TYPE_UNSIGNED (etype
))
4272 etype
= unsigned_type_for (etype
);
4273 high
= fold_convert_loc (loc
, etype
, high
);
4274 exp
= fold_convert_loc (loc
, etype
, exp
);
4276 return build_range_check (loc
, type
, exp
, 1, 0, high
);
4279 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4280 if (integer_onep (low
) && TREE_CODE (high
) == INTEGER_CST
)
4282 int prec
= TYPE_PRECISION (etype
);
4284 if (wi::mask (prec
- 1, false, prec
) == high
)
4286 if (TYPE_UNSIGNED (etype
))
4288 tree signed_etype
= signed_type_for (etype
);
4289 if (TYPE_PRECISION (signed_etype
) != TYPE_PRECISION (etype
))
4291 = build_nonstandard_integer_type (TYPE_PRECISION (etype
), 0);
4293 etype
= signed_etype
;
4294 exp
= fold_convert_loc (loc
, etype
, exp
);
4296 return fold_build2_loc (loc
, GT_EXPR
, type
, exp
,
4297 build_int_cst (etype
, 0));
4301 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4302 This requires wrap-around arithmetics for the type of the expression.
4303 First make sure that arithmetics in this type is valid, then make sure
4304 that it wraps around. */
4305 if (TREE_CODE (etype
) == ENUMERAL_TYPE
|| TREE_CODE (etype
) == BOOLEAN_TYPE
)
4306 etype
= lang_hooks
.types
.type_for_size (TYPE_PRECISION (etype
),
4307 TYPE_UNSIGNED (etype
));
4309 if (TREE_CODE (etype
) == INTEGER_TYPE
&& !TYPE_OVERFLOW_WRAPS (etype
))
4311 tree utype
, minv
, maxv
;
4313 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4314 for the type in question, as we rely on this here. */
4315 utype
= unsigned_type_for (etype
);
4316 maxv
= fold_convert_loc (loc
, utype
, TYPE_MAX_VALUE (etype
));
4317 maxv
= range_binop (PLUS_EXPR
, NULL_TREE
, maxv
, 1,
4318 build_int_cst (TREE_TYPE (maxv
), 1), 1);
4319 minv
= fold_convert_loc (loc
, utype
, TYPE_MIN_VALUE (etype
));
4321 if (integer_zerop (range_binop (NE_EXPR
, integer_type_node
,
4328 high
= fold_convert_loc (loc
, etype
, high
);
4329 low
= fold_convert_loc (loc
, etype
, low
);
4330 exp
= fold_convert_loc (loc
, etype
, exp
);
4332 value
= const_binop (MINUS_EXPR
, high
, low
);
4335 if (POINTER_TYPE_P (etype
))
4337 if (value
!= 0 && !TREE_OVERFLOW (value
))
4339 low
= fold_build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (low
), low
);
4340 return build_range_check (loc
, type
,
4341 fold_build_pointer_plus_loc (loc
, exp
, low
),
4342 1, build_int_cst (etype
, 0), value
);
4347 if (value
!= 0 && !TREE_OVERFLOW (value
))
4348 return build_range_check (loc
, type
,
4349 fold_build2_loc (loc
, MINUS_EXPR
, etype
, exp
, low
),
4350 1, build_int_cst (etype
, 0), value
);
4355 /* Return the predecessor of VAL in its type, handling the infinite case. */
4358 range_predecessor (tree val
)
4360 tree type
= TREE_TYPE (val
);
4362 if (INTEGRAL_TYPE_P (type
)
4363 && operand_equal_p (val
, TYPE_MIN_VALUE (type
), 0))
4366 return range_binop (MINUS_EXPR
, NULL_TREE
, val
, 0,
4367 build_int_cst (TREE_TYPE (val
), 1), 0);
4370 /* Return the successor of VAL in its type, handling the infinite case. */
4373 range_successor (tree val
)
4375 tree type
= TREE_TYPE (val
);
4377 if (INTEGRAL_TYPE_P (type
)
4378 && operand_equal_p (val
, TYPE_MAX_VALUE (type
), 0))
4381 return range_binop (PLUS_EXPR
, NULL_TREE
, val
, 0,
4382 build_int_cst (TREE_TYPE (val
), 1), 0);
4385 /* Given two ranges, see if we can merge them into one. Return 1 if we
4386 can, 0 if we can't. Set the output range into the specified parameters. */
4389 merge_ranges (int *pin_p
, tree
*plow
, tree
*phigh
, int in0_p
, tree low0
,
4390 tree high0
, int in1_p
, tree low1
, tree high1
)
4398 int lowequal
= ((low0
== 0 && low1
== 0)
4399 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4400 low0
, 0, low1
, 0)));
4401 int highequal
= ((high0
== 0 && high1
== 0)
4402 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4403 high0
, 1, high1
, 1)));
4405 /* Make range 0 be the range that starts first, or ends last if they
4406 start at the same value. Swap them if it isn't. */
4407 if (integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4410 && integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4411 high1
, 1, high0
, 1))))
4413 temp
= in0_p
, in0_p
= in1_p
, in1_p
= temp
;
4414 tem
= low0
, low0
= low1
, low1
= tem
;
4415 tem
= high0
, high0
= high1
, high1
= tem
;
4418 /* Now flag two cases, whether the ranges are disjoint or whether the
4419 second range is totally subsumed in the first. Note that the tests
4420 below are simplified by the ones above. */
4421 no_overlap
= integer_onep (range_binop (LT_EXPR
, integer_type_node
,
4422 high0
, 1, low1
, 0));
4423 subset
= integer_onep (range_binop (LE_EXPR
, integer_type_node
,
4424 high1
, 1, high0
, 1));
4426 /* We now have four cases, depending on whether we are including or
4427 excluding the two ranges. */
4430 /* If they don't overlap, the result is false. If the second range
4431 is a subset it is the result. Otherwise, the range is from the start
4432 of the second to the end of the first. */
4434 in_p
= 0, low
= high
= 0;
4436 in_p
= 1, low
= low1
, high
= high1
;
4438 in_p
= 1, low
= low1
, high
= high0
;
4441 else if (in0_p
&& ! in1_p
)
4443 /* If they don't overlap, the result is the first range. If they are
4444 equal, the result is false. If the second range is a subset of the
4445 first, and the ranges begin at the same place, we go from just after
4446 the end of the second range to the end of the first. If the second
4447 range is not a subset of the first, or if it is a subset and both
4448 ranges end at the same place, the range starts at the start of the
4449 first range and ends just before the second range.
4450 Otherwise, we can't describe this as a single range. */
4452 in_p
= 1, low
= low0
, high
= high0
;
4453 else if (lowequal
&& highequal
)
4454 in_p
= 0, low
= high
= 0;
4455 else if (subset
&& lowequal
)
4457 low
= range_successor (high1
);
4462 /* We are in the weird situation where high0 > high1 but
4463 high1 has no successor. Punt. */
4467 else if (! subset
|| highequal
)
4470 high
= range_predecessor (low1
);
4474 /* low0 < low1 but low1 has no predecessor. Punt. */
4482 else if (! in0_p
&& in1_p
)
4484 /* If they don't overlap, the result is the second range. If the second
4485 is a subset of the first, the result is false. Otherwise,
4486 the range starts just after the first range and ends at the
4487 end of the second. */
4489 in_p
= 1, low
= low1
, high
= high1
;
4490 else if (subset
|| highequal
)
4491 in_p
= 0, low
= high
= 0;
4494 low
= range_successor (high0
);
4499 /* high1 > high0 but high0 has no successor. Punt. */
4507 /* The case where we are excluding both ranges. Here the complex case
4508 is if they don't overlap. In that case, the only time we have a
4509 range is if they are adjacent. If the second is a subset of the
4510 first, the result is the first. Otherwise, the range to exclude
4511 starts at the beginning of the first range and ends at the end of the
4515 if (integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4516 range_successor (high0
),
4518 in_p
= 0, low
= low0
, high
= high1
;
4521 /* Canonicalize - [min, x] into - [-, x]. */
4522 if (low0
&& TREE_CODE (low0
) == INTEGER_CST
)
4523 switch (TREE_CODE (TREE_TYPE (low0
)))
4526 if (TYPE_PRECISION (TREE_TYPE (low0
))
4527 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0
))))
4531 if (tree_int_cst_equal (low0
,
4532 TYPE_MIN_VALUE (TREE_TYPE (low0
))))
4536 if (TYPE_UNSIGNED (TREE_TYPE (low0
))
4537 && integer_zerop (low0
))
4544 /* Canonicalize - [x, max] into - [x, -]. */
4545 if (high1
&& TREE_CODE (high1
) == INTEGER_CST
)
4546 switch (TREE_CODE (TREE_TYPE (high1
)))
4549 if (TYPE_PRECISION (TREE_TYPE (high1
))
4550 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1
))))
4554 if (tree_int_cst_equal (high1
,
4555 TYPE_MAX_VALUE (TREE_TYPE (high1
))))
4559 if (TYPE_UNSIGNED (TREE_TYPE (high1
))
4560 && integer_zerop (range_binop (PLUS_EXPR
, NULL_TREE
,
4562 build_int_cst (TREE_TYPE (high1
), 1),
4570 /* The ranges might be also adjacent between the maximum and
4571 minimum values of the given type. For
4572 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4573 return + [x + 1, y - 1]. */
4574 if (low0
== 0 && high1
== 0)
4576 low
= range_successor (high0
);
4577 high
= range_predecessor (low1
);
4578 if (low
== 0 || high
== 0)
4588 in_p
= 0, low
= low0
, high
= high0
;
4590 in_p
= 0, low
= low0
, high
= high1
;
4593 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
4598 /* Subroutine of fold, looking inside expressions of the form
4599 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4600 of the COND_EXPR. This function is being used also to optimize
4601 A op B ? C : A, by reversing the comparison first.
4603 Return a folded expression whose code is not a COND_EXPR
4604 anymore, or NULL_TREE if no folding opportunity is found. */
4607 fold_cond_expr_with_comparison (location_t loc
, tree type
,
4608 tree arg0
, tree arg1
, tree arg2
)
4610 enum tree_code comp_code
= TREE_CODE (arg0
);
4611 tree arg00
= TREE_OPERAND (arg0
, 0);
4612 tree arg01
= TREE_OPERAND (arg0
, 1);
4613 tree arg1_type
= TREE_TYPE (arg1
);
4619 /* If we have A op 0 ? A : -A, consider applying the following
4622 A == 0? A : -A same as -A
4623 A != 0? A : -A same as A
4624 A >= 0? A : -A same as abs (A)
4625 A > 0? A : -A same as abs (A)
4626 A <= 0? A : -A same as -abs (A)
4627 A < 0? A : -A same as -abs (A)
4629 None of these transformations work for modes with signed
4630 zeros. If A is +/-0, the first two transformations will
4631 change the sign of the result (from +0 to -0, or vice
4632 versa). The last four will fix the sign of the result,
4633 even though the original expressions could be positive or
4634 negative, depending on the sign of A.
4636 Note that all these transformations are correct if A is
4637 NaN, since the two alternatives (A and -A) are also NaNs. */
4638 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type
))
4639 && (FLOAT_TYPE_P (TREE_TYPE (arg01
))
4640 ? real_zerop (arg01
)
4641 : integer_zerop (arg01
))
4642 && ((TREE_CODE (arg2
) == NEGATE_EXPR
4643 && operand_equal_p (TREE_OPERAND (arg2
, 0), arg1
, 0))
4644 /* In the case that A is of the form X-Y, '-A' (arg2) may
4645 have already been folded to Y-X, check for that. */
4646 || (TREE_CODE (arg1
) == MINUS_EXPR
4647 && TREE_CODE (arg2
) == MINUS_EXPR
4648 && operand_equal_p (TREE_OPERAND (arg1
, 0),
4649 TREE_OPERAND (arg2
, 1), 0)
4650 && operand_equal_p (TREE_OPERAND (arg1
, 1),
4651 TREE_OPERAND (arg2
, 0), 0))))
4656 tem
= fold_convert_loc (loc
, arg1_type
, arg1
);
4657 return pedantic_non_lvalue_loc (loc
,
4658 fold_convert_loc (loc
, type
,
4659 negate_expr (tem
)));
4662 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
4665 if (flag_trapping_math
)
4670 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
4671 arg1
= fold_convert_loc (loc
, signed_type_for
4672 (TREE_TYPE (arg1
)), arg1
);
4673 tem
= fold_build1_loc (loc
, ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
4674 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
4677 if (flag_trapping_math
)
4681 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
4682 arg1
= fold_convert_loc (loc
, signed_type_for
4683 (TREE_TYPE (arg1
)), arg1
);
4684 tem
= fold_build1_loc (loc
, ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
4685 return negate_expr (fold_convert_loc (loc
, type
, tem
));
4687 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
4691 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4692 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4693 both transformations are correct when A is NaN: A != 0
4694 is then true, and A == 0 is false. */
4696 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type
))
4697 && integer_zerop (arg01
) && integer_zerop (arg2
))
4699 if (comp_code
== NE_EXPR
)
4700 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
4701 else if (comp_code
== EQ_EXPR
)
4702 return build_zero_cst (type
);
4705 /* Try some transformations of A op B ? A : B.
4707 A == B? A : B same as B
4708 A != B? A : B same as A
4709 A >= B? A : B same as max (A, B)
4710 A > B? A : B same as max (B, A)
4711 A <= B? A : B same as min (A, B)
4712 A < B? A : B same as min (B, A)
4714 As above, these transformations don't work in the presence
4715 of signed zeros. For example, if A and B are zeros of
4716 opposite sign, the first two transformations will change
4717 the sign of the result. In the last four, the original
4718 expressions give different results for (A=+0, B=-0) and
4719 (A=-0, B=+0), but the transformed expressions do not.
4721 The first two transformations are correct if either A or B
4722 is a NaN. In the first transformation, the condition will
4723 be false, and B will indeed be chosen. In the case of the
4724 second transformation, the condition A != B will be true,
4725 and A will be chosen.
4727 The conversions to max() and min() are not correct if B is
4728 a number and A is not. The conditions in the original
4729 expressions will be false, so all four give B. The min()
4730 and max() versions would give a NaN instead. */
4731 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type
))
4732 && operand_equal_for_comparison_p (arg01
, arg2
, arg00
)
4733 /* Avoid these transformations if the COND_EXPR may be used
4734 as an lvalue in the C++ front-end. PR c++/19199. */
4736 || VECTOR_TYPE_P (type
)
4737 || (strcmp (lang_hooks
.name
, "GNU C++") != 0
4738 && strcmp (lang_hooks
.name
, "GNU Objective-C++") != 0)
4739 || ! maybe_lvalue_p (arg1
)
4740 || ! maybe_lvalue_p (arg2
)))
4742 tree comp_op0
= arg00
;
4743 tree comp_op1
= arg01
;
4744 tree comp_type
= TREE_TYPE (comp_op0
);
4746 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4747 if (TYPE_MAIN_VARIANT (comp_type
) == TYPE_MAIN_VARIANT (type
))
4757 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg2
));
4759 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
4764 /* In C++ a ?: expression can be an lvalue, so put the
4765 operand which will be used if they are equal first
4766 so that we can convert this back to the
4767 corresponding COND_EXPR. */
4768 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4770 comp_op0
= fold_convert_loc (loc
, comp_type
, comp_op0
);
4771 comp_op1
= fold_convert_loc (loc
, comp_type
, comp_op1
);
4772 tem
= (comp_code
== LE_EXPR
|| comp_code
== UNLE_EXPR
)
4773 ? fold_build2_loc (loc
, MIN_EXPR
, comp_type
, comp_op0
, comp_op1
)
4774 : fold_build2_loc (loc
, MIN_EXPR
, comp_type
,
4775 comp_op1
, comp_op0
);
4776 return pedantic_non_lvalue_loc (loc
,
4777 fold_convert_loc (loc
, type
, tem
));
4784 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4786 comp_op0
= fold_convert_loc (loc
, comp_type
, comp_op0
);
4787 comp_op1
= fold_convert_loc (loc
, comp_type
, comp_op1
);
4788 tem
= (comp_code
== GE_EXPR
|| comp_code
== UNGE_EXPR
)
4789 ? fold_build2_loc (loc
, MAX_EXPR
, comp_type
, comp_op0
, comp_op1
)
4790 : fold_build2_loc (loc
, MAX_EXPR
, comp_type
,
4791 comp_op1
, comp_op0
);
4792 return pedantic_non_lvalue_loc (loc
,
4793 fold_convert_loc (loc
, type
, tem
));
4797 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4798 return pedantic_non_lvalue_loc (loc
,
4799 fold_convert_loc (loc
, type
, arg2
));
4802 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4803 return pedantic_non_lvalue_loc (loc
,
4804 fold_convert_loc (loc
, type
, arg1
));
4807 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
4812 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4813 we might still be able to simplify this. For example,
4814 if C1 is one less or one more than C2, this might have started
4815 out as a MIN or MAX and been transformed by this function.
4816 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4818 if (INTEGRAL_TYPE_P (type
)
4819 && TREE_CODE (arg01
) == INTEGER_CST
4820 && TREE_CODE (arg2
) == INTEGER_CST
)
4824 if (TREE_CODE (arg1
) == INTEGER_CST
)
4826 /* We can replace A with C1 in this case. */
4827 arg1
= fold_convert_loc (loc
, type
, arg01
);
4828 return fold_build3_loc (loc
, COND_EXPR
, type
, arg0
, arg1
, arg2
);
4831 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4832 MIN_EXPR, to preserve the signedness of the comparison. */
4833 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
),
4835 && operand_equal_p (arg01
,
4836 const_binop (PLUS_EXPR
, arg2
,
4837 build_int_cst (type
, 1)),
4840 tem
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (arg00
), arg00
,
4841 fold_convert_loc (loc
, TREE_TYPE (arg00
),
4843 return pedantic_non_lvalue_loc (loc
,
4844 fold_convert_loc (loc
, type
, tem
));
4849 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4851 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
),
4853 && operand_equal_p (arg01
,
4854 const_binop (MINUS_EXPR
, arg2
,
4855 build_int_cst (type
, 1)),
4858 tem
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (arg00
), arg00
,
4859 fold_convert_loc (loc
, TREE_TYPE (arg00
),
4861 return pedantic_non_lvalue_loc (loc
,
4862 fold_convert_loc (loc
, type
, tem
));
4867 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4868 MAX_EXPR, to preserve the signedness of the comparison. */
4869 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
),
4871 && operand_equal_p (arg01
,
4872 const_binop (MINUS_EXPR
, arg2
,
4873 build_int_cst (type
, 1)),
4876 tem
= fold_build2_loc (loc
, MAX_EXPR
, TREE_TYPE (arg00
), arg00
,
4877 fold_convert_loc (loc
, TREE_TYPE (arg00
),
4879 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
4884 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4885 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
),
4887 && operand_equal_p (arg01
,
4888 const_binop (PLUS_EXPR
, arg2
,
4889 build_int_cst (type
, 1)),
4892 tem
= fold_build2_loc (loc
, MAX_EXPR
, TREE_TYPE (arg00
), arg00
,
4893 fold_convert_loc (loc
, TREE_TYPE (arg00
),
4895 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
4909 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4910 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4911 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4915 /* EXP is some logical combination of boolean tests. See if we can
4916 merge it into some range test. Return the new tree if so. */
4919 fold_range_test (location_t loc
, enum tree_code code
, tree type
,
4922 int or_op
= (code
== TRUTH_ORIF_EXPR
4923 || code
== TRUTH_OR_EXPR
);
4924 int in0_p
, in1_p
, in_p
;
4925 tree low0
, low1
, low
, high0
, high1
, high
;
4926 bool strict_overflow_p
= false;
4928 const char * const warnmsg
= G_("assuming signed overflow does not occur "
4929 "when simplifying range test");
4931 if (!INTEGRAL_TYPE_P (type
))
4934 lhs
= make_range (op0
, &in0_p
, &low0
, &high0
, &strict_overflow_p
);
4935 rhs
= make_range (op1
, &in1_p
, &low1
, &high1
, &strict_overflow_p
);
4937 /* If this is an OR operation, invert both sides; we will invert
4938 again at the end. */
4940 in0_p
= ! in0_p
, in1_p
= ! in1_p
;
4942 /* If both expressions are the same, if we can merge the ranges, and we
4943 can build the range test, return it or it inverted. If one of the
4944 ranges is always true or always false, consider it to be the same
4945 expression as the other. */
4946 if ((lhs
== 0 || rhs
== 0 || operand_equal_p (lhs
, rhs
, 0))
4947 && merge_ranges (&in_p
, &low
, &high
, in0_p
, low0
, high0
,
4949 && 0 != (tem
= (build_range_check (loc
, type
,
4951 : rhs
!= 0 ? rhs
: integer_zero_node
,
4954 if (strict_overflow_p
)
4955 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
4956 return or_op
? invert_truthvalue_loc (loc
, tem
) : tem
;
4959 /* On machines where the branch cost is expensive, if this is a
4960 short-circuited branch and the underlying object on both sides
4961 is the same, make a non-short-circuit operation. */
4962 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4963 && lhs
!= 0 && rhs
!= 0
4964 && (code
== TRUTH_ANDIF_EXPR
4965 || code
== TRUTH_ORIF_EXPR
)
4966 && operand_equal_p (lhs
, rhs
, 0))
4968 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4969 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4970 which cases we can't do this. */
4971 if (simple_operand_p (lhs
))
4972 return build2_loc (loc
, code
== TRUTH_ANDIF_EXPR
4973 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
4976 else if (!lang_hooks
.decls
.global_bindings_p ()
4977 && !CONTAINS_PLACEHOLDER_P (lhs
))
4979 tree common
= save_expr (lhs
);
4981 if (0 != (lhs
= build_range_check (loc
, type
, common
,
4982 or_op
? ! in0_p
: in0_p
,
4984 && (0 != (rhs
= build_range_check (loc
, type
, common
,
4985 or_op
? ! in1_p
: in1_p
,
4988 if (strict_overflow_p
)
4989 fold_overflow_warning (warnmsg
,
4990 WARN_STRICT_OVERFLOW_COMPARISON
);
4991 return build2_loc (loc
, code
== TRUTH_ANDIF_EXPR
4992 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
5001 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5002 bit value. Arrange things so the extra bits will be set to zero if and
5003 only if C is signed-extended to its full width. If MASK is nonzero,
5004 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5007 unextend (tree c
, int p
, int unsignedp
, tree mask
)
5009 tree type
= TREE_TYPE (c
);
5010 int modesize
= GET_MODE_BITSIZE (TYPE_MODE (type
));
5013 if (p
== modesize
|| unsignedp
)
5016 /* We work by getting just the sign bit into the low-order bit, then
5017 into the high-order bit, then sign-extend. We then XOR that value
5019 temp
= build_int_cst (TREE_TYPE (c
), wi::extract_uhwi (c
, p
- 1, 1));
5021 /* We must use a signed type in order to get an arithmetic right shift.
5022 However, we must also avoid introducing accidental overflows, so that
5023 a subsequent call to integer_zerop will work. Hence we must
5024 do the type conversion here. At this point, the constant is either
5025 zero or one, and the conversion to a signed type can never overflow.
5026 We could get an overflow if this conversion is done anywhere else. */
5027 if (TYPE_UNSIGNED (type
))
5028 temp
= fold_convert (signed_type_for (type
), temp
);
5030 temp
= const_binop (LSHIFT_EXPR
, temp
, size_int (modesize
- 1));
5031 temp
= const_binop (RSHIFT_EXPR
, temp
, size_int (modesize
- p
- 1));
5033 temp
= const_binop (BIT_AND_EXPR
, temp
,
5034 fold_convert (TREE_TYPE (c
), mask
));
5035 /* If necessary, convert the type back to match the type of C. */
5036 if (TYPE_UNSIGNED (type
))
5037 temp
= fold_convert (type
, temp
);
5039 return fold_convert (type
, const_binop (BIT_XOR_EXPR
, c
, temp
));
5042 /* For an expression that has the form
5046 we can drop one of the inner expressions and simplify to
5050 LOC is the location of the resulting expression. OP is the inner
5051 logical operation; the left-hand side in the examples above, while CMPOP
5052 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5053 removing a condition that guards another, as in
5054 (A != NULL && A->...) || A == NULL
5055 which we must not transform. If RHS_ONLY is true, only eliminate the
5056 right-most operand of the inner logical operation. */
5059 merge_truthop_with_opposite_arm (location_t loc
, tree op
, tree cmpop
,
5062 tree type
= TREE_TYPE (cmpop
);
5063 enum tree_code code
= TREE_CODE (cmpop
);
5064 enum tree_code truthop_code
= TREE_CODE (op
);
5065 tree lhs
= TREE_OPERAND (op
, 0);
5066 tree rhs
= TREE_OPERAND (op
, 1);
5067 tree orig_lhs
= lhs
, orig_rhs
= rhs
;
5068 enum tree_code rhs_code
= TREE_CODE (rhs
);
5069 enum tree_code lhs_code
= TREE_CODE (lhs
);
5070 enum tree_code inv_code
;
5072 if (TREE_SIDE_EFFECTS (op
) || TREE_SIDE_EFFECTS (cmpop
))
5075 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
5078 if (rhs_code
== truthop_code
)
5080 tree newrhs
= merge_truthop_with_opposite_arm (loc
, rhs
, cmpop
, rhs_only
);
5081 if (newrhs
!= NULL_TREE
)
5084 rhs_code
= TREE_CODE (rhs
);
5087 if (lhs_code
== truthop_code
&& !rhs_only
)
5089 tree newlhs
= merge_truthop_with_opposite_arm (loc
, lhs
, cmpop
, false);
5090 if (newlhs
!= NULL_TREE
)
5093 lhs_code
= TREE_CODE (lhs
);
5097 inv_code
= invert_tree_comparison (code
, HONOR_NANS (TYPE_MODE (type
)));
5098 if (inv_code
== rhs_code
5099 && operand_equal_p (TREE_OPERAND (rhs
, 0), TREE_OPERAND (cmpop
, 0), 0)
5100 && operand_equal_p (TREE_OPERAND (rhs
, 1), TREE_OPERAND (cmpop
, 1), 0))
5102 if (!rhs_only
&& inv_code
== lhs_code
5103 && operand_equal_p (TREE_OPERAND (lhs
, 0), TREE_OPERAND (cmpop
, 0), 0)
5104 && operand_equal_p (TREE_OPERAND (lhs
, 1), TREE_OPERAND (cmpop
, 1), 0))
5106 if (rhs
!= orig_rhs
|| lhs
!= orig_lhs
)
5107 return fold_build2_loc (loc
, truthop_code
, TREE_TYPE (cmpop
),
5112 /* Find ways of folding logical expressions of LHS and RHS:
5113 Try to merge two comparisons to the same innermost item.
5114 Look for range tests like "ch >= '0' && ch <= '9'".
5115 Look for combinations of simple terms on machines with expensive branches
5116 and evaluate the RHS unconditionally.
5118 For example, if we have p->a == 2 && p->b == 4 and we can make an
5119 object large enough to span both A and B, we can do this with a comparison
5120 against the object ANDed with the a mask.
5122 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5123 operations to do this with one comparison.
5125 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5126 function and the one above.
5128 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5129 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5131 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5134 We return the simplified tree or 0 if no optimization is possible. */
5137 fold_truth_andor_1 (location_t loc
, enum tree_code code
, tree truth_type
,
5140 /* If this is the "or" of two comparisons, we can do something if
5141 the comparisons are NE_EXPR. If this is the "and", we can do something
5142 if the comparisons are EQ_EXPR. I.e.,
5143 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5145 WANTED_CODE is this operation code. For single bit fields, we can
5146 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5147 comparison for one-bit fields. */
5149 enum tree_code wanted_code
;
5150 enum tree_code lcode
, rcode
;
5151 tree ll_arg
, lr_arg
, rl_arg
, rr_arg
;
5152 tree ll_inner
, lr_inner
, rl_inner
, rr_inner
;
5153 HOST_WIDE_INT ll_bitsize
, ll_bitpos
, lr_bitsize
, lr_bitpos
;
5154 HOST_WIDE_INT rl_bitsize
, rl_bitpos
, rr_bitsize
, rr_bitpos
;
5155 HOST_WIDE_INT xll_bitpos
, xlr_bitpos
, xrl_bitpos
, xrr_bitpos
;
5156 HOST_WIDE_INT lnbitsize
, lnbitpos
, rnbitsize
, rnbitpos
;
5157 int ll_unsignedp
, lr_unsignedp
, rl_unsignedp
, rr_unsignedp
;
5158 machine_mode ll_mode
, lr_mode
, rl_mode
, rr_mode
;
5159 machine_mode lnmode
, rnmode
;
5160 tree ll_mask
, lr_mask
, rl_mask
, rr_mask
;
5161 tree ll_and_mask
, lr_and_mask
, rl_and_mask
, rr_and_mask
;
5162 tree l_const
, r_const
;
5163 tree lntype
, rntype
, result
;
5164 HOST_WIDE_INT first_bit
, end_bit
;
5167 /* Start by getting the comparison codes. Fail if anything is volatile.
5168 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5169 it were surrounded with a NE_EXPR. */
5171 if (TREE_SIDE_EFFECTS (lhs
) || TREE_SIDE_EFFECTS (rhs
))
5174 lcode
= TREE_CODE (lhs
);
5175 rcode
= TREE_CODE (rhs
);
5177 if (lcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (lhs
, 1)))
5179 lhs
= build2 (NE_EXPR
, truth_type
, lhs
,
5180 build_int_cst (TREE_TYPE (lhs
), 0));
5184 if (rcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (rhs
, 1)))
5186 rhs
= build2 (NE_EXPR
, truth_type
, rhs
,
5187 build_int_cst (TREE_TYPE (rhs
), 0));
5191 if (TREE_CODE_CLASS (lcode
) != tcc_comparison
5192 || TREE_CODE_CLASS (rcode
) != tcc_comparison
)
5195 ll_arg
= TREE_OPERAND (lhs
, 0);
5196 lr_arg
= TREE_OPERAND (lhs
, 1);
5197 rl_arg
= TREE_OPERAND (rhs
, 0);
5198 rr_arg
= TREE_OPERAND (rhs
, 1);
5200 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5201 if (simple_operand_p (ll_arg
)
5202 && simple_operand_p (lr_arg
))
5204 if (operand_equal_p (ll_arg
, rl_arg
, 0)
5205 && operand_equal_p (lr_arg
, rr_arg
, 0))
5207 result
= combine_comparisons (loc
, code
, lcode
, rcode
,
5208 truth_type
, ll_arg
, lr_arg
);
5212 else if (operand_equal_p (ll_arg
, rr_arg
, 0)
5213 && operand_equal_p (lr_arg
, rl_arg
, 0))
5215 result
= combine_comparisons (loc
, code
, lcode
,
5216 swap_tree_comparison (rcode
),
5217 truth_type
, ll_arg
, lr_arg
);
5223 code
= ((code
== TRUTH_AND_EXPR
|| code
== TRUTH_ANDIF_EXPR
)
5224 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
);
5226 /* If the RHS can be evaluated unconditionally and its operands are
5227 simple, it wins to evaluate the RHS unconditionally on machines
5228 with expensive branches. In this case, this isn't a comparison
5229 that can be merged. */
5231 if (BRANCH_COST (optimize_function_for_speed_p (cfun
),
5233 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg
))
5234 && simple_operand_p (rl_arg
)
5235 && simple_operand_p (rr_arg
))
5237 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5238 if (code
== TRUTH_OR_EXPR
5239 && lcode
== NE_EXPR
&& integer_zerop (lr_arg
)
5240 && rcode
== NE_EXPR
&& integer_zerop (rr_arg
)
5241 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
)
5242 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg
)))
5243 return build2_loc (loc
, NE_EXPR
, truth_type
,
5244 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
5246 build_int_cst (TREE_TYPE (ll_arg
), 0));
5248 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5249 if (code
== TRUTH_AND_EXPR
5250 && lcode
== EQ_EXPR
&& integer_zerop (lr_arg
)
5251 && rcode
== EQ_EXPR
&& integer_zerop (rr_arg
)
5252 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
)
5253 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg
)))
5254 return build2_loc (loc
, EQ_EXPR
, truth_type
,
5255 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
5257 build_int_cst (TREE_TYPE (ll_arg
), 0));
5260 /* See if the comparisons can be merged. Then get all the parameters for
5263 if ((lcode
!= EQ_EXPR
&& lcode
!= NE_EXPR
)
5264 || (rcode
!= EQ_EXPR
&& rcode
!= NE_EXPR
))
5268 ll_inner
= decode_field_reference (loc
, ll_arg
,
5269 &ll_bitsize
, &ll_bitpos
, &ll_mode
,
5270 &ll_unsignedp
, &volatilep
, &ll_mask
,
5272 lr_inner
= decode_field_reference (loc
, lr_arg
,
5273 &lr_bitsize
, &lr_bitpos
, &lr_mode
,
5274 &lr_unsignedp
, &volatilep
, &lr_mask
,
5276 rl_inner
= decode_field_reference (loc
, rl_arg
,
5277 &rl_bitsize
, &rl_bitpos
, &rl_mode
,
5278 &rl_unsignedp
, &volatilep
, &rl_mask
,
5280 rr_inner
= decode_field_reference (loc
, rr_arg
,
5281 &rr_bitsize
, &rr_bitpos
, &rr_mode
,
5282 &rr_unsignedp
, &volatilep
, &rr_mask
,
5285 /* It must be true that the inner operation on the lhs of each
5286 comparison must be the same if we are to be able to do anything.
5287 Then see if we have constants. If not, the same must be true for
5289 if (volatilep
|| ll_inner
== 0 || rl_inner
== 0
5290 || ! operand_equal_p (ll_inner
, rl_inner
, 0))
5293 if (TREE_CODE (lr_arg
) == INTEGER_CST
5294 && TREE_CODE (rr_arg
) == INTEGER_CST
)
5295 l_const
= lr_arg
, r_const
= rr_arg
;
5296 else if (lr_inner
== 0 || rr_inner
== 0
5297 || ! operand_equal_p (lr_inner
, rr_inner
, 0))
5300 l_const
= r_const
= 0;
5302 /* If either comparison code is not correct for our logical operation,
5303 fail. However, we can convert a one-bit comparison against zero into
5304 the opposite comparison against that bit being set in the field. */
5306 wanted_code
= (code
== TRUTH_AND_EXPR
? EQ_EXPR
: NE_EXPR
);
5307 if (lcode
!= wanted_code
)
5309 if (l_const
&& integer_zerop (l_const
) && integer_pow2p (ll_mask
))
5311 /* Make the left operand unsigned, since we are only interested
5312 in the value of one bit. Otherwise we are doing the wrong
5321 /* This is analogous to the code for l_const above. */
5322 if (rcode
!= wanted_code
)
5324 if (r_const
&& integer_zerop (r_const
) && integer_pow2p (rl_mask
))
5333 /* See if we can find a mode that contains both fields being compared on
5334 the left. If we can't, fail. Otherwise, update all constants and masks
5335 to be relative to a field of that size. */
5336 first_bit
= MIN (ll_bitpos
, rl_bitpos
);
5337 end_bit
= MAX (ll_bitpos
+ ll_bitsize
, rl_bitpos
+ rl_bitsize
);
5338 lnmode
= get_best_mode (end_bit
- first_bit
, first_bit
, 0, 0,
5339 TYPE_ALIGN (TREE_TYPE (ll_inner
)), word_mode
,
5341 if (lnmode
== VOIDmode
)
5344 lnbitsize
= GET_MODE_BITSIZE (lnmode
);
5345 lnbitpos
= first_bit
& ~ (lnbitsize
- 1);
5346 lntype
= lang_hooks
.types
.type_for_size (lnbitsize
, 1);
5347 xll_bitpos
= ll_bitpos
- lnbitpos
, xrl_bitpos
= rl_bitpos
- lnbitpos
;
5349 if (BYTES_BIG_ENDIAN
)
5351 xll_bitpos
= lnbitsize
- xll_bitpos
- ll_bitsize
;
5352 xrl_bitpos
= lnbitsize
- xrl_bitpos
- rl_bitsize
;
5355 ll_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
, lntype
, ll_mask
),
5356 size_int (xll_bitpos
));
5357 rl_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
, lntype
, rl_mask
),
5358 size_int (xrl_bitpos
));
5362 l_const
= fold_convert_loc (loc
, lntype
, l_const
);
5363 l_const
= unextend (l_const
, ll_bitsize
, ll_unsignedp
, ll_and_mask
);
5364 l_const
= const_binop (LSHIFT_EXPR
, l_const
, size_int (xll_bitpos
));
5365 if (! integer_zerop (const_binop (BIT_AND_EXPR
, l_const
,
5366 fold_build1_loc (loc
, BIT_NOT_EXPR
,
5369 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
5371 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
5376 r_const
= fold_convert_loc (loc
, lntype
, r_const
);
5377 r_const
= unextend (r_const
, rl_bitsize
, rl_unsignedp
, rl_and_mask
);
5378 r_const
= const_binop (LSHIFT_EXPR
, r_const
, size_int (xrl_bitpos
));
5379 if (! integer_zerop (const_binop (BIT_AND_EXPR
, r_const
,
5380 fold_build1_loc (loc
, BIT_NOT_EXPR
,
5383 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
5385 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
5389 /* If the right sides are not constant, do the same for it. Also,
5390 disallow this optimization if a size or signedness mismatch occurs
5391 between the left and right sides. */
5394 if (ll_bitsize
!= lr_bitsize
|| rl_bitsize
!= rr_bitsize
5395 || ll_unsignedp
!= lr_unsignedp
|| rl_unsignedp
!= rr_unsignedp
5396 /* Make sure the two fields on the right
5397 correspond to the left without being swapped. */
5398 || ll_bitpos
- rl_bitpos
!= lr_bitpos
- rr_bitpos
)
5401 first_bit
= MIN (lr_bitpos
, rr_bitpos
);
5402 end_bit
= MAX (lr_bitpos
+ lr_bitsize
, rr_bitpos
+ rr_bitsize
);
5403 rnmode
= get_best_mode (end_bit
- first_bit
, first_bit
, 0, 0,
5404 TYPE_ALIGN (TREE_TYPE (lr_inner
)), word_mode
,
5406 if (rnmode
== VOIDmode
)
5409 rnbitsize
= GET_MODE_BITSIZE (rnmode
);
5410 rnbitpos
= first_bit
& ~ (rnbitsize
- 1);
5411 rntype
= lang_hooks
.types
.type_for_size (rnbitsize
, 1);
5412 xlr_bitpos
= lr_bitpos
- rnbitpos
, xrr_bitpos
= rr_bitpos
- rnbitpos
;
5414 if (BYTES_BIG_ENDIAN
)
5416 xlr_bitpos
= rnbitsize
- xlr_bitpos
- lr_bitsize
;
5417 xrr_bitpos
= rnbitsize
- xrr_bitpos
- rr_bitsize
;
5420 lr_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
,
5422 size_int (xlr_bitpos
));
5423 rr_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
,
5425 size_int (xrr_bitpos
));
5427 /* Make a mask that corresponds to both fields being compared.
5428 Do this for both items being compared. If the operands are the
5429 same size and the bits being compared are in the same position
5430 then we can do this by masking both and comparing the masked
5432 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
);
5433 lr_mask
= const_binop (BIT_IOR_EXPR
, lr_mask
, rr_mask
);
5434 if (lnbitsize
== rnbitsize
&& xll_bitpos
== xlr_bitpos
)
5436 lhs
= make_bit_field_ref (loc
, ll_inner
, lntype
, lnbitsize
, lnbitpos
,
5437 ll_unsignedp
|| rl_unsignedp
);
5438 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
5439 lhs
= build2 (BIT_AND_EXPR
, lntype
, lhs
, ll_mask
);
5441 rhs
= make_bit_field_ref (loc
, lr_inner
, rntype
, rnbitsize
, rnbitpos
,
5442 lr_unsignedp
|| rr_unsignedp
);
5443 if (! all_ones_mask_p (lr_mask
, rnbitsize
))
5444 rhs
= build2 (BIT_AND_EXPR
, rntype
, rhs
, lr_mask
);
5446 return build2_loc (loc
, wanted_code
, truth_type
, lhs
, rhs
);
5449 /* There is still another way we can do something: If both pairs of
5450 fields being compared are adjacent, we may be able to make a wider
5451 field containing them both.
5453 Note that we still must mask the lhs/rhs expressions. Furthermore,
5454 the mask must be shifted to account for the shift done by
5455 make_bit_field_ref. */
5456 if ((ll_bitsize
+ ll_bitpos
== rl_bitpos
5457 && lr_bitsize
+ lr_bitpos
== rr_bitpos
)
5458 || (ll_bitpos
== rl_bitpos
+ rl_bitsize
5459 && lr_bitpos
== rr_bitpos
+ rr_bitsize
))
5463 lhs
= make_bit_field_ref (loc
, ll_inner
, lntype
,
5464 ll_bitsize
+ rl_bitsize
,
5465 MIN (ll_bitpos
, rl_bitpos
), ll_unsignedp
);
5466 rhs
= make_bit_field_ref (loc
, lr_inner
, rntype
,
5467 lr_bitsize
+ rr_bitsize
,
5468 MIN (lr_bitpos
, rr_bitpos
), lr_unsignedp
);
5470 ll_mask
= const_binop (RSHIFT_EXPR
, ll_mask
,
5471 size_int (MIN (xll_bitpos
, xrl_bitpos
)));
5472 lr_mask
= const_binop (RSHIFT_EXPR
, lr_mask
,
5473 size_int (MIN (xlr_bitpos
, xrr_bitpos
)));
5475 /* Convert to the smaller type before masking out unwanted bits. */
5477 if (lntype
!= rntype
)
5479 if (lnbitsize
> rnbitsize
)
5481 lhs
= fold_convert_loc (loc
, rntype
, lhs
);
5482 ll_mask
= fold_convert_loc (loc
, rntype
, ll_mask
);
5485 else if (lnbitsize
< rnbitsize
)
5487 rhs
= fold_convert_loc (loc
, lntype
, rhs
);
5488 lr_mask
= fold_convert_loc (loc
, lntype
, lr_mask
);
5493 if (! all_ones_mask_p (ll_mask
, ll_bitsize
+ rl_bitsize
))
5494 lhs
= build2 (BIT_AND_EXPR
, type
, lhs
, ll_mask
);
5496 if (! all_ones_mask_p (lr_mask
, lr_bitsize
+ rr_bitsize
))
5497 rhs
= build2 (BIT_AND_EXPR
, type
, rhs
, lr_mask
);
5499 return build2_loc (loc
, wanted_code
, truth_type
, lhs
, rhs
);
5505 /* Handle the case of comparisons with constants. If there is something in
5506 common between the masks, those bits of the constants must be the same.
5507 If not, the condition is always false. Test for this to avoid generating
5508 incorrect code below. */
5509 result
= const_binop (BIT_AND_EXPR
, ll_mask
, rl_mask
);
5510 if (! integer_zerop (result
)
5511 && simple_cst_equal (const_binop (BIT_AND_EXPR
, result
, l_const
),
5512 const_binop (BIT_AND_EXPR
, result
, r_const
)) != 1)
5514 if (wanted_code
== NE_EXPR
)
5516 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5517 return constant_boolean_node (true, truth_type
);
5521 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5522 return constant_boolean_node (false, truth_type
);
5526 /* Construct the expression we will return. First get the component
5527 reference we will make. Unless the mask is all ones the width of
5528 that field, perform the mask operation. Then compare with the
5530 result
= make_bit_field_ref (loc
, ll_inner
, lntype
, lnbitsize
, lnbitpos
,
5531 ll_unsignedp
|| rl_unsignedp
);
5533 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
);
5534 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
5535 result
= build2_loc (loc
, BIT_AND_EXPR
, lntype
, result
, ll_mask
);
5537 return build2_loc (loc
, wanted_code
, truth_type
, result
,
5538 const_binop (BIT_IOR_EXPR
, l_const
, r_const
));
5541 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5545 optimize_minmax_comparison (location_t loc
, enum tree_code code
, tree type
,
5549 enum tree_code op_code
;
5552 int consts_equal
, consts_lt
;
5555 STRIP_SIGN_NOPS (arg0
);
5557 op_code
= TREE_CODE (arg0
);
5558 minmax_const
= TREE_OPERAND (arg0
, 1);
5559 comp_const
= fold_convert_loc (loc
, TREE_TYPE (arg0
), op1
);
5560 consts_equal
= tree_int_cst_equal (minmax_const
, comp_const
);
5561 consts_lt
= tree_int_cst_lt (minmax_const
, comp_const
);
5562 inner
= TREE_OPERAND (arg0
, 0);
5564 /* If something does not permit us to optimize, return the original tree. */
5565 if ((op_code
!= MIN_EXPR
&& op_code
!= MAX_EXPR
)
5566 || TREE_CODE (comp_const
) != INTEGER_CST
5567 || TREE_OVERFLOW (comp_const
)
5568 || TREE_CODE (minmax_const
) != INTEGER_CST
5569 || TREE_OVERFLOW (minmax_const
))
5572 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5573 and GT_EXPR, doing the rest with recursive calls using logical
5577 case NE_EXPR
: case LT_EXPR
: case LE_EXPR
:
5580 = optimize_minmax_comparison (loc
,
5581 invert_tree_comparison (code
, false),
5584 return invert_truthvalue_loc (loc
, tem
);
5590 fold_build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
5591 optimize_minmax_comparison
5592 (loc
, EQ_EXPR
, type
, arg0
, comp_const
),
5593 optimize_minmax_comparison
5594 (loc
, GT_EXPR
, type
, arg0
, comp_const
));
5597 if (op_code
== MAX_EXPR
&& consts_equal
)
5598 /* MAX (X, 0) == 0 -> X <= 0 */
5599 return fold_build2_loc (loc
, LE_EXPR
, type
, inner
, comp_const
);
5601 else if (op_code
== MAX_EXPR
&& consts_lt
)
5602 /* MAX (X, 0) == 5 -> X == 5 */
5603 return fold_build2_loc (loc
, EQ_EXPR
, type
, inner
, comp_const
);
5605 else if (op_code
== MAX_EXPR
)
5606 /* MAX (X, 0) == -1 -> false */
5607 return omit_one_operand_loc (loc
, type
, integer_zero_node
, inner
);
5609 else if (consts_equal
)
5610 /* MIN (X, 0) == 0 -> X >= 0 */
5611 return fold_build2_loc (loc
, GE_EXPR
, type
, inner
, comp_const
);
5614 /* MIN (X, 0) == 5 -> false */
5615 return omit_one_operand_loc (loc
, type
, integer_zero_node
, inner
);
5618 /* MIN (X, 0) == -1 -> X == -1 */
5619 return fold_build2_loc (loc
, EQ_EXPR
, type
, inner
, comp_const
);
5622 if (op_code
== MAX_EXPR
&& (consts_equal
|| consts_lt
))
5623 /* MAX (X, 0) > 0 -> X > 0
5624 MAX (X, 0) > 5 -> X > 5 */
5625 return fold_build2_loc (loc
, GT_EXPR
, type
, inner
, comp_const
);
5627 else if (op_code
== MAX_EXPR
)
5628 /* MAX (X, 0) > -1 -> true */
5629 return omit_one_operand_loc (loc
, type
, integer_one_node
, inner
);
5631 else if (op_code
== MIN_EXPR
&& (consts_equal
|| consts_lt
))
5632 /* MIN (X, 0) > 0 -> false
5633 MIN (X, 0) > 5 -> false */
5634 return omit_one_operand_loc (loc
, type
, integer_zero_node
, inner
);
5637 /* MIN (X, 0) > -1 -> X > -1 */
5638 return fold_build2_loc (loc
, GT_EXPR
, type
, inner
, comp_const
);
5645 /* T is an integer expression that is being multiplied, divided, or taken a
5646 modulus (CODE says which and what kind of divide or modulus) by a
5647 constant C. See if we can eliminate that operation by folding it with
5648 other operations already in T. WIDE_TYPE, if non-null, is a type that
5649 should be used for the computation if wider than our type.
5651 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5652 (X * 2) + (Y * 4). We must, however, be assured that either the original
5653 expression would not overflow or that overflow is undefined for the type
5654 in the language in question.
5656 If we return a non-null expression, it is an equivalent form of the
5657 original computation, but need not be in the original type.
5659 We set *STRICT_OVERFLOW_P to true if the return values depends on
5660 signed overflow being undefined. Otherwise we do not change
5661 *STRICT_OVERFLOW_P. */
5664 extract_muldiv (tree t
, tree c
, enum tree_code code
, tree wide_type
,
5665 bool *strict_overflow_p
)
5667 /* To avoid exponential search depth, refuse to allow recursion past
5668 three levels. Beyond that (1) it's highly unlikely that we'll find
5669 something interesting and (2) we've probably processed it before
5670 when we built the inner expression. */
5679 ret
= extract_muldiv_1 (t
, c
, code
, wide_type
, strict_overflow_p
);
5686 extract_muldiv_1 (tree t
, tree c
, enum tree_code code
, tree wide_type
,
5687 bool *strict_overflow_p
)
5689 tree type
= TREE_TYPE (t
);
5690 enum tree_code tcode
= TREE_CODE (t
);
5691 tree ctype
= (wide_type
!= 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type
))
5692 > GET_MODE_SIZE (TYPE_MODE (type
)))
5693 ? wide_type
: type
);
5695 int same_p
= tcode
== code
;
5696 tree op0
= NULL_TREE
, op1
= NULL_TREE
;
5697 bool sub_strict_overflow_p
;
5699 /* Don't deal with constants of zero here; they confuse the code below. */
5700 if (integer_zerop (c
))
5703 if (TREE_CODE_CLASS (tcode
) == tcc_unary
)
5704 op0
= TREE_OPERAND (t
, 0);
5706 if (TREE_CODE_CLASS (tcode
) == tcc_binary
)
5707 op0
= TREE_OPERAND (t
, 0), op1
= TREE_OPERAND (t
, 1);
5709 /* Note that we need not handle conditional operations here since fold
5710 already handles those cases. So just do arithmetic here. */
5714 /* For a constant, we can always simplify if we are a multiply
5715 or (for divide and modulus) if it is a multiple of our constant. */
5716 if (code
== MULT_EXPR
5717 || wi::multiple_of_p (t
, c
, TYPE_SIGN (type
)))
5718 return const_binop (code
, fold_convert (ctype
, t
),
5719 fold_convert (ctype
, c
));
5722 CASE_CONVERT
: case NON_LVALUE_EXPR
:
5723 /* If op0 is an expression ... */
5724 if ((COMPARISON_CLASS_P (op0
)
5725 || UNARY_CLASS_P (op0
)
5726 || BINARY_CLASS_P (op0
)
5727 || VL_EXP_CLASS_P (op0
)
5728 || EXPRESSION_CLASS_P (op0
))
5729 /* ... and has wrapping overflow, and its type is smaller
5730 than ctype, then we cannot pass through as widening. */
5731 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0
))
5732 && (TYPE_PRECISION (ctype
)
5733 > TYPE_PRECISION (TREE_TYPE (op0
))))
5734 /* ... or this is a truncation (t is narrower than op0),
5735 then we cannot pass through this narrowing. */
5736 || (TYPE_PRECISION (type
)
5737 < TYPE_PRECISION (TREE_TYPE (op0
)))
5738 /* ... or signedness changes for division or modulus,
5739 then we cannot pass through this conversion. */
5740 || (code
!= MULT_EXPR
5741 && (TYPE_UNSIGNED (ctype
)
5742 != TYPE_UNSIGNED (TREE_TYPE (op0
))))
5743 /* ... or has undefined overflow while the converted to
5744 type has not, we cannot do the operation in the inner type
5745 as that would introduce undefined overflow. */
5746 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0
))
5747 && !TYPE_OVERFLOW_UNDEFINED (type
))))
5750 /* Pass the constant down and see if we can make a simplification. If
5751 we can, replace this expression with the inner simplification for
5752 possible later conversion to our or some other type. */
5753 if ((t2
= fold_convert (TREE_TYPE (op0
), c
)) != 0
5754 && TREE_CODE (t2
) == INTEGER_CST
5755 && !TREE_OVERFLOW (t2
)
5756 && (0 != (t1
= extract_muldiv (op0
, t2
, code
,
5758 ? ctype
: NULL_TREE
,
5759 strict_overflow_p
))))
5764 /* If widening the type changes it from signed to unsigned, then we
5765 must avoid building ABS_EXPR itself as unsigned. */
5766 if (TYPE_UNSIGNED (ctype
) && !TYPE_UNSIGNED (type
))
5768 tree cstype
= (*signed_type_for
) (ctype
);
5769 if ((t1
= extract_muldiv (op0
, c
, code
, cstype
, strict_overflow_p
))
5772 t1
= fold_build1 (tcode
, cstype
, fold_convert (cstype
, t1
));
5773 return fold_convert (ctype
, t1
);
5777 /* If the constant is negative, we cannot simplify this. */
5778 if (tree_int_cst_sgn (c
) == -1)
5782 /* For division and modulus, type can't be unsigned, as e.g.
5783 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5784 For signed types, even with wrapping overflow, this is fine. */
5785 if (code
!= MULT_EXPR
&& TYPE_UNSIGNED (type
))
5787 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
, strict_overflow_p
))
5789 return fold_build1 (tcode
, ctype
, fold_convert (ctype
, t1
));
5792 case MIN_EXPR
: case MAX_EXPR
:
5793 /* If widening the type changes the signedness, then we can't perform
5794 this optimization as that changes the result. */
5795 if (TYPE_UNSIGNED (ctype
) != TYPE_UNSIGNED (type
))
5798 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5799 sub_strict_overflow_p
= false;
5800 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
,
5801 &sub_strict_overflow_p
)) != 0
5802 && (t2
= extract_muldiv (op1
, c
, code
, wide_type
,
5803 &sub_strict_overflow_p
)) != 0)
5805 if (tree_int_cst_sgn (c
) < 0)
5806 tcode
= (tcode
== MIN_EXPR
? MAX_EXPR
: MIN_EXPR
);
5807 if (sub_strict_overflow_p
)
5808 *strict_overflow_p
= true;
5809 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
5810 fold_convert (ctype
, t2
));
5814 case LSHIFT_EXPR
: case RSHIFT_EXPR
:
5815 /* If the second operand is constant, this is a multiplication
5816 or floor division, by a power of two, so we can treat it that
5817 way unless the multiplier or divisor overflows. Signed
5818 left-shift overflow is implementation-defined rather than
5819 undefined in C90, so do not convert signed left shift into
5821 if (TREE_CODE (op1
) == INTEGER_CST
5822 && (tcode
== RSHIFT_EXPR
|| TYPE_UNSIGNED (TREE_TYPE (op0
)))
5823 /* const_binop may not detect overflow correctly,
5824 so check for it explicitly here. */
5825 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node
)), op1
)
5826 && 0 != (t1
= fold_convert (ctype
,
5827 const_binop (LSHIFT_EXPR
,
5830 && !TREE_OVERFLOW (t1
))
5831 return extract_muldiv (build2 (tcode
== LSHIFT_EXPR
5832 ? MULT_EXPR
: FLOOR_DIV_EXPR
,
5834 fold_convert (ctype
, op0
),
5836 c
, code
, wide_type
, strict_overflow_p
);
5839 case PLUS_EXPR
: case MINUS_EXPR
:
5840 /* See if we can eliminate the operation on both sides. If we can, we
5841 can return a new PLUS or MINUS. If we can't, the only remaining
5842 cases where we can do anything are if the second operand is a
5844 sub_strict_overflow_p
= false;
5845 t1
= extract_muldiv (op0
, c
, code
, wide_type
, &sub_strict_overflow_p
);
5846 t2
= extract_muldiv (op1
, c
, code
, wide_type
, &sub_strict_overflow_p
);
5847 if (t1
!= 0 && t2
!= 0
5848 && (code
== MULT_EXPR
5849 /* If not multiplication, we can only do this if both operands
5850 are divisible by c. */
5851 || (multiple_of_p (ctype
, op0
, c
)
5852 && multiple_of_p (ctype
, op1
, c
))))
5854 if (sub_strict_overflow_p
)
5855 *strict_overflow_p
= true;
5856 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
5857 fold_convert (ctype
, t2
));
5860 /* If this was a subtraction, negate OP1 and set it to be an addition.
5861 This simplifies the logic below. */
5862 if (tcode
== MINUS_EXPR
)
5864 tcode
= PLUS_EXPR
, op1
= negate_expr (op1
);
5865 /* If OP1 was not easily negatable, the constant may be OP0. */
5866 if (TREE_CODE (op0
) == INTEGER_CST
)
5877 if (TREE_CODE (op1
) != INTEGER_CST
)
5880 /* If either OP1 or C are negative, this optimization is not safe for
5881 some of the division and remainder types while for others we need
5882 to change the code. */
5883 if (tree_int_cst_sgn (op1
) < 0 || tree_int_cst_sgn (c
) < 0)
5885 if (code
== CEIL_DIV_EXPR
)
5886 code
= FLOOR_DIV_EXPR
;
5887 else if (code
== FLOOR_DIV_EXPR
)
5888 code
= CEIL_DIV_EXPR
;
5889 else if (code
!= MULT_EXPR
5890 && code
!= CEIL_MOD_EXPR
&& code
!= FLOOR_MOD_EXPR
)
5894 /* If it's a multiply or a division/modulus operation of a multiple
5895 of our constant, do the operation and verify it doesn't overflow. */
5896 if (code
== MULT_EXPR
5897 || wi::multiple_of_p (op1
, c
, TYPE_SIGN (type
)))
5899 op1
= const_binop (code
, fold_convert (ctype
, op1
),
5900 fold_convert (ctype
, c
));
5901 /* We allow the constant to overflow with wrapping semantics. */
5903 || (TREE_OVERFLOW (op1
) && !TYPE_OVERFLOW_WRAPS (ctype
)))
5909 /* If we have an unsigned type, we cannot widen the operation since it
5910 will change the result if the original computation overflowed. */
5911 if (TYPE_UNSIGNED (ctype
) && ctype
!= type
)
5914 /* If we were able to eliminate our operation from the first side,
5915 apply our operation to the second side and reform the PLUS. */
5916 if (t1
!= 0 && (TREE_CODE (t1
) != code
|| code
== MULT_EXPR
))
5917 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
), op1
);
5919 /* The last case is if we are a multiply. In that case, we can
5920 apply the distributive law to commute the multiply and addition
5921 if the multiplication of the constants doesn't overflow
5922 and overflow is defined. With undefined overflow
5923 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
5924 if (code
== MULT_EXPR
&& TYPE_OVERFLOW_WRAPS (ctype
))
5925 return fold_build2 (tcode
, ctype
,
5926 fold_build2 (code
, ctype
,
5927 fold_convert (ctype
, op0
),
5928 fold_convert (ctype
, c
)),
5934 /* We have a special case here if we are doing something like
5935 (C * 8) % 4 since we know that's zero. */
5936 if ((code
== TRUNC_MOD_EXPR
|| code
== CEIL_MOD_EXPR
5937 || code
== FLOOR_MOD_EXPR
|| code
== ROUND_MOD_EXPR
)
5938 /* If the multiplication can overflow we cannot optimize this. */
5939 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t
))
5940 && TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
5941 && wi::multiple_of_p (op1
, c
, TYPE_SIGN (type
)))
5943 *strict_overflow_p
= true;
5944 return omit_one_operand (type
, integer_zero_node
, op0
);
5947 /* ... fall through ... */
5949 case TRUNC_DIV_EXPR
: case CEIL_DIV_EXPR
: case FLOOR_DIV_EXPR
:
5950 case ROUND_DIV_EXPR
: case EXACT_DIV_EXPR
:
5951 /* If we can extract our operation from the LHS, do so and return a
5952 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5953 do something only if the second operand is a constant. */
5955 && (t1
= extract_muldiv (op0
, c
, code
, wide_type
,
5956 strict_overflow_p
)) != 0)
5957 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
5958 fold_convert (ctype
, op1
));
5959 else if (tcode
== MULT_EXPR
&& code
== MULT_EXPR
5960 && (t1
= extract_muldiv (op1
, c
, code
, wide_type
,
5961 strict_overflow_p
)) != 0)
5962 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
5963 fold_convert (ctype
, t1
));
5964 else if (TREE_CODE (op1
) != INTEGER_CST
)
5967 /* If these are the same operation types, we can associate them
5968 assuming no overflow. */
5971 bool overflow_p
= false;
5972 bool overflow_mul_p
;
5973 signop sign
= TYPE_SIGN (ctype
);
5974 wide_int mul
= wi::mul (op1
, c
, sign
, &overflow_mul_p
);
5975 overflow_p
= TREE_OVERFLOW (c
) | TREE_OVERFLOW (op1
);
5977 && ((sign
== UNSIGNED
&& tcode
!= MULT_EXPR
) || sign
== SIGNED
))
5980 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
5981 wide_int_to_tree (ctype
, mul
));
5984 /* If these operations "cancel" each other, we have the main
5985 optimizations of this pass, which occur when either constant is a
5986 multiple of the other, in which case we replace this with either an
5987 operation or CODE or TCODE.
5989 If we have an unsigned type, we cannot do this since it will change
5990 the result if the original computation overflowed. */
5991 if (TYPE_OVERFLOW_UNDEFINED (ctype
)
5992 && ((code
== MULT_EXPR
&& tcode
== EXACT_DIV_EXPR
)
5993 || (tcode
== MULT_EXPR
5994 && code
!= TRUNC_MOD_EXPR
&& code
!= CEIL_MOD_EXPR
5995 && code
!= FLOOR_MOD_EXPR
&& code
!= ROUND_MOD_EXPR
5996 && code
!= MULT_EXPR
)))
5998 if (wi::multiple_of_p (op1
, c
, TYPE_SIGN (type
)))
6000 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
6001 *strict_overflow_p
= true;
6002 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
6003 fold_convert (ctype
,
6004 const_binop (TRUNC_DIV_EXPR
,
6007 else if (wi::multiple_of_p (c
, op1
, TYPE_SIGN (type
)))
6009 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
6010 *strict_overflow_p
= true;
6011 return fold_build2 (code
, ctype
, fold_convert (ctype
, op0
),
6012 fold_convert (ctype
,
6013 const_binop (TRUNC_DIV_EXPR
,
6026 /* Return a node which has the indicated constant VALUE (either 0 or
6027 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6028 and is of the indicated TYPE. */
/* Return a constant node of TYPE holding VALUE: integer_one_node /
   integer_zero_node for integer_type_node, boolean_true_node /
   boolean_false_node for boolean_type_node, a splatted vector constant
   for VECTOR_TYPE, and a fold-converted 0/1 for any other type.
   NOTE(review): this chunk is extraction-garbled -- logical lines are
   split across physical lines, and gaps in the embedded original line
   numbers (6028 -> 6031, 6039 -> 6042) show interior lines (return
   type, braces, the vector element value) are missing; recover them
   from the original file before compiling.  */
6031 constant_boolean_node (bool value
, tree type
)
6033 if (type
== integer_type_node
)
6034 return value
? integer_one_node
: integer_zero_node
;
6035 else if (type
== boolean_type_node
)
6036 return value
? boolean_true_node
: boolean_false_node
;
6037 else if (TREE_CODE (type
) == VECTOR_TYPE
)
/* Vector result: splat one per-element constant across all lanes.  */
6038 return build_vector_from_val (type
,
6039 build_int_cst (TREE_TYPE (type
),
/* Fallback: convert a plain 0/1 to the requested type.  */
6042 return fold_convert (type
, value
? integer_one_node
: integer_zero_node
);
6046 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6047 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6048 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6049 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6050 COND is the first argument to CODE; otherwise (as in the example
6051 given here), it is the second argument. TYPE is the type of the
6052 original expression. Return NULL_TREE if no simplification is
/* Distribute binary operation CODE over a conditional, per the block
   comment above: `a + (b ? x : y)' -> `b ? (a + x) : (a + y)'.  COND
   is the (VEC_)COND_EXPR or comparison, ARG the plain operand, and
   COND_FIRST_P says whether COND was operand 0 or operand 1 of CODE.
   NOTE(review): extraction-garbled -- gaps in the embedded numbering
   (e.g. 6099 -> 6102, 6102 -> 6105, 6105 -> 6107) mean the
   cond_first_p branch selectors, braces and early returns are missing
   here; restore them from the original file before compiling.  */
6056 fold_binary_op_with_conditional_arg (location_t loc
,
6057 enum tree_code code
,
6058 tree type
, tree op0
, tree op1
,
6059 tree cond
, tree arg
, int cond_first_p
)
/* cond_type/arg_type: the static types CODE sees for the conditional
   operand and for the plain operand, respectively.  */
6061 tree cond_type
= cond_first_p
? TREE_TYPE (op0
) : TREE_TYPE (op1
);
6062 tree arg_type
= cond_first_p
? TREE_TYPE (op1
) : TREE_TYPE (op0
);
6063 tree test
, true_value
, false_value
;
6064 tree lhs
= NULL_TREE
;
6065 tree rhs
= NULL_TREE
;
6066 enum tree_code cond_code
= COND_EXPR
;
/* Split COND into test and arms; a bare comparison gets constant
   true/false arms built via constant_boolean_node below.  */
6068 if (TREE_CODE (cond
) == COND_EXPR
6069 || TREE_CODE (cond
) == VEC_COND_EXPR
)
6071 test
= TREE_OPERAND (cond
, 0);
6072 true_value
= TREE_OPERAND (cond
, 1);
6073 false_value
= TREE_OPERAND (cond
, 2);
6074 /* If this operand throws an expression, then it does not make
6075 sense to try to perform a logical or arithmetic operation
6077 if (VOID_TYPE_P (TREE_TYPE (true_value
)))
6079 if (VOID_TYPE_P (TREE_TYPE (false_value
)))
6084 tree testtype
= TREE_TYPE (cond
);
6086 true_value
= constant_boolean_node (true, testtype
);
6087 false_value
= constant_boolean_node (false, testtype
);
/* A vector test must be rebuilt as VEC_COND_EXPR, not COND_EXPR.  */
6090 if (TREE_CODE (TREE_TYPE (test
)) == VECTOR_TYPE
)
6091 cond_code
= VEC_COND_EXPR
;
6093 /* This transformation is only worthwhile if we don't have to wrap ARG
6094 in a SAVE_EXPR and the operation can be simplified without recursing
6095 on at least one of the branches once its pushed inside the COND_EXPR. */
6096 if (!TREE_CONSTANT (arg
)
6097 && (TREE_SIDE_EFFECTS (arg
)
6098 || TREE_CODE (arg
) == COND_EXPR
|| TREE_CODE (arg
) == VEC_COND_EXPR
6099 || TREE_CONSTANT (true_value
) || TREE_CONSTANT (false_value
)))
6102 arg
= fold_convert_loc (loc
, arg_type
, arg
);
/* Fold CODE into each arm; which operand order is used depends on
   cond_first_p (the selecting `if's are among the missing lines).  */
6105 true_value
= fold_convert_loc (loc
, cond_type
, true_value
);
6107 lhs
= fold_build2_loc (loc
, code
, type
, true_value
, arg
);
6109 lhs
= fold_build2_loc (loc
, code
, type
, arg
, true_value
);
6113 false_value
= fold_convert_loc (loc
, cond_type
, false_value
);
6115 rhs
= fold_build2_loc (loc
, code
, type
, false_value
, arg
);
6117 rhs
= fold_build2_loc (loc
, code
, type
, arg
, false_value
);
6120 /* Check that we have simplified at least one of the branches. */
6121 if (!TREE_CONSTANT (arg
) && !TREE_CONSTANT (lhs
) && !TREE_CONSTANT (rhs
))
/* Rebuild the conditional with CODE pushed into both arms.  */
6124 return fold_build3_loc (loc
, cond_code
, type
, test
, lhs
, rhs
);
6128 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6130 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6131 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6132 ADDEND is the same as X.
6134 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6135 and finite. The problematic cases are when X is zero, and its mode
6136 has signed zeros. In the case of rounding towards -infinity,
6137 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6138 modes, X + 0 is not the same as X because -0 + 0 is 0. */
/* Decide whether adding (if !NEGATE) or subtracting (if NEGATE) ADDEND
   leaves any X of TYPE unchanged; see the block comment above for the
   signed-zero and rounding-mode caveats.
   NOTE(review): extraction-garbled -- the `return' values after each
   early test fall in the embedded numbering gaps; recover them from
   the original file before compiling.  */
6141 fold_real_zero_addition_p (const_tree type
, const_tree addend
, int negate
)
/* Only zero addends qualify at all.  */
6143 if (!real_zerop (addend
))
6146 /* Don't allow the fold with -fsignaling-nans. */
6147 if (HONOR_SNANS (TYPE_MODE (type
)))
6150 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6151 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type
)))
6154 /* In a vector or complex, we would need to check the sign of all zeros. */
6155 if (TREE_CODE (addend
) != REAL_CST
)
6158 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6159 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend
)))
6162 /* The mode has signed zeros, and we have to honor their sign.
6163 In this situation, there is only one case we can return true for.
6164 X - 0 is the same as X unless rounding towards -infinity is
6166 return negate
&& !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
));
6169 /* Subroutine of fold() that checks comparisons of built-in math
6170 functions against real constants.
6172 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6173 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6174 is the type of the result and ARG0 and ARG1 are the operands of the
6175 comparison. ARG1 must be a TREE_REAL_CST.
6177 The function returns the constant folded tree if a simplification
6178 can be made, and NULL_TREE otherwise. */
/* Fold a comparison of a __builtin_sqrt call (ARG0) against the real
   constant ARG1; see the block comment above for the contract.  The
   square c2 = c*c is used to rewrite sqrt(x) CODE c as a comparison on
   x directly, guarded by the NaN/Inf-honoring checks below.
   NOTE(review): extraction-garbled -- the declarations of c/c2,
   braces, and several intermediate lines fall in the gaps of the
   embedded numbering; restore them from the original file.  */
6181 fold_mathfn_compare (location_t loc
,
6182 enum built_in_function fcode
, enum tree_code code
,
6183 tree type
, tree arg0
, tree arg1
)
6187 if (BUILTIN_SQRT_P (fcode
))
6189 tree arg
= CALL_EXPR_ARG (arg0
, 0);
6190 machine_mode mode
= TYPE_MODE (TREE_TYPE (arg0
));
6192 c
= TREE_REAL_CST (arg1
);
/* Negative constant: sqrt(x) is never negative, so the comparison is
   decided outright (modulo NaN handling).  */
6193 if (REAL_VALUE_NEGATIVE (c
))
6195 /* sqrt(x) < y is always false, if y is negative. */
6196 if (code
== EQ_EXPR
|| code
== LT_EXPR
|| code
== LE_EXPR
)
6197 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
6199 /* sqrt(x) > y is always true, if y is negative and we
6200 don't care about NaNs, i.e. negative values of x. */
6201 if (code
== NE_EXPR
|| !HONOR_NANS (mode
))
6202 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg
);
6204 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6205 return fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6206 build_real (TREE_TYPE (arg
), dconst0
));
6208 else if (code
== GT_EXPR
|| code
== GE_EXPR
)
/* c2 = c*c rounded into MODE, for comparing x directly.  */
6212 REAL_ARITHMETIC (c2
, MULT_EXPR
, c
, c
);
6213 real_convert (&c2
, mode
, &c2
);
6215 if (REAL_VALUE_ISINF (c2
))
6217 /* sqrt(x) > y is x == +Inf, when y is very large. */
6218 if (HONOR_INFINITIES (mode
))
6219 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg
,
6220 build_real (TREE_TYPE (arg
), c2
));
6222 /* sqrt(x) > y is always false, when y is very large
6223 and we don't care about infinities. */
6224 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
6227 /* sqrt(x) > c is the same as x > c*c. */
6228 return fold_build2_loc (loc
, code
, type
, arg
,
6229 build_real (TREE_TYPE (arg
), c2
));
6231 else if (code
== LT_EXPR
|| code
== LE_EXPR
)
6235 REAL_ARITHMETIC (c2
, MULT_EXPR
, c
, c
);
6236 real_convert (&c2
, mode
, &c2
);
6238 if (REAL_VALUE_ISINF (c2
))
6240 /* sqrt(x) < y is always true, when y is a very large
6241 value and we don't care about NaNs or Infinities. */
6242 if (! HONOR_NANS (mode
) && ! HONOR_INFINITIES (mode
))
6243 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg
);
6245 /* sqrt(x) < y is x != +Inf when y is very large and we
6246 don't care about NaNs. */
6247 if (! HONOR_NANS (mode
))
6248 return fold_build2_loc (loc
, NE_EXPR
, type
, arg
,
6249 build_real (TREE_TYPE (arg
), c2
));
6251 /* sqrt(x) < y is x >= 0 when y is very large and we
6252 don't care about Infinities. */
6253 if (! HONOR_INFINITIES (mode
))
6254 return fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6255 build_real (TREE_TYPE (arg
), dconst0
));
6257 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
/* ARG is used twice below, so wrap it in a SAVE_EXPR first.  */
6258 arg
= save_expr (arg
);
6259 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
6260 fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6261 build_real (TREE_TYPE (arg
),
6263 fold_build2_loc (loc
, NE_EXPR
, type
, arg
,
6264 build_real (TREE_TYPE (arg
),
6268 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6269 if (! HONOR_NANS (mode
))
6270 return fold_build2_loc (loc
, code
, type
, arg
,
6271 build_real (TREE_TYPE (arg
), c2
));
6273 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6274 arg
= save_expr (arg
);
6275 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
6276 fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6277 build_real (TREE_TYPE (arg
),
6279 fold_build2_loc (loc
, code
, type
, arg
,
6280 build_real (TREE_TYPE (arg
),
6288 /* Subroutine of fold() that optimizes comparisons against Infinities,
6289 either +Inf or -Inf.
6291 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6292 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6293 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6295 The function returns the constant folded tree if a simplification
6296 can be made, and NULL_TREE otherwise. */
/* Fold the comparison ARG0 CODE ARG1 where ARG1 is +/-Inf (see the
   block comment above).  For -Inf the comparison sense is swapped and
   NEG set; each case is then rewritten as a comparison against the
   mode's largest finite value obtained via real_maxval.
   NOTE(review): extraction-garbled -- the switch (code) skeleton, the
   case labels, and the declarations of mode/neg/temp are in the
   embedded numbering gaps; restore them from the original file.  */
6299 fold_inf_compare (location_t loc
, enum tree_code code
, tree type
,
6300 tree arg0
, tree arg1
)
6303 REAL_VALUE_TYPE max
;
6307 mode
= TYPE_MODE (TREE_TYPE (arg0
));
6309 /* For negative infinity swap the sense of the comparison. */
6310 neg
= REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
));
6312 code
= swap_tree_comparison (code
);
6317 /* x > +Inf is always false, if we ignore sNaNs. */
6318 if (HONOR_SNANS (mode
))
6320 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
6323 /* x <= +Inf is always true, if we don't care about NaNs. */
6324 if (! HONOR_NANS (mode
))
6325 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
6327 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
/* ARG0 appears twice in the rebuilt expression, hence SAVE_EXPR.  */
6328 arg0
= save_expr (arg0
);
6329 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
, arg0
);
6333 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6334 real_maxval (&max
, neg
, mode
);
6335 return fold_build2_loc (loc
, neg
? LT_EXPR
: GT_EXPR
, type
,
6336 arg0
, build_real (TREE_TYPE (arg0
), max
));
6339 /* x < +Inf is always equal to x <= DBL_MAX. */
6340 real_maxval (&max
, neg
, mode
);
6341 return fold_build2_loc (loc
, neg
? GE_EXPR
: LE_EXPR
, type
,
6342 arg0
, build_real (TREE_TYPE (arg0
), max
));
6345 /* x != +Inf is always equal to !(x > DBL_MAX). */
6346 real_maxval (&max
, neg
, mode
);
6347 if (! HONOR_NANS (mode
))
6348 return fold_build2_loc (loc
, neg
? GE_EXPR
: LE_EXPR
, type
,
6349 arg0
, build_real (TREE_TYPE (arg0
), max
));
/* With NaNs honored, express x != +Inf as !(x > DBL_MAX).  */
6351 temp
= fold_build2_loc (loc
, neg
? LT_EXPR
: GT_EXPR
, type
,
6352 arg0
, build_real (TREE_TYPE (arg0
), max
));
6353 return fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, temp
);
6362 /* Subroutine of fold() that optimizes comparisons of a division by
6363 a nonzero integer constant against an integer constant, i.e.
6366 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6367 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6368 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6370 The function returns the constant folded tree if a simplification
6371 can be made, and NULL_TREE otherwise. */
/* Fold ARG0 CODE ARG1 where ARG0 is a division arg00/arg01 by a
   nonzero integer constant: compute the inclusive range [lo, hi] of
   numerator values satisfying the comparison (prod = arg01*arg1,
   widened by arg01-1 on the appropriate side, with overflow tracked
   explicitly via wi:: arithmetic) and rewrite as a range check or a
   single comparison on arg00.
   NOTE(review): extraction-garbled -- the `bool overflow' declaration,
   the switch/case labels for the sign of ARG1 and for CODE, braces,
   and default/abort arms all fall in the embedded numbering gaps;
   restore them from the original file before compiling.  */
6374 fold_div_compare (location_t loc
,
6375 enum tree_code code
, tree type
, tree arg0
, tree arg1
)
6377 tree prod
, tmp
, hi
, lo
;
6378 tree arg00
= TREE_OPERAND (arg0
, 0);
6379 tree arg01
= TREE_OPERAND (arg0
, 1);
6380 signop sign
= TYPE_SIGN (TREE_TYPE (arg0
));
6381 bool neg_overflow
= false;
6384 /* We have to do this the hard way to detect unsigned overflow.
6385 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6386 wide_int val
= wi::mul (arg01
, arg1
, sign
, &overflow
);
6387 prod
= force_fit_type (TREE_TYPE (arg00
), val
, -1, overflow
);
6388 neg_overflow
= false;
/* Unsigned divisor: range is [prod, prod + (arg01 - 1)].  */
6390 if (sign
== UNSIGNED
)
6392 tmp
= int_const_binop (MINUS_EXPR
, arg01
,
6393 build_int_cst (TREE_TYPE (arg01
), 1));
6396 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6397 val
= wi::add (prod
, tmp
, sign
, &overflow
);
6398 hi
= force_fit_type (TREE_TYPE (arg00
), val
,
6399 -1, overflow
| TREE_OVERFLOW (prod
));
/* Signed, non-negative divisor: bounds depend on the sign of ARG1.  */
6401 else if (tree_int_cst_sgn (arg01
) >= 0)
6403 tmp
= int_const_binop (MINUS_EXPR
, arg01
,
6404 build_int_cst (TREE_TYPE (arg01
), 1));
6405 switch (tree_int_cst_sgn (arg1
))
6408 neg_overflow
= true;
6409 lo
= int_const_binop (MINUS_EXPR
, prod
, tmp
);
6414 lo
= fold_negate_const (tmp
, TREE_TYPE (arg0
));
6419 hi
= int_const_binop (PLUS_EXPR
, prod
, tmp
);
6429 /* A negative divisor reverses the relational operators. */
6430 code
= swap_tree_comparison (code
);
6432 tmp
= int_const_binop (PLUS_EXPR
, arg01
,
6433 build_int_cst (TREE_TYPE (arg01
), 1));
6434 switch (tree_int_cst_sgn (arg1
))
6437 hi
= int_const_binop (MINUS_EXPR
, prod
, tmp
);
6442 hi
= fold_negate_const (tmp
, TREE_TYPE (arg0
));
6447 neg_overflow
= true;
6448 lo
= int_const_binop (PLUS_EXPR
, prod
, tmp
);
/* Emit the final test; each overflowed bound collapses the range
   check to a single comparison or a constant result.  */
6460 if (TREE_OVERFLOW (lo
) && TREE_OVERFLOW (hi
))
6461 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg00
);
6462 if (TREE_OVERFLOW (hi
))
6463 return fold_build2_loc (loc
, GE_EXPR
, type
, arg00
, lo
);
6464 if (TREE_OVERFLOW (lo
))
6465 return fold_build2_loc (loc
, LE_EXPR
, type
, arg00
, hi
);
6466 return build_range_check (loc
, type
, arg00
, 1, lo
, hi
);
6469 if (TREE_OVERFLOW (lo
) && TREE_OVERFLOW (hi
))
6470 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg00
);
6471 if (TREE_OVERFLOW (hi
))
6472 return fold_build2_loc (loc
, LT_EXPR
, type
, arg00
, lo
);
6473 if (TREE_OVERFLOW (lo
))
6474 return fold_build2_loc (loc
, GT_EXPR
, type
, arg00
, hi
);
6475 return build_range_check (loc
, type
, arg00
, 0, lo
, hi
);
6478 if (TREE_OVERFLOW (lo
))
6480 tmp
= neg_overflow
? integer_zero_node
: integer_one_node
;
6481 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6483 return fold_build2_loc (loc
, LT_EXPR
, type
, arg00
, lo
);
6486 if (TREE_OVERFLOW (hi
))
6488 tmp
= neg_overflow
? integer_zero_node
: integer_one_node
;
6489 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6491 return fold_build2_loc (loc
, LE_EXPR
, type
, arg00
, hi
);
6494 if (TREE_OVERFLOW (hi
))
6496 tmp
= neg_overflow
? integer_one_node
: integer_zero_node
;
6497 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6499 return fold_build2_loc (loc
, GT_EXPR
, type
, arg00
, hi
);
6502 if (TREE_OVERFLOW (lo
))
6504 tmp
= neg_overflow
? integer_one_node
: integer_zero_node
;
6505 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6507 return fold_build2_loc (loc
, GE_EXPR
, type
, arg00
, lo
);
6517 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6518 equality/inequality test, then return a simplified form of the test
6519 using a sign testing. Otherwise return NULL. TYPE is the desired
/* If CODE is an (in)equality test of ARG0 = (A & C) against zero and C
   is A's sign bit (checked via sign_bit_p), rewrite as a signed
   comparison against 0: (A & C) != 0 -> A < 0, (A & C) == 0 -> A >= 0.
   NOTE(review): extraction-garbled -- the result-type parameter (line
   6525), braces, and the NULL fallthrough return are in the embedded
   numbering gaps; restore from the original file.  */
6523 fold_single_bit_test_into_sign_test (location_t loc
,
6524 enum tree_code code
, tree arg0
, tree arg1
,
6527 /* If this is testing a single bit, we can optimize the test. */
6528 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
6529 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
6530 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
6532 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6533 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6534 tree arg00
= sign_bit_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg0
, 1));
6536 if (arg00
!= NULL_TREE
6537 /* This is only a win if casting to a signed type is cheap,
6538 i.e. when arg00's type is not a partial mode. */
6539 && TYPE_PRECISION (TREE_TYPE (arg00
))
6540 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00
))))
/* Build the signed comparison: >= 0 for ==, < 0 for !=.  */
6542 tree stype
= signed_type_for (TREE_TYPE (arg00
));
6543 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
,
6545 fold_convert_loc (loc
, stype
, arg00
),
6546 build_int_cst (stype
, 0));
6553 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6554 equality/inequality test, then return a simplified form of
6555 the test using shifts and logical operations. Otherwise return
6556 NULL. TYPE is the desired result type. */
/* Rewrite the single-bit test (A & C) ==/!= 0, with C a power of two,
   as shift-and-mask: ((A >> log2(C)) & 1), XORed with 1 when CODE is
   EQ_EXPR.  First tries the cheaper sign-bit form via
   fold_single_bit_test_into_sign_test.
   NOTE(review): extraction-garbled -- the declarations of tem/one/
   ops_unsigned, the #else/#endif arm of the LOAD_EXTEND_OP
   conditional, braces, and the final returns are in the embedded
   numbering gaps; restore from the original file.  */
6559 fold_single_bit_test (location_t loc
, enum tree_code code
,
6560 tree arg0
, tree arg1
, tree result_type
)
6562 /* If this is testing a single bit, we can optimize the test. */
6563 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
6564 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
6565 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
6567 tree inner
= TREE_OPERAND (arg0
, 0);
6568 tree type
= TREE_TYPE (arg0
);
6569 int bitnum
= tree_log2 (TREE_OPERAND (arg0
, 1));
6570 machine_mode operand_mode
= TYPE_MODE (type
);
6572 tree signed_type
, unsigned_type
, intermediate_type
;
6575 /* First, see if we can fold the single bit test into a sign-bit
6577 tem
= fold_single_bit_test_into_sign_test (loc
, code
, arg0
, arg1
,
6582 /* Otherwise we have (A & C) != 0 where C is a single bit,
6583 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6584 Similarly for (A & C) == 0. */
6586 /* If INNER is a right shift of a constant and it plus BITNUM does
6587 not overflow, adjust BITNUM and INNER. */
6588 if (TREE_CODE (inner
) == RSHIFT_EXPR
6589 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
6590 && bitnum
< TYPE_PRECISION (type
)
6591 && wi::ltu_p (TREE_OPERAND (inner
, 1),
6592 TYPE_PRECISION (type
) - bitnum
))
6594 bitnum
+= tree_to_uhwi (TREE_OPERAND (inner
, 1));
6595 inner
= TREE_OPERAND (inner
, 0);
6598 /* If we are going to be able to omit the AND below, we must do our
6599 operations as unsigned. If we must use the AND, we have a choice.
6600 Normally unsigned is faster, but for some machines signed is. */
6601 #ifdef LOAD_EXTEND_OP
6602 ops_unsigned
= (LOAD_EXTEND_OP (operand_mode
) == SIGN_EXTEND
6603 && !flag_syntax_only
) ? 0 : 1;
/* Pick the intermediate type matching the chosen signedness.  */
6608 signed_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 0);
6609 unsigned_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 1);
6610 intermediate_type
= ops_unsigned
? unsigned_type
: signed_type
;
6611 inner
= fold_convert_loc (loc
, intermediate_type
, inner
);
/* Shift the tested bit down to bit 0, then mask it.  */
6614 inner
= build2 (RSHIFT_EXPR
, intermediate_type
,
6615 inner
, size_int (bitnum
));
6617 one
= build_int_cst (intermediate_type
, 1);
/* For == invert the extracted bit with XOR 1.  */
6619 if (code
== EQ_EXPR
)
6620 inner
= fold_build2_loc (loc
, BIT_XOR_EXPR
, intermediate_type
, inner
, one
);
6622 /* Put the AND last so it can combine with more things. */
6623 inner
= build2 (BIT_AND_EXPR
, intermediate_type
, inner
, one
);
6625 /* Make sure to return the proper type. */
6626 inner
= fold_convert_loc (loc
, result_type
, inner
);
6633 /* Check whether we are allowed to reorder operands arg0 and arg1,
6634 such that the evaluation of arg1 occurs before arg0. */
/* Return whether ARG1 may be evaluated before ARG0: trivially when
   -fevaluation-order is off or either operand is constant; otherwise
   only when neither operand has side effects.
   NOTE(review): extraction-garbled -- the return values after the
   first two tests are in the embedded numbering gaps (presumably
   `return true;`) -- confirm against the original file.  */
6637 reorder_operands_p (const_tree arg0
, const_tree arg1
)
6639 if (! flag_evaluation_order
)
6641 if (TREE_CONSTANT (arg0
) || TREE_CONSTANT (arg1
))
6643 return ! TREE_SIDE_EFFECTS (arg0
)
6644 && ! TREE_SIDE_EFFECTS (arg1
);
6647 /* Test whether it is preferable to swap two operands, ARG0 and
6648 ARG1, for example because ARG0 is an integer constant and ARG1
6649 isn't. If REORDER is true, only recommend swapping if we can
6650 evaluate the operands in reverse order. */
/* Heuristic for canonical operand order (see the block comment above):
   tests in priority order whether ARG0/ARG1 should be swapped --
   constants last, SSA_NAMEs ordered by version and placed last,
   variables last.  REORDER additionally gates on evaluation-order
   safety via flag_evaluation_order and side effects.
   NOTE(review): extraction-garbled -- every `return true/false' body
   of these tests, plus the final default return, is in the embedded
   numbering gaps; the result of each test must be recovered from the
   original file.  */
6653 tree_swap_operands_p (const_tree arg0
, const_tree arg1
, bool reorder
)
/* Constant-class checks come first, before stripping sign nops.  */
6655 if (CONSTANT_CLASS_P (arg1
))
6657 if (CONSTANT_CLASS_P (arg0
))
6660 STRIP_SIGN_NOPS (arg0
);
6661 STRIP_SIGN_NOPS (arg1
);
6663 if (TREE_CONSTANT (arg1
))
6665 if (TREE_CONSTANT (arg0
))
6668 if (reorder
&& flag_evaluation_order
6669 && (TREE_SIDE_EFFECTS (arg0
) || TREE_SIDE_EFFECTS (arg1
)))
6672 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6673 for commutative and comparison operators. Ensuring a canonical
6674 form allows the optimizers to find additional redundancies without
6675 having to explicitly check for both orderings. */
6676 if (TREE_CODE (arg0
) == SSA_NAME
6677 && TREE_CODE (arg1
) == SSA_NAME
6678 && SSA_NAME_VERSION (arg0
) > SSA_NAME_VERSION (arg1
))
6681 /* Put SSA_NAMEs last. */
6682 if (TREE_CODE (arg1
) == SSA_NAME
)
6684 if (TREE_CODE (arg0
) == SSA_NAME
)
6687 /* Put variables last. */
6696 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6697 ARG0 is extended to a wider type. */
/* Fold ARG0 CODE ARG1 where ARG0 was widened from a narrower type:
   redo the comparison in the shorter type when the signedness and
   precision conditions below allow it, or decide the comparison
   outright when the constant ARG1 lies outside the shorter type's
   [lower_bound_in_type, upper_bound_in_type] range.
   NOTE(review): extraction-garbled -- declarations (arg1_unw, min,
   max, above, below), the #endif of the funcptr guard, the final
   switch (code) skeleton with its case labels, and several returns are
   in the embedded numbering gaps; restore from the original file.  */
6700 fold_widened_comparison (location_t loc
, enum tree_code code
,
6701 tree type
, tree arg0
, tree arg1
)
6703 tree arg0_unw
= get_unwidened (arg0
, NULL_TREE
);
6705 tree shorter_type
, outer_type
;
/* Nothing to do when ARG0 was not actually widened.  */
6709 if (arg0_unw
== arg0
)
6711 shorter_type
= TREE_TYPE (arg0_unw
);
6713 #ifdef HAVE_canonicalize_funcptr_for_compare
6714 /* Disable this optimization if we're casting a function pointer
6715 type on targets that require function pointer canonicalization. */
6716 if (HAVE_canonicalize_funcptr_for_compare
6717 && TREE_CODE (shorter_type
) == POINTER_TYPE
6718 && TREE_CODE (TREE_TYPE (shorter_type
)) == FUNCTION_TYPE
)
6722 if (TYPE_PRECISION (TREE_TYPE (arg0
)) <= TYPE_PRECISION (shorter_type
))
6725 arg1_unw
= get_unwidened (arg1
, NULL_TREE
)
;
6727 /* If possible, express the comparison in the shorter mode. */
6728 if ((code
== EQ_EXPR
|| code
== NE_EXPR
6729 || TYPE_UNSIGNED (TREE_TYPE (arg0
)) == TYPE_UNSIGNED (shorter_type
))
6730 && (TREE_TYPE (arg1_unw
) == shorter_type
6731 || ((TYPE_PRECISION (shorter_type
)
6732 >= TYPE_PRECISION (TREE_TYPE (arg1_unw
)))
6733 && (TYPE_UNSIGNED (shorter_type
)
6734 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw
))))
6735 || (TREE_CODE (arg1_unw
) == INTEGER_CST
6736 && (TREE_CODE (shorter_type
) == INTEGER_TYPE
6737 || TREE_CODE (shorter_type
) == BOOLEAN_TYPE
)
6738 && int_fits_type_p (arg1_unw
, shorter_type
))))
6739 return fold_build2_loc (loc
, code
, type
, arg0_unw
,
6740 fold_convert_loc (loc
, shorter_type
, arg1_unw
));
6742 if (TREE_CODE (arg1_unw
) != INTEGER_CST
6743 || TREE_CODE (shorter_type
) != INTEGER_TYPE
6744 || !int_fits_type_p (arg1_unw
, shorter_type
))
6747 /* If we are comparing with the integer that does not fit into the range
6748 of the shorter type, the result is known. */
6749 outer_type
= TREE_TYPE (arg1_unw
);
6750 min
= lower_bound_in_type (outer_type
, shorter_type
);
6751 max
= upper_bound_in_type (outer_type
, shorter_type
);
/* above/below: whether ARG1 is beyond max or beneath min.  */
6753 above
= integer_nonzerop (fold_relational_const (LT_EXPR
, type
,
6755 below
= integer_nonzerop (fold_relational_const (LT_EXPR
, type
,
/* Constant results for the out-of-range cases (the selecting switch
   and case labels are among the missing lines).  */
6762 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
6767 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
6773 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
6775 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
6780 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
6782 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
6791 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6792 ARG0 just the signedness is changed. */
6795 fold_sign_changed_comparison (location_t loc
, enum tree_code code
, tree type
,
6796 tree arg0
, tree arg1
)
6799 tree inner_type
, outer_type
;
6801 if (!CONVERT_EXPR_P (arg0
))
6804 outer_type
= TREE_TYPE (arg0
);
6805 arg0_inner
= TREE_OPERAND (arg0
, 0);
6806 inner_type
= TREE_TYPE (arg0_inner
);
6808 #ifdef HAVE_canonicalize_funcptr_for_compare
6809 /* Disable this optimization if we're casting a function pointer
6810 type on targets that require function pointer canonicalization. */
6811 if (HAVE_canonicalize_funcptr_for_compare
6812 && TREE_CODE (inner_type
) == POINTER_TYPE
6813 && TREE_CODE (TREE_TYPE (inner_type
)) == FUNCTION_TYPE
)
6817 if (TYPE_PRECISION (inner_type
) != TYPE_PRECISION (outer_type
))
6820 if (TREE_CODE (arg1
) != INTEGER_CST
6821 && !(CONVERT_EXPR_P (arg1
)
6822 && TREE_TYPE (TREE_OPERAND (arg1
, 0)) == inner_type
))
6825 if (TYPE_UNSIGNED (inner_type
) != TYPE_UNSIGNED (outer_type
)
6830 if (POINTER_TYPE_P (inner_type
) != POINTER_TYPE_P (outer_type
))
6833 if (TREE_CODE (arg1
) == INTEGER_CST
)
6834 arg1
= force_fit_type (inner_type
, wi::to_widest (arg1
), 0,
6835 TREE_OVERFLOW (arg1
));
6837 arg1
= fold_convert_loc (loc
, inner_type
, arg1
);
6839 return fold_build2_loc (loc
, code
, type
, arg0_inner
, arg1
);
6843 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6844 means A >= Y && A != MAX, but in this case we know that
6845 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6848 fold_to_nonsharp_ineq_using_bound (location_t loc
, tree ineq
, tree bound
)
6850 tree a
, typea
, type
= TREE_TYPE (ineq
), a1
, diff
, y
;
6852 if (TREE_CODE (bound
) == LT_EXPR
)
6853 a
= TREE_OPERAND (bound
, 0);
6854 else if (TREE_CODE (bound
) == GT_EXPR
)
6855 a
= TREE_OPERAND (bound
, 1);
6859 typea
= TREE_TYPE (a
);
6860 if (!INTEGRAL_TYPE_P (typea
)
6861 && !POINTER_TYPE_P (typea
))
6864 if (TREE_CODE (ineq
) == LT_EXPR
)
6866 a1
= TREE_OPERAND (ineq
, 1);
6867 y
= TREE_OPERAND (ineq
, 0);
6869 else if (TREE_CODE (ineq
) == GT_EXPR
)
6871 a1
= TREE_OPERAND (ineq
, 0);
6872 y
= TREE_OPERAND (ineq
, 1);
6877 if (TREE_TYPE (a1
) != typea
)
6880 if (POINTER_TYPE_P (typea
))
6882 /* Convert the pointer types into integer before taking the difference. */
6883 tree ta
= fold_convert_loc (loc
, ssizetype
, a
);
6884 tree ta1
= fold_convert_loc (loc
, ssizetype
, a1
);
6885 diff
= fold_binary_loc (loc
, MINUS_EXPR
, ssizetype
, ta1
, ta
);
6888 diff
= fold_binary_loc (loc
, MINUS_EXPR
, typea
, a1
, a
);
6890 if (!diff
|| !integer_onep (diff
))
6893 return fold_build2_loc (loc
, GE_EXPR
, type
, a
, y
);
6896 /* Fold a sum or difference of at least one multiplication.
6897 Returns the folded tree or NULL if no simplification could be made. */
6900 fold_plusminus_mult_expr (location_t loc
, enum tree_code code
, tree type
,
6901 tree arg0
, tree arg1
)
6903 tree arg00
, arg01
, arg10
, arg11
;
6904 tree alt0
= NULL_TREE
, alt1
= NULL_TREE
, same
;
6906 /* (A * C) +- (B * C) -> (A+-B) * C.
6907 (A * C) +- A -> A * (C+-1).
6908 We are most concerned about the case where C is a constant,
6909 but other combinations show up during loop reduction. Since
6910 it is not difficult, try all four possibilities. */
6912 if (TREE_CODE (arg0
) == MULT_EXPR
)
6914 arg00
= TREE_OPERAND (arg0
, 0);
6915 arg01
= TREE_OPERAND (arg0
, 1);
6917 else if (TREE_CODE (arg0
) == INTEGER_CST
)
6919 arg00
= build_one_cst (type
);
6924 /* We cannot generate constant 1 for fract. */
6925 if (ALL_FRACT_MODE_P (TYPE_MODE (type
)))
6928 arg01
= build_one_cst (type
);
6930 if (TREE_CODE (arg1
) == MULT_EXPR
)
6932 arg10
= TREE_OPERAND (arg1
, 0);
6933 arg11
= TREE_OPERAND (arg1
, 1);
6935 else if (TREE_CODE (arg1
) == INTEGER_CST
)
6937 arg10
= build_one_cst (type
);
6938 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
6939 the purpose of this canonicalization. */
6940 if (wi::neg_p (arg1
, TYPE_SIGN (TREE_TYPE (arg1
)))
6941 && negate_expr_p (arg1
)
6942 && code
== PLUS_EXPR
)
6944 arg11
= negate_expr (arg1
);
6952 /* We cannot generate constant 1 for fract. */
6953 if (ALL_FRACT_MODE_P (TYPE_MODE (type
)))
6956 arg11
= build_one_cst (type
);
6960 if (operand_equal_p (arg01
, arg11
, 0))
6961 same
= arg01
, alt0
= arg00
, alt1
= arg10
;
6962 else if (operand_equal_p (arg00
, arg10
, 0))
6963 same
= arg00
, alt0
= arg01
, alt1
= arg11
;
6964 else if (operand_equal_p (arg00
, arg11
, 0))
6965 same
= arg00
, alt0
= arg01
, alt1
= arg10
;
6966 else if (operand_equal_p (arg01
, arg10
, 0))
6967 same
= arg01
, alt0
= arg00
, alt1
= arg11
;
6969 /* No identical multiplicands; see if we can find a common
6970 power-of-two factor in non-power-of-two multiplies. This
6971 can help in multi-dimensional array access. */
6972 else if (tree_fits_shwi_p (arg01
)
6973 && tree_fits_shwi_p (arg11
))
6975 HOST_WIDE_INT int01
, int11
, tmp
;
6978 int01
= tree_to_shwi (arg01
);
6979 int11
= tree_to_shwi (arg11
);
6981 /* Move min of absolute values to int11. */
6982 if (absu_hwi (int01
) < absu_hwi (int11
))
6984 tmp
= int01
, int01
= int11
, int11
= tmp
;
6985 alt0
= arg00
, arg00
= arg10
, arg10
= alt0
;
6992 if (exact_log2 (absu_hwi (int11
)) > 0 && int01
% int11
== 0
6993 /* The remainder should not be a constant, otherwise we
6994 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
6995 increased the number of multiplications necessary. */
6996 && TREE_CODE (arg10
) != INTEGER_CST
)
6998 alt0
= fold_build2_loc (loc
, MULT_EXPR
, TREE_TYPE (arg00
), arg00
,
6999 build_int_cst (TREE_TYPE (arg00
),
7004 maybe_same
= alt0
, alt0
= alt1
, alt1
= maybe_same
;
7009 return fold_build2_loc (loc
, MULT_EXPR
, type
,
7010 fold_build2_loc (loc
, code
, type
,
7011 fold_convert_loc (loc
, type
, alt0
),
7012 fold_convert_loc (loc
, type
, alt1
)),
7013 fold_convert_loc (loc
, type
, same
));
7018 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7019 specified by EXPR into the buffer PTR of length LEN bytes.
7020 Return the number of bytes placed in the buffer, or zero
7024 native_encode_int (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7026 tree type
= TREE_TYPE (expr
);
7027 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7028 int byte
, offset
, word
, words
;
7029 unsigned char value
;
7031 if ((off
== -1 && total_bytes
> len
)
7032 || off
>= total_bytes
)
7036 words
= total_bytes
/ UNITS_PER_WORD
;
7038 for (byte
= 0; byte
< total_bytes
; byte
++)
7040 int bitpos
= byte
* BITS_PER_UNIT
;
7041 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7043 value
= wi::extract_uhwi (wi::to_widest (expr
), bitpos
, BITS_PER_UNIT
);
7045 if (total_bytes
> UNITS_PER_WORD
)
7047 word
= byte
/ UNITS_PER_WORD
;
7048 if (WORDS_BIG_ENDIAN
)
7049 word
= (words
- 1) - word
;
7050 offset
= word
* UNITS_PER_WORD
;
7051 if (BYTES_BIG_ENDIAN
)
7052 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7054 offset
+= byte
% UNITS_PER_WORD
;
7057 offset
= BYTES_BIG_ENDIAN
? (total_bytes
- 1) - byte
: byte
;
7059 && offset
- off
< len
)
7060 ptr
[offset
- off
] = value
;
7062 return MIN (len
, total_bytes
- off
);
7066 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7067 specified by EXPR into the buffer PTR of length LEN bytes.
7068 Return the number of bytes placed in the buffer, or zero
7072 native_encode_fixed (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7074 tree type
= TREE_TYPE (expr
);
7075 machine_mode mode
= TYPE_MODE (type
);
7076 int total_bytes
= GET_MODE_SIZE (mode
);
7077 FIXED_VALUE_TYPE value
;
7078 tree i_value
, i_type
;
7080 if (total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
7083 i_type
= lang_hooks
.types
.type_for_size (GET_MODE_BITSIZE (mode
), 1);
7085 if (NULL_TREE
== i_type
7086 || TYPE_PRECISION (i_type
) != total_bytes
)
7089 value
= TREE_FIXED_CST (expr
);
7090 i_value
= double_int_to_tree (i_type
, value
.data
);
7092 return native_encode_int (i_value
, ptr
, len
, off
);
7096 /* Subroutine of native_encode_expr. Encode the REAL_CST
7097 specified by EXPR into the buffer PTR of length LEN bytes.
7098 Return the number of bytes placed in the buffer, or zero
7102 native_encode_real (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7104 tree type
= TREE_TYPE (expr
);
7105 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7106 int byte
, offset
, word
, words
, bitpos
;
7107 unsigned char value
;
7109 /* There are always 32 bits in each long, no matter the size of
7110 the hosts long. We handle floating point representations with
7114 if ((off
== -1 && total_bytes
> len
)
7115 || off
>= total_bytes
)
7119 words
= (32 / BITS_PER_UNIT
) / UNITS_PER_WORD
;
7121 real_to_target (tmp
, TREE_REAL_CST_PTR (expr
), TYPE_MODE (type
));
7123 for (bitpos
= 0; bitpos
< total_bytes
* BITS_PER_UNIT
;
7124 bitpos
+= BITS_PER_UNIT
)
7126 byte
= (bitpos
/ BITS_PER_UNIT
) & 3;
7127 value
= (unsigned char) (tmp
[bitpos
/ 32] >> (bitpos
& 31));
7129 if (UNITS_PER_WORD
< 4)
7131 word
= byte
/ UNITS_PER_WORD
;
7132 if (WORDS_BIG_ENDIAN
)
7133 word
= (words
- 1) - word
;
7134 offset
= word
* UNITS_PER_WORD
;
7135 if (BYTES_BIG_ENDIAN
)
7136 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7138 offset
+= byte
% UNITS_PER_WORD
;
7141 offset
= BYTES_BIG_ENDIAN
? 3 - byte
: byte
;
7142 offset
= offset
+ ((bitpos
/ BITS_PER_UNIT
) & ~3);
7144 && offset
- off
< len
)
7145 ptr
[offset
- off
] = value
;
7147 return MIN (len
, total_bytes
- off
);
7150 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7151 specified by EXPR into the buffer PTR of length LEN bytes.
7152 Return the number of bytes placed in the buffer, or zero
7156 native_encode_complex (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7161 part
= TREE_REALPART (expr
);
7162 rsize
= native_encode_expr (part
, ptr
, len
, off
);
7166 part
= TREE_IMAGPART (expr
);
7168 off
= MAX (0, off
- GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part
))));
7169 isize
= native_encode_expr (part
, ptr
+rsize
, len
-rsize
, off
);
7173 return rsize
+ isize
;
7177 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7178 specified by EXPR into the buffer PTR of length LEN bytes.
7179 Return the number of bytes placed in the buffer, or zero
7183 native_encode_vector (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7190 count
= VECTOR_CST_NELTS (expr
);
7191 itype
= TREE_TYPE (TREE_TYPE (expr
));
7192 size
= GET_MODE_SIZE (TYPE_MODE (itype
));
7193 for (i
= 0; i
< count
; i
++)
7200 elem
= VECTOR_CST_ELT (expr
, i
);
7201 int res
= native_encode_expr (elem
, ptr
+offset
, len
-offset
, off
);
7202 if ((off
== -1 && res
!= size
)
7215 /* Subroutine of native_encode_expr. Encode the STRING_CST
7216 specified by EXPR into the buffer PTR of length LEN bytes.
7217 Return the number of bytes placed in the buffer, or zero
7221 native_encode_string (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7223 tree type
= TREE_TYPE (expr
);
7224 HOST_WIDE_INT total_bytes
;
7226 if (TREE_CODE (type
) != ARRAY_TYPE
7227 || TREE_CODE (TREE_TYPE (type
)) != INTEGER_TYPE
7228 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type
))) != BITS_PER_UNIT
7229 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type
)))
7231 total_bytes
= tree_to_shwi (TYPE_SIZE_UNIT (type
));
7232 if ((off
== -1 && total_bytes
> len
)
7233 || off
>= total_bytes
)
7237 if (TREE_STRING_LENGTH (expr
) - off
< MIN (total_bytes
, len
))
7240 if (off
< TREE_STRING_LENGTH (expr
))
7242 written
= MIN (len
, TREE_STRING_LENGTH (expr
) - off
);
7243 memcpy (ptr
, TREE_STRING_POINTER (expr
) + off
, written
);
7245 memset (ptr
+ written
, 0,
7246 MIN (total_bytes
- written
, len
- written
));
7249 memcpy (ptr
, TREE_STRING_POINTER (expr
) + off
, MIN (total_bytes
, len
));
7250 return MIN (total_bytes
- off
, len
);
7254 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7255 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7256 buffer PTR of length LEN bytes. If OFF is not -1 then start
7257 the encoding at byte offset OFF and encode at most LEN bytes.
7258 Return the number of bytes placed in the buffer, or zero upon failure. */
7261 native_encode_expr (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7263 switch (TREE_CODE (expr
))
7266 return native_encode_int (expr
, ptr
, len
, off
);
7269 return native_encode_real (expr
, ptr
, len
, off
);
7272 return native_encode_fixed (expr
, ptr
, len
, off
);
7275 return native_encode_complex (expr
, ptr
, len
, off
);
7278 return native_encode_vector (expr
, ptr
, len
, off
);
7281 return native_encode_string (expr
, ptr
, len
, off
);
7289 /* Subroutine of native_interpret_expr. Interpret the contents of
7290 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7291 If the buffer cannot be interpreted, return NULL_TREE. */
7294 native_interpret_int (tree type
, const unsigned char *ptr
, int len
)
7296 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7298 if (total_bytes
> len
7299 || total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
7302 wide_int result
= wi::from_buffer (ptr
, total_bytes
);
7304 return wide_int_to_tree (type
, result
);
7308 /* Subroutine of native_interpret_expr. Interpret the contents of
7309 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7310 If the buffer cannot be interpreted, return NULL_TREE. */
7313 native_interpret_fixed (tree type
, const unsigned char *ptr
, int len
)
7315 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7317 FIXED_VALUE_TYPE fixed_value
;
7319 if (total_bytes
> len
7320 || total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
7323 result
= double_int::from_buffer (ptr
, total_bytes
);
7324 fixed_value
= fixed_from_double_int (result
, TYPE_MODE (type
));
7326 return build_fixed (type
, fixed_value
);
7330 /* Subroutine of native_interpret_expr. Interpret the contents of
7331 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7332 If the buffer cannot be interpreted, return NULL_TREE. */
7335 native_interpret_real (tree type
, const unsigned char *ptr
, int len
)
7337 machine_mode mode
= TYPE_MODE (type
);
7338 int total_bytes
= GET_MODE_SIZE (mode
);
7339 int byte
, offset
, word
, words
, bitpos
;
7340 unsigned char value
;
7341 /* There are always 32 bits in each long, no matter the size of
7342 the hosts long. We handle floating point representations with
7347 total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7348 if (total_bytes
> len
|| total_bytes
> 24)
7350 words
= (32 / BITS_PER_UNIT
) / UNITS_PER_WORD
;
7352 memset (tmp
, 0, sizeof (tmp
));
7353 for (bitpos
= 0; bitpos
< total_bytes
* BITS_PER_UNIT
;
7354 bitpos
+= BITS_PER_UNIT
)
7356 byte
= (bitpos
/ BITS_PER_UNIT
) & 3;
7357 if (UNITS_PER_WORD
< 4)
7359 word
= byte
/ UNITS_PER_WORD
;
7360 if (WORDS_BIG_ENDIAN
)
7361 word
= (words
- 1) - word
;
7362 offset
= word
* UNITS_PER_WORD
;
7363 if (BYTES_BIG_ENDIAN
)
7364 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7366 offset
+= byte
% UNITS_PER_WORD
;
7369 offset
= BYTES_BIG_ENDIAN
? 3 - byte
: byte
;
7370 value
= ptr
[offset
+ ((bitpos
/ BITS_PER_UNIT
) & ~3)];
7372 tmp
[bitpos
/ 32] |= (unsigned long)value
<< (bitpos
& 31);
7375 real_from_target (&r
, tmp
, mode
);
7376 return build_real (type
, r
);
7380 /* Subroutine of native_interpret_expr. Interpret the contents of
7381 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7382 If the buffer cannot be interpreted, return NULL_TREE. */
7385 native_interpret_complex (tree type
, const unsigned char *ptr
, int len
)
7387 tree etype
, rpart
, ipart
;
7390 etype
= TREE_TYPE (type
);
7391 size
= GET_MODE_SIZE (TYPE_MODE (etype
));
7394 rpart
= native_interpret_expr (etype
, ptr
, size
);
7397 ipart
= native_interpret_expr (etype
, ptr
+size
, size
);
7400 return build_complex (type
, rpart
, ipart
);
7404 /* Subroutine of native_interpret_expr. Interpret the contents of
7405 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7406 If the buffer cannot be interpreted, return NULL_TREE. */
7409 native_interpret_vector (tree type
, const unsigned char *ptr
, int len
)
7415 etype
= TREE_TYPE (type
);
7416 size
= GET_MODE_SIZE (TYPE_MODE (etype
));
7417 count
= TYPE_VECTOR_SUBPARTS (type
);
7418 if (size
* count
> len
)
7421 elements
= XALLOCAVEC (tree
, count
);
7422 for (i
= count
- 1; i
>= 0; i
--)
7424 elem
= native_interpret_expr (etype
, ptr
+(i
*size
), size
);
7429 return build_vector (type
, elements
);
7433 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7434 the buffer PTR of length LEN as a constant of type TYPE. For
7435 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7436 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7437 return NULL_TREE. */
7440 native_interpret_expr (tree type
, const unsigned char *ptr
, int len
)
7442 switch (TREE_CODE (type
))
7448 case REFERENCE_TYPE
:
7449 return native_interpret_int (type
, ptr
, len
);
7452 return native_interpret_real (type
, ptr
, len
);
7454 case FIXED_POINT_TYPE
:
7455 return native_interpret_fixed (type
, ptr
, len
);
7458 return native_interpret_complex (type
, ptr
, len
);
7461 return native_interpret_vector (type
, ptr
, len
);
7468 /* Returns true if we can interpret the contents of a native encoding
7472 can_native_interpret_type_p (tree type
)
7474 switch (TREE_CODE (type
))
7480 case REFERENCE_TYPE
:
7481 case FIXED_POINT_TYPE
:
7491 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7492 TYPE at compile-time. If we're unable to perform the conversion
7493 return NULL_TREE. */
7496 fold_view_convert_expr (tree type
, tree expr
)
7498 /* We support up to 512-bit values (for V8DFmode). */
7499 unsigned char buffer
[64];
7502 /* Check that the host and target are sane. */
7503 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8)
7506 len
= native_encode_expr (expr
, buffer
, sizeof (buffer
));
7510 return native_interpret_expr (type
, buffer
, len
);
7513 /* Build an expression for the address of T. Folds away INDIRECT_REF
7514 to avoid confusing the gimplify process. */
7517 build_fold_addr_expr_with_type_loc (location_t loc
, tree t
, tree ptrtype
)
7519 /* The size of the object is not relevant when talking about its address. */
7520 if (TREE_CODE (t
) == WITH_SIZE_EXPR
)
7521 t
= TREE_OPERAND (t
, 0);
7523 if (TREE_CODE (t
) == INDIRECT_REF
)
7525 t
= TREE_OPERAND (t
, 0);
7527 if (TREE_TYPE (t
) != ptrtype
)
7528 t
= build1_loc (loc
, NOP_EXPR
, ptrtype
, t
);
7530 else if (TREE_CODE (t
) == MEM_REF
7531 && integer_zerop (TREE_OPERAND (t
, 1)))
7532 return TREE_OPERAND (t
, 0);
7533 else if (TREE_CODE (t
) == MEM_REF
7534 && TREE_CODE (TREE_OPERAND (t
, 0)) == INTEGER_CST
)
7535 return fold_binary (POINTER_PLUS_EXPR
, ptrtype
,
7536 TREE_OPERAND (t
, 0),
7537 convert_to_ptrofftype (TREE_OPERAND (t
, 1)));
7538 else if (TREE_CODE (t
) == VIEW_CONVERT_EXPR
)
7540 t
= build_fold_addr_expr_loc (loc
, TREE_OPERAND (t
, 0));
7542 if (TREE_TYPE (t
) != ptrtype
)
7543 t
= fold_convert_loc (loc
, ptrtype
, t
);
7546 t
= build1_loc (loc
, ADDR_EXPR
, ptrtype
, t
);
7551 /* Build an expression for the address of T. */
7554 build_fold_addr_expr_loc (location_t loc
, tree t
)
7556 tree ptrtype
= build_pointer_type (TREE_TYPE (t
));
7558 return build_fold_addr_expr_with_type_loc (loc
, t
, ptrtype
);
7561 static bool vec_cst_ctor_to_array (tree
, tree
*);
7563 /* Fold a unary expression of code CODE and type TYPE with operand
7564 OP0. Return the folded expression if folding is successful.
7565 Otherwise, return NULL_TREE. */
7568 fold_unary_loc (location_t loc
, enum tree_code code
, tree type
, tree op0
)
7572 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
7574 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
7575 && TREE_CODE_LENGTH (code
) == 1);
7577 tem
= generic_simplify (loc
, code
, type
, op0
);
7584 if (CONVERT_EXPR_CODE_P (code
)
7585 || code
== FLOAT_EXPR
|| code
== ABS_EXPR
|| code
== NEGATE_EXPR
)
7587 /* Don't use STRIP_NOPS, because signedness of argument type
7589 STRIP_SIGN_NOPS (arg0
);
7593 /* Strip any conversions that don't change the mode. This
7594 is safe for every expression, except for a comparison
7595 expression because its signedness is derived from its
7598 Note that this is done as an internal manipulation within
7599 the constant folder, in order to find the simplest
7600 representation of the arguments so that their form can be
7601 studied. In any cases, the appropriate type conversions
7602 should be put back in the tree that will get out of the
7608 if (TREE_CODE_CLASS (code
) == tcc_unary
)
7610 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
7611 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7612 fold_build1_loc (loc
, code
, type
,
7613 fold_convert_loc (loc
, TREE_TYPE (op0
),
7614 TREE_OPERAND (arg0
, 1))));
7615 else if (TREE_CODE (arg0
) == COND_EXPR
)
7617 tree arg01
= TREE_OPERAND (arg0
, 1);
7618 tree arg02
= TREE_OPERAND (arg0
, 2);
7619 if (! VOID_TYPE_P (TREE_TYPE (arg01
)))
7620 arg01
= fold_build1_loc (loc
, code
, type
,
7621 fold_convert_loc (loc
,
7622 TREE_TYPE (op0
), arg01
));
7623 if (! VOID_TYPE_P (TREE_TYPE (arg02
)))
7624 arg02
= fold_build1_loc (loc
, code
, type
,
7625 fold_convert_loc (loc
,
7626 TREE_TYPE (op0
), arg02
));
7627 tem
= fold_build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7630 /* If this was a conversion, and all we did was to move into
7631 inside the COND_EXPR, bring it back out. But leave it if
7632 it is a conversion from integer to integer and the
7633 result precision is no wider than a word since such a
7634 conversion is cheap and may be optimized away by combine,
7635 while it couldn't if it were outside the COND_EXPR. Then return
7636 so we don't get into an infinite recursion loop taking the
7637 conversion out and then back in. */
7639 if ((CONVERT_EXPR_CODE_P (code
)
7640 || code
== NON_LVALUE_EXPR
)
7641 && TREE_CODE (tem
) == COND_EXPR
7642 && TREE_CODE (TREE_OPERAND (tem
, 1)) == code
7643 && TREE_CODE (TREE_OPERAND (tem
, 2)) == code
7644 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 1))
7645 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 2))
7646 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))
7647 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)))
7648 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7650 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))))
7651 && TYPE_PRECISION (TREE_TYPE (tem
)) <= BITS_PER_WORD
)
7652 || flag_syntax_only
))
7653 tem
= build1_loc (loc
, code
, type
,
7655 TREE_TYPE (TREE_OPERAND
7656 (TREE_OPERAND (tem
, 1), 0)),
7657 TREE_OPERAND (tem
, 0),
7658 TREE_OPERAND (TREE_OPERAND (tem
, 1), 0),
7659 TREE_OPERAND (TREE_OPERAND (tem
, 2),
7668 /* Re-association barriers around constants and other re-association
7669 barriers can be removed. */
7670 if (CONSTANT_CLASS_P (op0
)
7671 || TREE_CODE (op0
) == PAREN_EXPR
)
7672 return fold_convert_loc (loc
, type
, op0
);
7675 case NON_LVALUE_EXPR
:
7676 if (!maybe_lvalue_p (op0
))
7677 return fold_convert_loc (loc
, type
, op0
);
7682 case FIX_TRUNC_EXPR
:
7683 if (TREE_TYPE (op0
) == type
)
7686 if (COMPARISON_CLASS_P (op0
))
7688 /* If we have (type) (a CMP b) and type is an integral type, return
7689 new expression involving the new type. Canonicalize
7690 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7692 Do not fold the result as that would not simplify further, also
7693 folding again results in recursions. */
7694 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
7695 return build2_loc (loc
, TREE_CODE (op0
), type
,
7696 TREE_OPERAND (op0
, 0),
7697 TREE_OPERAND (op0
, 1));
7698 else if (!INTEGRAL_TYPE_P (type
) && !VOID_TYPE_P (type
)
7699 && TREE_CODE (type
) != VECTOR_TYPE
)
7700 return build3_loc (loc
, COND_EXPR
, type
, op0
,
7701 constant_boolean_node (true, type
),
7702 constant_boolean_node (false, type
));
7705 /* Handle cases of two conversions in a row. */
7706 if (CONVERT_EXPR_P (op0
))
7708 tree inside_type
= TREE_TYPE (TREE_OPERAND (op0
, 0));
7709 tree inter_type
= TREE_TYPE (op0
);
7710 int inside_int
= INTEGRAL_TYPE_P (inside_type
);
7711 int inside_ptr
= POINTER_TYPE_P (inside_type
);
7712 int inside_float
= FLOAT_TYPE_P (inside_type
);
7713 int inside_vec
= TREE_CODE (inside_type
) == VECTOR_TYPE
;
7714 unsigned int inside_prec
= TYPE_PRECISION (inside_type
);
7715 int inside_unsignedp
= TYPE_UNSIGNED (inside_type
);
7716 int inter_int
= INTEGRAL_TYPE_P (inter_type
);
7717 int inter_ptr
= POINTER_TYPE_P (inter_type
);
7718 int inter_float
= FLOAT_TYPE_P (inter_type
);
7719 int inter_vec
= TREE_CODE (inter_type
) == VECTOR_TYPE
;
7720 unsigned int inter_prec
= TYPE_PRECISION (inter_type
);
7721 int inter_unsignedp
= TYPE_UNSIGNED (inter_type
);
7722 int final_int
= INTEGRAL_TYPE_P (type
);
7723 int final_ptr
= POINTER_TYPE_P (type
);
7724 int final_float
= FLOAT_TYPE_P (type
);
7725 int final_vec
= TREE_CODE (type
) == VECTOR_TYPE
;
7726 unsigned int final_prec
= TYPE_PRECISION (type
);
7727 int final_unsignedp
= TYPE_UNSIGNED (type
);
7729 /* In addition to the cases of two conversions in a row
7730 handled below, if we are converting something to its own
7731 type via an object of identical or wider precision, neither
7732 conversion is needed. */
7733 if (TYPE_MAIN_VARIANT (inside_type
) == TYPE_MAIN_VARIANT (type
)
7734 && (((inter_int
|| inter_ptr
) && final_int
)
7735 || (inter_float
&& final_float
))
7736 && inter_prec
>= final_prec
)
7737 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7739 /* Likewise, if the intermediate and initial types are either both
7740 float or both integer, we don't need the middle conversion if the
7741 former is wider than the latter and doesn't change the signedness
7742 (for integers). Avoid this if the final type is a pointer since
7743 then we sometimes need the middle conversion. Likewise if the
7744 final type has a precision not equal to the size of its mode. */
7745 if (((inter_int
&& inside_int
)
7746 || (inter_float
&& inside_float
)
7747 || (inter_vec
&& inside_vec
))
7748 && inter_prec
>= inside_prec
7749 && (inter_float
|| inter_vec
7750 || inter_unsignedp
== inside_unsignedp
)
7751 && ! (final_prec
!= GET_MODE_PRECISION (TYPE_MODE (type
))
7752 && TYPE_MODE (type
) == TYPE_MODE (inter_type
))
7754 && (! final_vec
|| inter_prec
== inside_prec
))
7755 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7757 /* If we have a sign-extension of a zero-extended value, we can
7758 replace that by a single zero-extension. Likewise if the
7759 final conversion does not change precision we can drop the
7760 intermediate conversion. */
7761 if (inside_int
&& inter_int
&& final_int
7762 && ((inside_prec
< inter_prec
&& inter_prec
< final_prec
7763 && inside_unsignedp
&& !inter_unsignedp
)
7764 || final_prec
== inter_prec
))
7765 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7767 /* Two conversions in a row are not needed unless:
7768 - some conversion is floating-point (overstrict for now), or
7769 - some conversion is a vector (overstrict for now), or
7770 - the intermediate type is narrower than both initial and
7772 - the intermediate type and innermost type differ in signedness,
7773 and the outermost type is wider than the intermediate, or
7774 - the initial type is a pointer type and the precisions of the
7775 intermediate and final types differ, or
7776 - the final type is a pointer type and the precisions of the
7777 initial and intermediate types differ. */
7778 if (! inside_float
&& ! inter_float
&& ! final_float
7779 && ! inside_vec
&& ! inter_vec
&& ! final_vec
7780 && (inter_prec
>= inside_prec
|| inter_prec
>= final_prec
)
7781 && ! (inside_int
&& inter_int
7782 && inter_unsignedp
!= inside_unsignedp
7783 && inter_prec
< final_prec
)
7784 && ((inter_unsignedp
&& inter_prec
> inside_prec
)
7785 == (final_unsignedp
&& final_prec
> inter_prec
))
7786 && ! (inside_ptr
&& inter_prec
!= final_prec
)
7787 && ! (final_ptr
&& inside_prec
!= inter_prec
)
7788 && ! (final_prec
!= GET_MODE_PRECISION (TYPE_MODE (type
))
7789 && TYPE_MODE (type
) == TYPE_MODE (inter_type
)))
7790 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7793 /* Handle (T *)&A.B.C for A being of type T and B and C
7794 living at offset zero. This occurs frequently in
7795 C++ upcasting and then accessing the base. */
7796 if (TREE_CODE (op0
) == ADDR_EXPR
7797 && POINTER_TYPE_P (type
)
7798 && handled_component_p (TREE_OPERAND (op0
, 0)))
7800 HOST_WIDE_INT bitsize
, bitpos
;
7803 int unsignedp
, volatilep
;
7804 tree base
= TREE_OPERAND (op0
, 0);
7805 base
= get_inner_reference (base
, &bitsize
, &bitpos
, &offset
,
7806 &mode
, &unsignedp
, &volatilep
, false);
7807 /* If the reference was to a (constant) zero offset, we can use
7808 the address of the base if it has the same base type
7809 as the result type and the pointer type is unqualified. */
7810 if (! offset
&& bitpos
== 0
7811 && (TYPE_MAIN_VARIANT (TREE_TYPE (type
))
7812 == TYPE_MAIN_VARIANT (TREE_TYPE (base
)))
7813 && TYPE_QUALS (type
) == TYPE_UNQUALIFIED
)
7814 return fold_convert_loc (loc
, type
,
7815 build_fold_addr_expr_loc (loc
, base
));
7818 if (TREE_CODE (op0
) == MODIFY_EXPR
7819 && TREE_CONSTANT (TREE_OPERAND (op0
, 1))
7820 /* Detect assigning a bitfield. */
7821 && !(TREE_CODE (TREE_OPERAND (op0
, 0)) == COMPONENT_REF
7823 (TREE_OPERAND (TREE_OPERAND (op0
, 0), 1))))
7825 /* Don't leave an assignment inside a conversion
7826 unless assigning a bitfield. */
7827 tem
= fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 1));
7828 /* First do the assignment, then return converted constant. */
7829 tem
= build2_loc (loc
, COMPOUND_EXPR
, TREE_TYPE (tem
), op0
, tem
);
7830 TREE_NO_WARNING (tem
) = 1;
7831 TREE_USED (tem
) = 1;
7835 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7836 constants (if x has signed type, the sign bit cannot be set
7837 in c). This folds extension into the BIT_AND_EXPR.
7838 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7839 very likely don't have maximal range for their precision and this
7840 transformation effectively doesn't preserve non-maximal ranges. */
7841 if (TREE_CODE (type
) == INTEGER_TYPE
7842 && TREE_CODE (op0
) == BIT_AND_EXPR
7843 && TREE_CODE (TREE_OPERAND (op0
, 1)) == INTEGER_CST
)
7845 tree and_expr
= op0
;
7846 tree and0
= TREE_OPERAND (and_expr
, 0);
7847 tree and1
= TREE_OPERAND (and_expr
, 1);
7850 if (TYPE_UNSIGNED (TREE_TYPE (and_expr
))
7851 || (TYPE_PRECISION (type
)
7852 <= TYPE_PRECISION (TREE_TYPE (and_expr
))))
7854 else if (TYPE_PRECISION (TREE_TYPE (and1
))
7855 <= HOST_BITS_PER_WIDE_INT
7856 && tree_fits_uhwi_p (and1
))
7858 unsigned HOST_WIDE_INT cst
;
7860 cst
= tree_to_uhwi (and1
);
7861 cst
&= HOST_WIDE_INT_M1U
7862 << (TYPE_PRECISION (TREE_TYPE (and1
)) - 1);
7863 change
= (cst
== 0);
7864 #ifdef LOAD_EXTEND_OP
7866 && !flag_syntax_only
7867 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0
)))
7870 tree uns
= unsigned_type_for (TREE_TYPE (and0
));
7871 and0
= fold_convert_loc (loc
, uns
, and0
);
7872 and1
= fold_convert_loc (loc
, uns
, and1
);
7878 tem
= force_fit_type (type
, wi::to_widest (and1
), 0,
7879 TREE_OVERFLOW (and1
));
7880 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
7881 fold_convert_loc (loc
, type
, and0
), tem
);
7885 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7886 when one of the new casts will fold away. Conservatively we assume
7887 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7888 if (POINTER_TYPE_P (type
)
7889 && TREE_CODE (arg0
) == POINTER_PLUS_EXPR
7890 && (!TYPE_RESTRICT (type
) || TYPE_RESTRICT (TREE_TYPE (arg0
)))
7891 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
7892 || TREE_CODE (TREE_OPERAND (arg0
, 0)) == NOP_EXPR
7893 || TREE_CODE (TREE_OPERAND (arg0
, 1)) == NOP_EXPR
))
7895 tree arg00
= TREE_OPERAND (arg0
, 0);
7896 tree arg01
= TREE_OPERAND (arg0
, 1);
7898 return fold_build_pointer_plus_loc
7899 (loc
, fold_convert_loc (loc
, type
, arg00
), arg01
);
7902 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7903 of the same precision, and X is an integer type not narrower than
7904 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7905 if (INTEGRAL_TYPE_P (type
)
7906 && TREE_CODE (op0
) == BIT_NOT_EXPR
7907 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
7908 && CONVERT_EXPR_P (TREE_OPERAND (op0
, 0))
7909 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (op0
)))
7911 tem
= TREE_OPERAND (TREE_OPERAND (op0
, 0), 0);
7912 if (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7913 && TYPE_PRECISION (type
) <= TYPE_PRECISION (TREE_TYPE (tem
)))
7914 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
7915 fold_convert_loc (loc
, type
, tem
));
7918 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7919 type of X and Y (integer types only). */
7920 if (INTEGRAL_TYPE_P (type
)
7921 && TREE_CODE (op0
) == MULT_EXPR
7922 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
7923 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
7925 /* Be careful not to introduce new overflows. */
7927 if (TYPE_OVERFLOW_WRAPS (type
))
7930 mult_type
= unsigned_type_for (type
);
7932 if (TYPE_PRECISION (mult_type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
7934 tem
= fold_build2_loc (loc
, MULT_EXPR
, mult_type
,
7935 fold_convert_loc (loc
, mult_type
,
7936 TREE_OPERAND (op0
, 0)),
7937 fold_convert_loc (loc
, mult_type
,
7938 TREE_OPERAND (op0
, 1)));
7939 return fold_convert_loc (loc
, type
, tem
);
7943 tem
= fold_convert_const (code
, type
, arg0
);
7944 return tem
? tem
: NULL_TREE
;
7946 case ADDR_SPACE_CONVERT_EXPR
:
7947 if (integer_zerop (arg0
))
7948 return fold_convert_const (code
, type
, arg0
);
7951 case FIXED_CONVERT_EXPR
:
7952 tem
= fold_convert_const (code
, type
, arg0
);
7953 return tem
? tem
: NULL_TREE
;
7955 case VIEW_CONVERT_EXPR
:
7956 if (TREE_TYPE (op0
) == type
)
7958 if (TREE_CODE (op0
) == VIEW_CONVERT_EXPR
)
7959 return fold_build1_loc (loc
, VIEW_CONVERT_EXPR
,
7960 type
, TREE_OPERAND (op0
, 0));
7961 if (TREE_CODE (op0
) == MEM_REF
)
7962 return fold_build2_loc (loc
, MEM_REF
, type
,
7963 TREE_OPERAND (op0
, 0), TREE_OPERAND (op0
, 1));
7965 /* For integral conversions with the same precision or pointer
7966 conversions use a NOP_EXPR instead. */
7967 if ((INTEGRAL_TYPE_P (type
)
7968 || POINTER_TYPE_P (type
))
7969 && (INTEGRAL_TYPE_P (TREE_TYPE (op0
))
7970 || POINTER_TYPE_P (TREE_TYPE (op0
)))
7971 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (op0
)))
7972 return fold_convert_loc (loc
, type
, op0
);
7974 /* Strip inner integral conversions that do not change the precision. */
7975 if (CONVERT_EXPR_P (op0
)
7976 && (INTEGRAL_TYPE_P (TREE_TYPE (op0
))
7977 || POINTER_TYPE_P (TREE_TYPE (op0
)))
7978 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0
, 0)))
7979 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0
, 0))))
7980 && (TYPE_PRECISION (TREE_TYPE (op0
))
7981 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0
, 0)))))
7982 return fold_build1_loc (loc
, VIEW_CONVERT_EXPR
,
7983 type
, TREE_OPERAND (op0
, 0));
7985 return fold_view_convert_expr (type
, op0
);
7988 tem
= fold_negate_expr (loc
, arg0
);
7990 return fold_convert_loc (loc
, type
, tem
);
7994 if (TREE_CODE (arg0
) == INTEGER_CST
|| TREE_CODE (arg0
) == REAL_CST
)
7995 return fold_abs_const (arg0
, type
);
7996 else if (TREE_CODE (arg0
) == NEGATE_EXPR
)
7997 return fold_build1_loc (loc
, ABS_EXPR
, type
, TREE_OPERAND (arg0
, 0));
7998 /* Convert fabs((double)float) into (double)fabsf(float). */
7999 else if (TREE_CODE (arg0
) == NOP_EXPR
8000 && TREE_CODE (type
) == REAL_TYPE
)
8002 tree targ0
= strip_float_extensions (arg0
);
8004 return fold_convert_loc (loc
, type
,
8005 fold_build1_loc (loc
, ABS_EXPR
,
8009 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8010 else if (TREE_CODE (arg0
) == ABS_EXPR
)
8012 else if (tree_expr_nonnegative_p (arg0
))
8015 /* Strip sign ops from argument. */
8016 if (TREE_CODE (type
) == REAL_TYPE
)
8018 tem
= fold_strip_sign_ops (arg0
);
8020 return fold_build1_loc (loc
, ABS_EXPR
, type
,
8021 fold_convert_loc (loc
, type
, tem
));
8026 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
8027 return fold_convert_loc (loc
, type
, arg0
);
8028 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
8030 tree itype
= TREE_TYPE (type
);
8031 tree rpart
= fold_convert_loc (loc
, itype
, TREE_OPERAND (arg0
, 0));
8032 tree ipart
= fold_convert_loc (loc
, itype
, TREE_OPERAND (arg0
, 1));
8033 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
,
8034 negate_expr (ipart
));
8036 if (TREE_CODE (arg0
) == COMPLEX_CST
)
8038 tree itype
= TREE_TYPE (type
);
8039 tree rpart
= fold_convert_loc (loc
, itype
, TREE_REALPART (arg0
));
8040 tree ipart
= fold_convert_loc (loc
, itype
, TREE_IMAGPART (arg0
));
8041 return build_complex (type
, rpart
, negate_expr (ipart
));
8043 if (TREE_CODE (arg0
) == CONJ_EXPR
)
8044 return fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
8048 if (TREE_CODE (arg0
) == INTEGER_CST
)
8049 return fold_not_const (arg0
, type
);
8050 else if (TREE_CODE (arg0
) == BIT_NOT_EXPR
)
8051 return fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
8052 /* Convert ~ (-A) to A - 1. */
8053 else if (INTEGRAL_TYPE_P (type
) && TREE_CODE (arg0
) == NEGATE_EXPR
)
8054 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
8055 fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0)),
8056 build_int_cst (type
, 1));
8057 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8058 else if (INTEGRAL_TYPE_P (type
)
8059 && ((TREE_CODE (arg0
) == MINUS_EXPR
8060 && integer_onep (TREE_OPERAND (arg0
, 1)))
8061 || (TREE_CODE (arg0
) == PLUS_EXPR
8062 && integer_all_onesp (TREE_OPERAND (arg0
, 1)))))
8063 return fold_build1_loc (loc
, NEGATE_EXPR
, type
,
8064 fold_convert_loc (loc
, type
,
8065 TREE_OPERAND (arg0
, 0)));
8066 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8067 else if (TREE_CODE (arg0
) == BIT_XOR_EXPR
8068 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
8069 fold_convert_loc (loc
, type
,
8070 TREE_OPERAND (arg0
, 0)))))
8071 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
, tem
,
8072 fold_convert_loc (loc
, type
,
8073 TREE_OPERAND (arg0
, 1)));
8074 else if (TREE_CODE (arg0
) == BIT_XOR_EXPR
8075 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
8076 fold_convert_loc (loc
, type
,
8077 TREE_OPERAND (arg0
, 1)))))
8078 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
,
8079 fold_convert_loc (loc
, type
,
8080 TREE_OPERAND (arg0
, 0)), tem
);
8081 /* Perform BIT_NOT_EXPR on each element individually. */
8082 else if (TREE_CODE (arg0
) == VECTOR_CST
)
8086 unsigned count
= VECTOR_CST_NELTS (arg0
), i
;
8088 elements
= XALLOCAVEC (tree
, count
);
8089 for (i
= 0; i
< count
; i
++)
8091 elem
= VECTOR_CST_ELT (arg0
, i
);
8092 elem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (type
), elem
);
8093 if (elem
== NULL_TREE
)
8098 return build_vector (type
, elements
);
8100 else if (COMPARISON_CLASS_P (arg0
)
8101 && (VECTOR_TYPE_P (type
)
8102 || (INTEGRAL_TYPE_P (type
) && TYPE_PRECISION (type
) == 1)))
8104 tree op_type
= TREE_TYPE (TREE_OPERAND (arg0
, 0));
8105 enum tree_code subcode
= invert_tree_comparison (TREE_CODE (arg0
),
8106 HONOR_NANS (TYPE_MODE (op_type
)));
8107 if (subcode
!= ERROR_MARK
)
8108 return build2_loc (loc
, subcode
, type
, TREE_OPERAND (arg0
, 0),
8109 TREE_OPERAND (arg0
, 1));
8115 case TRUTH_NOT_EXPR
:
8116 /* Note that the operand of this must be an int
8117 and its values must be 0 or 1.
8118 ("true" is a fixed value perhaps depending on the language,
8119 but we don't handle values other than 1 correctly yet.) */
8120 tem
= fold_truth_not_expr (loc
, arg0
);
8123 return fold_convert_loc (loc
, type
, tem
);
8126 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
8127 return fold_convert_loc (loc
, type
, arg0
);
8128 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
8129 return omit_one_operand_loc (loc
, type
, TREE_OPERAND (arg0
, 0),
8130 TREE_OPERAND (arg0
, 1));
8131 if (TREE_CODE (arg0
) == COMPLEX_CST
)
8132 return fold_convert_loc (loc
, type
, TREE_REALPART (arg0
));
8133 if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8135 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8136 tem
= fold_build2_loc (loc
, TREE_CODE (arg0
), itype
,
8137 fold_build1_loc (loc
, REALPART_EXPR
, itype
,
8138 TREE_OPERAND (arg0
, 0)),
8139 fold_build1_loc (loc
, REALPART_EXPR
, itype
,
8140 TREE_OPERAND (arg0
, 1)));
8141 return fold_convert_loc (loc
, type
, tem
);
8143 if (TREE_CODE (arg0
) == CONJ_EXPR
)
8145 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8146 tem
= fold_build1_loc (loc
, REALPART_EXPR
, itype
,
8147 TREE_OPERAND (arg0
, 0));
8148 return fold_convert_loc (loc
, type
, tem
);
8150 if (TREE_CODE (arg0
) == CALL_EXPR
)
8152 tree fn
= get_callee_fndecl (arg0
);
8153 if (fn
&& DECL_BUILT_IN_CLASS (fn
) == BUILT_IN_NORMAL
)
8154 switch (DECL_FUNCTION_CODE (fn
))
8156 CASE_FLT_FN (BUILT_IN_CEXPI
):
8157 fn
= mathfn_built_in (type
, BUILT_IN_COS
);
8159 return build_call_expr_loc (loc
, fn
, 1, CALL_EXPR_ARG (arg0
, 0));
8169 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
8170 return build_zero_cst (type
);
8171 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
8172 return omit_one_operand_loc (loc
, type
, TREE_OPERAND (arg0
, 1),
8173 TREE_OPERAND (arg0
, 0));
8174 if (TREE_CODE (arg0
) == COMPLEX_CST
)
8175 return fold_convert_loc (loc
, type
, TREE_IMAGPART (arg0
));
8176 if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8178 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8179 tem
= fold_build2_loc (loc
, TREE_CODE (arg0
), itype
,
8180 fold_build1_loc (loc
, IMAGPART_EXPR
, itype
,
8181 TREE_OPERAND (arg0
, 0)),
8182 fold_build1_loc (loc
, IMAGPART_EXPR
, itype
,
8183 TREE_OPERAND (arg0
, 1)));
8184 return fold_convert_loc (loc
, type
, tem
);
8186 if (TREE_CODE (arg0
) == CONJ_EXPR
)
8188 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8189 tem
= fold_build1_loc (loc
, IMAGPART_EXPR
, itype
, TREE_OPERAND (arg0
, 0));
8190 return fold_convert_loc (loc
, type
, negate_expr (tem
));
8192 if (TREE_CODE (arg0
) == CALL_EXPR
)
8194 tree fn
= get_callee_fndecl (arg0
);
8195 if (fn
&& DECL_BUILT_IN_CLASS (fn
) == BUILT_IN_NORMAL
)
8196 switch (DECL_FUNCTION_CODE (fn
))
8198 CASE_FLT_FN (BUILT_IN_CEXPI
):
8199 fn
= mathfn_built_in (type
, BUILT_IN_SIN
);
8201 return build_call_expr_loc (loc
, fn
, 1, CALL_EXPR_ARG (arg0
, 0));
8211 /* Fold *&X to X if X is an lvalue. */
8212 if (TREE_CODE (op0
) == ADDR_EXPR
)
8214 tree op00
= TREE_OPERAND (op0
, 0);
8215 if ((TREE_CODE (op00
) == VAR_DECL
8216 || TREE_CODE (op00
) == PARM_DECL
8217 || TREE_CODE (op00
) == RESULT_DECL
)
8218 && !TREE_READONLY (op00
))
8223 case VEC_UNPACK_LO_EXPR
:
8224 case VEC_UNPACK_HI_EXPR
:
8225 case VEC_UNPACK_FLOAT_LO_EXPR
:
8226 case VEC_UNPACK_FLOAT_HI_EXPR
:
8228 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
8230 enum tree_code subcode
;
8232 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)) == nelts
* 2);
8233 if (TREE_CODE (arg0
) != VECTOR_CST
)
8236 elts
= XALLOCAVEC (tree
, nelts
* 2);
8237 if (!vec_cst_ctor_to_array (arg0
, elts
))
8240 if ((!BYTES_BIG_ENDIAN
) ^ (code
== VEC_UNPACK_LO_EXPR
8241 || code
== VEC_UNPACK_FLOAT_LO_EXPR
))
8244 if (code
== VEC_UNPACK_LO_EXPR
|| code
== VEC_UNPACK_HI_EXPR
)
8247 subcode
= FLOAT_EXPR
;
8249 for (i
= 0; i
< nelts
; i
++)
8251 elts
[i
] = fold_convert_const (subcode
, TREE_TYPE (type
), elts
[i
]);
8252 if (elts
[i
] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[i
]))
8256 return build_vector (type
, elts
);
8259 case REDUC_MIN_EXPR
:
8260 case REDUC_MAX_EXPR
:
8261 case REDUC_PLUS_EXPR
:
8263 unsigned int nelts
, i
;
8265 enum tree_code subcode
;
8267 if (TREE_CODE (op0
) != VECTOR_CST
)
8269 nelts
= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op0
));
8271 elts
= XALLOCAVEC (tree
, nelts
);
8272 if (!vec_cst_ctor_to_array (op0
, elts
))
8277 case REDUC_MIN_EXPR
: subcode
= MIN_EXPR
; break;
8278 case REDUC_MAX_EXPR
: subcode
= MAX_EXPR
; break;
8279 case REDUC_PLUS_EXPR
: subcode
= PLUS_EXPR
; break;
8280 default: gcc_unreachable ();
8283 for (i
= 1; i
< nelts
; i
++)
8285 elts
[0] = const_binop (subcode
, elts
[0], elts
[i
]);
8286 if (elts
[0] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[0]))
8295 } /* switch (code) */
8299 /* If the operation was a conversion do _not_ mark a resulting constant
8300 with TREE_OVERFLOW if the original constant was not. These conversions
8301 have implementation defined behavior and retaining the TREE_OVERFLOW
8302 flag here would confuse later passes such as VRP. */
8304 fold_unary_ignore_overflow_loc (location_t loc
, enum tree_code code
,
8305 tree type
, tree op0
)
8307 tree res
= fold_unary_loc (loc
, code
, type
, op0
);
8309 && TREE_CODE (res
) == INTEGER_CST
8310 && TREE_CODE (op0
) == INTEGER_CST
8311 && CONVERT_EXPR_CODE_P (code
))
8312 TREE_OVERFLOW (res
) = TREE_OVERFLOW (op0
);
8317 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8318 operands OP0 and OP1. LOC is the location of the resulting expression.
8319 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8320 Return the folded expression if folding is successful. Otherwise,
8321 return NULL_TREE. */
8323 fold_truth_andor (location_t loc
, enum tree_code code
, tree type
,
8324 tree arg0
, tree arg1
, tree op0
, tree op1
)
8328 /* We only do these simplifications if we are optimizing. */
8332 /* Check for things like (A || B) && (A || C). We can convert this
8333 to A || (B && C). Note that either operator can be any of the four
8334 truth and/or operations and the transformation will still be
8335 valid. Also note that we only care about order for the
8336 ANDIF and ORIF operators. If B contains side effects, this
8337 might change the truth-value of A. */
8338 if (TREE_CODE (arg0
) == TREE_CODE (arg1
)
8339 && (TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
8340 || TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
8341 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
8342 || TREE_CODE (arg0
) == TRUTH_OR_EXPR
)
8343 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0
, 1)))
8345 tree a00
= TREE_OPERAND (arg0
, 0);
8346 tree a01
= TREE_OPERAND (arg0
, 1);
8347 tree a10
= TREE_OPERAND (arg1
, 0);
8348 tree a11
= TREE_OPERAND (arg1
, 1);
8349 int commutative
= ((TREE_CODE (arg0
) == TRUTH_OR_EXPR
8350 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
)
8351 && (code
== TRUTH_AND_EXPR
8352 || code
== TRUTH_OR_EXPR
));
8354 if (operand_equal_p (a00
, a10
, 0))
8355 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a00
,
8356 fold_build2_loc (loc
, code
, type
, a01
, a11
));
8357 else if (commutative
&& operand_equal_p (a00
, a11
, 0))
8358 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a00
,
8359 fold_build2_loc (loc
, code
, type
, a01
, a10
));
8360 else if (commutative
&& operand_equal_p (a01
, a10
, 0))
8361 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a01
,
8362 fold_build2_loc (loc
, code
, type
, a00
, a11
));
8364 /* This case if tricky because we must either have commutative
8365 operators or else A10 must not have side-effects. */
8367 else if ((commutative
|| ! TREE_SIDE_EFFECTS (a10
))
8368 && operand_equal_p (a01
, a11
, 0))
8369 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
,
8370 fold_build2_loc (loc
, code
, type
, a00
, a10
),
8374 /* See if we can build a range comparison. */
8375 if (0 != (tem
= fold_range_test (loc
, code
, type
, op0
, op1
)))
8378 if ((code
== TRUTH_ANDIF_EXPR
&& TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
)
8379 || (code
== TRUTH_ORIF_EXPR
&& TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
))
8381 tem
= merge_truthop_with_opposite_arm (loc
, arg0
, arg1
, true);
8383 return fold_build2_loc (loc
, code
, type
, tem
, arg1
);
8386 if ((code
== TRUTH_ANDIF_EXPR
&& TREE_CODE (arg1
) == TRUTH_ORIF_EXPR
)
8387 || (code
== TRUTH_ORIF_EXPR
&& TREE_CODE (arg1
) == TRUTH_ANDIF_EXPR
))
8389 tem
= merge_truthop_with_opposite_arm (loc
, arg1
, arg0
, false);
8391 return fold_build2_loc (loc
, code
, type
, arg0
, tem
);
8394 /* Check for the possibility of merging component references. If our
8395 lhs is another similar operation, try to merge its rhs with our
8396 rhs. Then try to merge our lhs and rhs. */
8397 if (TREE_CODE (arg0
) == code
8398 && 0 != (tem
= fold_truth_andor_1 (loc
, code
, type
,
8399 TREE_OPERAND (arg0
, 1), arg1
)))
8400 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
8402 if ((tem
= fold_truth_andor_1 (loc
, code
, type
, arg0
, arg1
)) != 0)
8405 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8406 && (code
== TRUTH_AND_EXPR
8407 || code
== TRUTH_ANDIF_EXPR
8408 || code
== TRUTH_OR_EXPR
8409 || code
== TRUTH_ORIF_EXPR
))
8411 enum tree_code ncode
, icode
;
8413 ncode
= (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_AND_EXPR
)
8414 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
;
8415 icode
= ncode
== TRUTH_AND_EXPR
? TRUTH_ANDIF_EXPR
: TRUTH_ORIF_EXPR
;
8417 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8418 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8419 We don't want to pack more than two leafs to a non-IF AND/OR
8421 If tree-code of left-hand operand isn't an AND/OR-IF code and not
8422 equal to IF-CODE, then we don't want to add right-hand operand.
8423 If the inner right-hand side of left-hand operand has
8424 side-effects, or isn't simple, then we can't add to it,
8425 as otherwise we might destroy if-sequence. */
8426 if (TREE_CODE (arg0
) == icode
8427 && simple_operand_p_2 (arg1
)
8428 /* Needed for sequence points to handle trappings, and
8430 && simple_operand_p_2 (TREE_OPERAND (arg0
, 1)))
8432 tem
= fold_build2_loc (loc
, ncode
, type
, TREE_OPERAND (arg0
, 1),
8434 return fold_build2_loc (loc
, icode
, type
, TREE_OPERAND (arg0
, 0),
8437 /* Same as abouve but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8438 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
8439 else if (TREE_CODE (arg1
) == icode
8440 && simple_operand_p_2 (arg0
)
8441 /* Needed for sequence points to handle trappings, and
8443 && simple_operand_p_2 (TREE_OPERAND (arg1
, 0)))
8445 tem
= fold_build2_loc (loc
, ncode
, type
,
8446 arg0
, TREE_OPERAND (arg1
, 0));
8447 return fold_build2_loc (loc
, icode
, type
, tem
,
8448 TREE_OPERAND (arg1
, 1));
8450 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8452 For sequence point consistancy, we need to check for trapping,
8453 and side-effects. */
8454 else if (code
== icode
&& simple_operand_p_2 (arg0
)
8455 && simple_operand_p_2 (arg1
))
8456 return fold_build2_loc (loc
, ncode
, type
, arg0
, arg1
);
8462 /* Fold a binary expression of code CODE and type TYPE with operands
8463 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8464 Return the folded expression if folding is successful. Otherwise,
8465 return NULL_TREE. */
8468 fold_minmax (location_t loc
, enum tree_code code
, tree type
, tree op0
, tree op1
)
8470 enum tree_code compl_code
;
8472 if (code
== MIN_EXPR
)
8473 compl_code
= MAX_EXPR
;
8474 else if (code
== MAX_EXPR
)
8475 compl_code
= MIN_EXPR
;
8479 /* MIN (MAX (a, b), b) == b. */
8480 if (TREE_CODE (op0
) == compl_code
8481 && operand_equal_p (TREE_OPERAND (op0
, 1), op1
, 0))
8482 return omit_one_operand_loc (loc
, type
, op1
, TREE_OPERAND (op0
, 0));
8484 /* MIN (MAX (b, a), b) == b. */
8485 if (TREE_CODE (op0
) == compl_code
8486 && operand_equal_p (TREE_OPERAND (op0
, 0), op1
, 0)
8487 && reorder_operands_p (TREE_OPERAND (op0
, 1), op1
))
8488 return omit_one_operand_loc (loc
, type
, op1
, TREE_OPERAND (op0
, 1));
8490 /* MIN (a, MAX (a, b)) == a. */
8491 if (TREE_CODE (op1
) == compl_code
8492 && operand_equal_p (op0
, TREE_OPERAND (op1
, 0), 0)
8493 && reorder_operands_p (op0
, TREE_OPERAND (op1
, 1)))
8494 return omit_one_operand_loc (loc
, type
, op0
, TREE_OPERAND (op1
, 1));
8496 /* MIN (a, MAX (b, a)) == a. */
8497 if (TREE_CODE (op1
) == compl_code
8498 && operand_equal_p (op0
, TREE_OPERAND (op1
, 1), 0)
8499 && reorder_operands_p (op0
, TREE_OPERAND (op1
, 0)))
8500 return omit_one_operand_loc (loc
, type
, op0
, TREE_OPERAND (op1
, 0));
8505 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8506 by changing CODE to reduce the magnitude of constants involved in
8507 ARG0 of the comparison.
8508 Returns a canonicalized comparison tree if a simplification was
8509 possible, otherwise returns NULL_TREE.
8510 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8511 valid if signed overflow is undefined. */
8514 maybe_canonicalize_comparison_1 (location_t loc
, enum tree_code code
, tree type
,
8515 tree arg0
, tree arg1
,
8516 bool *strict_overflow_p
)
8518 enum tree_code code0
= TREE_CODE (arg0
);
8519 tree t
, cst0
= NULL_TREE
;
8523 /* Match A +- CST code arg1 and CST code arg1. We can change the
8524 first form only if overflow is undefined. */
8525 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))
8526 /* In principle pointers also have undefined overflow behavior,
8527 but that causes problems elsewhere. */
8528 && !POINTER_TYPE_P (TREE_TYPE (arg0
))
8529 && (code0
== MINUS_EXPR
8530 || code0
== PLUS_EXPR
)
8531 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
8532 || code0
== INTEGER_CST
))
8535 /* Identify the constant in arg0 and its sign. */
8536 if (code0
== INTEGER_CST
)
8539 cst0
= TREE_OPERAND (arg0
, 1);
8540 sgn0
= tree_int_cst_sgn (cst0
);
8542 /* Overflowed constants and zero will cause problems. */
8543 if (integer_zerop (cst0
)
8544 || TREE_OVERFLOW (cst0
))
8547 /* See if we can reduce the magnitude of the constant in
8548 arg0 by changing the comparison code. */
8549 if (code0
== INTEGER_CST
)
8551 /* CST <= arg1 -> CST-1 < arg1. */
8552 if (code
== LE_EXPR
&& sgn0
== 1)
8554 /* -CST < arg1 -> -CST-1 <= arg1. */
8555 else if (code
== LT_EXPR
&& sgn0
== -1)
8557 /* CST > arg1 -> CST-1 >= arg1. */
8558 else if (code
== GT_EXPR
&& sgn0
== 1)
8560 /* -CST >= arg1 -> -CST-1 > arg1. */
8561 else if (code
== GE_EXPR
&& sgn0
== -1)
8565 /* arg1 code' CST' might be more canonical. */
8570 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8572 && code0
== ((sgn0
== -1) ? PLUS_EXPR
: MINUS_EXPR
))
8574 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8575 else if (code
== GT_EXPR
8576 && code0
== ((sgn0
== -1) ? MINUS_EXPR
: PLUS_EXPR
))
8578 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8579 else if (code
== LE_EXPR
8580 && code0
== ((sgn0
== -1) ? MINUS_EXPR
: PLUS_EXPR
))
8582 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8583 else if (code
== GE_EXPR
8584 && code0
== ((sgn0
== -1) ? PLUS_EXPR
: MINUS_EXPR
))
8588 *strict_overflow_p
= true;
8591 /* Now build the constant reduced in magnitude. But not if that
8592 would produce one outside of its types range. */
8593 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0
))
8595 && TYPE_MIN_VALUE (TREE_TYPE (cst0
))
8596 && tree_int_cst_equal (cst0
, TYPE_MIN_VALUE (TREE_TYPE (cst0
))))
8598 && TYPE_MAX_VALUE (TREE_TYPE (cst0
))
8599 && tree_int_cst_equal (cst0
, TYPE_MAX_VALUE (TREE_TYPE (cst0
))))))
8600 /* We cannot swap the comparison here as that would cause us to
8601 endlessly recurse. */
8604 t
= int_const_binop (sgn0
== -1 ? PLUS_EXPR
: MINUS_EXPR
,
8605 cst0
, build_int_cst (TREE_TYPE (cst0
), 1));
8606 if (code0
!= INTEGER_CST
)
8607 t
= fold_build2_loc (loc
, code0
, TREE_TYPE (arg0
), TREE_OPERAND (arg0
, 0), t
);
8608 t
= fold_convert (TREE_TYPE (arg1
), t
);
8610 /* If swapping might yield to a more canonical form, do so. */
8612 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
, arg1
, t
);
8614 return fold_build2_loc (loc
, code
, type
, t
, arg1
);
8617 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8618 overflow further. Try to decrease the magnitude of constants involved
8619 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8620 and put sole constants at the second argument position.
8621 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8624 maybe_canonicalize_comparison (location_t loc
, enum tree_code code
, tree type
,
8625 tree arg0
, tree arg1
)
8628 bool strict_overflow_p
;
8629 const char * const warnmsg
= G_("assuming signed overflow does not occur "
8630 "when reducing constant in comparison");
8632 /* Try canonicalization by simplifying arg0. */
8633 strict_overflow_p
= false;
8634 t
= maybe_canonicalize_comparison_1 (loc
, code
, type
, arg0
, arg1
,
8635 &strict_overflow_p
);
8638 if (strict_overflow_p
)
8639 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MAGNITUDE
);
8643 /* Try canonicalization by simplifying arg1 using the swapped
8645 code
= swap_tree_comparison (code
);
8646 strict_overflow_p
= false;
8647 t
= maybe_canonicalize_comparison_1 (loc
, code
, type
, arg1
, arg0
,
8648 &strict_overflow_p
);
8649 if (t
&& strict_overflow_p
)
8650 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MAGNITUDE
);
8654 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8655 space. This is used to avoid issuing overflow warnings for
8656 expressions like &p->x which can not wrap. */
8659 pointer_may_wrap_p (tree base
, tree offset
, HOST_WIDE_INT bitpos
)
8661 if (!POINTER_TYPE_P (TREE_TYPE (base
)))
8668 int precision
= TYPE_PRECISION (TREE_TYPE (base
));
8669 if (offset
== NULL_TREE
)
8670 wi_offset
= wi::zero (precision
);
8671 else if (TREE_CODE (offset
) != INTEGER_CST
|| TREE_OVERFLOW (offset
))
8677 wide_int units
= wi::shwi (bitpos
/ BITS_PER_UNIT
, precision
);
8678 wide_int total
= wi::add (wi_offset
, units
, UNSIGNED
, &overflow
);
8682 if (!wi::fits_uhwi_p (total
))
8685 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (TREE_TYPE (base
)));
8689 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8691 if (TREE_CODE (base
) == ADDR_EXPR
)
8693 HOST_WIDE_INT base_size
;
8695 base_size
= int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base
, 0)));
8696 if (base_size
> 0 && size
< base_size
)
8700 return total
.to_uhwi () > (unsigned HOST_WIDE_INT
) size
;
8703 /* Return the HOST_WIDE_INT least significant bits of T, a sizetype
8704 kind INTEGER_CST. This makes sure to properly sign-extend the
8707 static HOST_WIDE_INT
8708 size_low_cst (const_tree t
)
8710 HOST_WIDE_INT w
= TREE_INT_CST_ELT (t
, 0);
8711 int prec
= TYPE_PRECISION (TREE_TYPE (t
));
8712 if (prec
< HOST_BITS_PER_WIDE_INT
)
8713 return sext_hwi (w
, prec
);
8717 /* Subroutine of fold_binary. This routine performs all of the
8718 transformations that are common to the equality/inequality
8719 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8720 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8721 fold_binary should call fold_binary. Fold a comparison with
8722 tree code CODE and type TYPE with operands OP0 and OP1. Return
8723 the folded comparison or NULL_TREE. */
8726 fold_comparison (location_t loc
, enum tree_code code
, tree type
,
8729 const bool equality_code
= (code
== EQ_EXPR
|| code
== NE_EXPR
);
8730 tree arg0
, arg1
, tem
;
8735 STRIP_SIGN_NOPS (arg0
);
8736 STRIP_SIGN_NOPS (arg1
);
8738 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8739 if ((TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8740 && (equality_code
|| TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
)))
8741 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8742 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1))
8743 && TREE_CODE (arg1
) == INTEGER_CST
8744 && !TREE_OVERFLOW (arg1
))
8746 const enum tree_code
8747 reverse_op
= TREE_CODE (arg0
) == PLUS_EXPR
? MINUS_EXPR
: PLUS_EXPR
;
8748 tree const1
= TREE_OPERAND (arg0
, 1);
8749 tree const2
= fold_convert_loc (loc
, TREE_TYPE (const1
), arg1
);
8750 tree variable
= TREE_OPERAND (arg0
, 0);
8751 tree new_const
= int_const_binop (reverse_op
, const2
, const1
);
8753 /* If the constant operation overflowed this can be
8754 simplified as a comparison against INT_MAX/INT_MIN. */
8755 if (TREE_OVERFLOW (new_const
)
8756 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
8758 int const1_sgn
= tree_int_cst_sgn (const1
);
8759 enum tree_code code2
= code
;
8761 /* Get the sign of the constant on the lhs if the
8762 operation were VARIABLE + CONST1. */
8763 if (TREE_CODE (arg0
) == MINUS_EXPR
)
8764 const1_sgn
= -const1_sgn
;
8766 /* The sign of the constant determines if we overflowed
8767 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8768 Canonicalize to the INT_MIN overflow by swapping the comparison
8770 if (const1_sgn
== -1)
8771 code2
= swap_tree_comparison (code
);
8773 /* We now can look at the canonicalized case
8774 VARIABLE + 1 CODE2 INT_MIN
8775 and decide on the result. */
8782 omit_one_operand_loc (loc
, type
, boolean_false_node
, variable
);
8788 omit_one_operand_loc (loc
, type
, boolean_true_node
, variable
);
8797 fold_overflow_warning ("assuming signed overflow does not occur "
8798 "when changing X +- C1 cmp C2 to "
8800 WARN_STRICT_OVERFLOW_COMPARISON
);
8801 return fold_build2_loc (loc
, code
, type
, variable
, new_const
);
8805 /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */
8806 if (TREE_CODE (arg0
) == MINUS_EXPR
8808 && integer_zerop (arg1
))
8810 /* ??? The transformation is valid for the other operators if overflow
8811 is undefined for the type, but performing it here badly interacts
8812 with the transformation in fold_cond_expr_with_comparison which
8813 attempts to synthetize ABS_EXPR. */
8815 fold_overflow_warning ("assuming signed overflow does not occur "
8816 "when changing X - Y cmp 0 to X cmp Y",
8817 WARN_STRICT_OVERFLOW_COMPARISON
);
8818 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
8819 TREE_OPERAND (arg0
, 1));
8822 /* For comparisons of pointers we can decompose it to a compile time
8823 comparison of the base objects and the offsets into the object.
8824 This requires at least one operand being an ADDR_EXPR or a
8825 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8826 if (POINTER_TYPE_P (TREE_TYPE (arg0
))
8827 && (TREE_CODE (arg0
) == ADDR_EXPR
8828 || TREE_CODE (arg1
) == ADDR_EXPR
8829 || TREE_CODE (arg0
) == POINTER_PLUS_EXPR
8830 || TREE_CODE (arg1
) == POINTER_PLUS_EXPR
))
8832 tree base0
, base1
, offset0
= NULL_TREE
, offset1
= NULL_TREE
;
8833 HOST_WIDE_INT bitsize
, bitpos0
= 0, bitpos1
= 0;
8835 int volatilep
, unsignedp
;
8836 bool indirect_base0
= false, indirect_base1
= false;
8838 /* Get base and offset for the access. Strip ADDR_EXPR for
8839 get_inner_reference, but put it back by stripping INDIRECT_REF
8840 off the base object if possible. indirect_baseN will be true
8841 if baseN is not an address but refers to the object itself. */
8843 if (TREE_CODE (arg0
) == ADDR_EXPR
)
8845 base0
= get_inner_reference (TREE_OPERAND (arg0
, 0),
8846 &bitsize
, &bitpos0
, &offset0
, &mode
,
8847 &unsignedp
, &volatilep
, false);
8848 if (TREE_CODE (base0
) == INDIRECT_REF
)
8849 base0
= TREE_OPERAND (base0
, 0);
8851 indirect_base0
= true;
8853 else if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
)
8855 base0
= TREE_OPERAND (arg0
, 0);
8856 STRIP_SIGN_NOPS (base0
);
8857 if (TREE_CODE (base0
) == ADDR_EXPR
)
8859 base0
= TREE_OPERAND (base0
, 0);
8860 indirect_base0
= true;
8862 offset0
= TREE_OPERAND (arg0
, 1);
8863 if (tree_fits_shwi_p (offset0
))
8865 HOST_WIDE_INT off
= size_low_cst (offset0
);
8866 if ((HOST_WIDE_INT
) (((unsigned HOST_WIDE_INT
) off
)
8868 / BITS_PER_UNIT
== (HOST_WIDE_INT
) off
)
8870 bitpos0
= off
* BITS_PER_UNIT
;
8871 offset0
= NULL_TREE
;
8877 if (TREE_CODE (arg1
) == ADDR_EXPR
)
8879 base1
= get_inner_reference (TREE_OPERAND (arg1
, 0),
8880 &bitsize
, &bitpos1
, &offset1
, &mode
,
8881 &unsignedp
, &volatilep
, false);
8882 if (TREE_CODE (base1
) == INDIRECT_REF
)
8883 base1
= TREE_OPERAND (base1
, 0);
8885 indirect_base1
= true;
8887 else if (TREE_CODE (arg1
) == POINTER_PLUS_EXPR
)
8889 base1
= TREE_OPERAND (arg1
, 0);
8890 STRIP_SIGN_NOPS (base1
);
8891 if (TREE_CODE (base1
) == ADDR_EXPR
)
8893 base1
= TREE_OPERAND (base1
, 0);
8894 indirect_base1
= true;
8896 offset1
= TREE_OPERAND (arg1
, 1);
8897 if (tree_fits_shwi_p (offset1
))
8899 HOST_WIDE_INT off
= size_low_cst (offset1
);
8900 if ((HOST_WIDE_INT
) (((unsigned HOST_WIDE_INT
) off
)
8902 / BITS_PER_UNIT
== (HOST_WIDE_INT
) off
)
8904 bitpos1
= off
* BITS_PER_UNIT
;
8905 offset1
= NULL_TREE
;
8910 /* A local variable can never be pointed to by
8911 the default SSA name of an incoming parameter. */
8912 if ((TREE_CODE (arg0
) == ADDR_EXPR
8914 && TREE_CODE (base0
) == VAR_DECL
8915 && auto_var_in_fn_p (base0
, current_function_decl
)
8917 && TREE_CODE (base1
) == SSA_NAME
8918 && SSA_NAME_IS_DEFAULT_DEF (base1
)
8919 && TREE_CODE (SSA_NAME_VAR (base1
)) == PARM_DECL
)
8920 || (TREE_CODE (arg1
) == ADDR_EXPR
8922 && TREE_CODE (base1
) == VAR_DECL
8923 && auto_var_in_fn_p (base1
, current_function_decl
)
8925 && TREE_CODE (base0
) == SSA_NAME
8926 && SSA_NAME_IS_DEFAULT_DEF (base0
)
8927 && TREE_CODE (SSA_NAME_VAR (base0
)) == PARM_DECL
))
8929 if (code
== NE_EXPR
)
8930 return constant_boolean_node (1, type
);
8931 else if (code
== EQ_EXPR
)
8932 return constant_boolean_node (0, type
);
8934 /* If we have equivalent bases we might be able to simplify. */
8935 else if (indirect_base0
== indirect_base1
8936 && operand_equal_p (base0
, base1
, 0))
8938 /* We can fold this expression to a constant if the non-constant
8939 offset parts are equal. */
8940 if ((offset0
== offset1
8941 || (offset0
&& offset1
8942 && operand_equal_p (offset0
, offset1
, 0)))
8945 || (indirect_base0
&& DECL_P (base0
))
8946 || POINTER_TYPE_OVERFLOW_UNDEFINED
))
8950 && bitpos0
!= bitpos1
8951 && (pointer_may_wrap_p (base0
, offset0
, bitpos0
)
8952 || pointer_may_wrap_p (base1
, offset1
, bitpos1
)))
8953 fold_overflow_warning (("assuming pointer wraparound does not "
8954 "occur when comparing P +- C1 with "
8956 WARN_STRICT_OVERFLOW_CONDITIONAL
);
8961 return constant_boolean_node (bitpos0
== bitpos1
, type
);
8963 return constant_boolean_node (bitpos0
!= bitpos1
, type
);
8965 return constant_boolean_node (bitpos0
< bitpos1
, type
);
8967 return constant_boolean_node (bitpos0
<= bitpos1
, type
);
8969 return constant_boolean_node (bitpos0
>= bitpos1
, type
);
8971 return constant_boolean_node (bitpos0
> bitpos1
, type
);
8975 /* We can simplify the comparison to a comparison of the variable
8976 offset parts if the constant offset parts are equal.
8977 Be careful to use signed sizetype here because otherwise we
8978 mess with array offsets in the wrong way. This is possible
8979 because pointer arithmetic is restricted to retain within an
8980 object and overflow on pointer differences is undefined as of
8981 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8982 else if (bitpos0
== bitpos1
8984 || (indirect_base0
&& DECL_P (base0
))
8985 || POINTER_TYPE_OVERFLOW_UNDEFINED
))
8987 /* By converting to signed sizetype we cover middle-end pointer
8988 arithmetic which operates on unsigned pointer types of size
8989 type size and ARRAY_REF offsets which are properly sign or
8990 zero extended from their type in case it is narrower than
8992 if (offset0
== NULL_TREE
)
8993 offset0
= build_int_cst (ssizetype
, 0);
8995 offset0
= fold_convert_loc (loc
, ssizetype
, offset0
);
8996 if (offset1
== NULL_TREE
)
8997 offset1
= build_int_cst (ssizetype
, 0);
8999 offset1
= fold_convert_loc (loc
, ssizetype
, offset1
);
9002 && (pointer_may_wrap_p (base0
, offset0
, bitpos0
)
9003 || pointer_may_wrap_p (base1
, offset1
, bitpos1
)))
9004 fold_overflow_warning (("assuming pointer wraparound does not "
9005 "occur when comparing P +- C1 with "
9007 WARN_STRICT_OVERFLOW_COMPARISON
);
9009 return fold_build2_loc (loc
, code
, type
, offset0
, offset1
);
9012 /* For non-equal bases we can simplify if they are addresses
9013 of local binding decls or constants. */
9014 else if (indirect_base0
&& indirect_base1
9015 /* We know that !operand_equal_p (base0, base1, 0)
9016 because the if condition was false. But make
9017 sure two decls are not the same. */
9019 && TREE_CODE (arg0
) == ADDR_EXPR
9020 && TREE_CODE (arg1
) == ADDR_EXPR
9021 && (((TREE_CODE (base0
) == VAR_DECL
9022 || TREE_CODE (base0
) == PARM_DECL
)
9023 && (targetm
.binds_local_p (base0
)
9024 || CONSTANT_CLASS_P (base1
)))
9025 || CONSTANT_CLASS_P (base0
))
9026 && (((TREE_CODE (base1
) == VAR_DECL
9027 || TREE_CODE (base1
) == PARM_DECL
)
9028 && (targetm
.binds_local_p (base1
)
9029 || CONSTANT_CLASS_P (base0
)))
9030 || CONSTANT_CLASS_P (base1
)))
9032 if (code
== EQ_EXPR
)
9033 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
9035 else if (code
== NE_EXPR
)
9036 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
9039 /* For equal offsets we can simplify to a comparison of the
9041 else if (bitpos0
== bitpos1
9043 ? base0
!= TREE_OPERAND (arg0
, 0) : base0
!= arg0
)
9045 ? base1
!= TREE_OPERAND (arg1
, 0) : base1
!= arg1
)
9046 && ((offset0
== offset1
)
9047 || (offset0
&& offset1
9048 && operand_equal_p (offset0
, offset1
, 0))))
9051 base0
= build_fold_addr_expr_loc (loc
, base0
);
9053 base1
= build_fold_addr_expr_loc (loc
, base1
);
9054 return fold_build2_loc (loc
, code
, type
, base0
, base1
);
9058 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9059 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9060 the resulting offset is smaller in absolute value than the
9061 original one and has the same sign. */
9062 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))
9063 && (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
9064 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
9065 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1)))
9066 && (TREE_CODE (arg1
) == PLUS_EXPR
|| TREE_CODE (arg1
) == MINUS_EXPR
)
9067 && (TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
9068 && !TREE_OVERFLOW (TREE_OPERAND (arg1
, 1))))
9070 tree const1
= TREE_OPERAND (arg0
, 1);
9071 tree const2
= TREE_OPERAND (arg1
, 1);
9072 tree variable1
= TREE_OPERAND (arg0
, 0);
9073 tree variable2
= TREE_OPERAND (arg1
, 0);
9075 const char * const warnmsg
= G_("assuming signed overflow does not "
9076 "occur when combining constants around "
9079 /* Put the constant on the side where it doesn't overflow and is
9080 of lower absolute value and of same sign than before. */
9081 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
9082 ? MINUS_EXPR
: PLUS_EXPR
,
9084 if (!TREE_OVERFLOW (cst
)
9085 && tree_int_cst_compare (const2
, cst
) == tree_int_cst_sgn (const2
)
9086 && tree_int_cst_sgn (cst
) == tree_int_cst_sgn (const2
))
9088 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
9089 return fold_build2_loc (loc
, code
, type
,
9091 fold_build2_loc (loc
, TREE_CODE (arg1
),
9096 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
9097 ? MINUS_EXPR
: PLUS_EXPR
,
9099 if (!TREE_OVERFLOW (cst
)
9100 && tree_int_cst_compare (const1
, cst
) == tree_int_cst_sgn (const1
)
9101 && tree_int_cst_sgn (cst
) == tree_int_cst_sgn (const1
))
9103 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
9104 return fold_build2_loc (loc
, code
, type
,
9105 fold_build2_loc (loc
, TREE_CODE (arg0
),
9112 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9113 signed arithmetic case. That form is created by the compiler
9114 often enough for folding it to be of value. One example is in
9115 computing loop trip counts after Operator Strength Reduction. */
9116 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))
9117 && TREE_CODE (arg0
) == MULT_EXPR
9118 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
9119 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1)))
9120 && integer_zerop (arg1
))
9122 tree const1
= TREE_OPERAND (arg0
, 1);
9123 tree const2
= arg1
; /* zero */
9124 tree variable1
= TREE_OPERAND (arg0
, 0);
9125 enum tree_code cmp_code
= code
;
9127 /* Handle unfolded multiplication by zero. */
9128 if (integer_zerop (const1
))
9129 return fold_build2_loc (loc
, cmp_code
, type
, const1
, const2
);
9131 fold_overflow_warning (("assuming signed overflow does not occur when "
9132 "eliminating multiplication in comparison "
9134 WARN_STRICT_OVERFLOW_COMPARISON
);
9136 /* If const1 is negative we swap the sense of the comparison. */
9137 if (tree_int_cst_sgn (const1
) < 0)
9138 cmp_code
= swap_tree_comparison (cmp_code
);
9140 return fold_build2_loc (loc
, cmp_code
, type
, variable1
, const2
);
9143 tem
= maybe_canonicalize_comparison (loc
, code
, type
, arg0
, arg1
);
9147 if (FLOAT_TYPE_P (TREE_TYPE (arg0
)))
9149 tree targ0
= strip_float_extensions (arg0
);
9150 tree targ1
= strip_float_extensions (arg1
);
9151 tree newtype
= TREE_TYPE (targ0
);
9153 if (TYPE_PRECISION (TREE_TYPE (targ1
)) > TYPE_PRECISION (newtype
))
9154 newtype
= TREE_TYPE (targ1
);
9156 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9157 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (TREE_TYPE (arg0
)))
9158 return fold_build2_loc (loc
, code
, type
,
9159 fold_convert_loc (loc
, newtype
, targ0
),
9160 fold_convert_loc (loc
, newtype
, targ1
));
9162 /* (-a) CMP (-b) -> b CMP a */
9163 if (TREE_CODE (arg0
) == NEGATE_EXPR
9164 && TREE_CODE (arg1
) == NEGATE_EXPR
)
9165 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg1
, 0),
9166 TREE_OPERAND (arg0
, 0));
9168 if (TREE_CODE (arg1
) == REAL_CST
)
9170 REAL_VALUE_TYPE cst
;
9171 cst
= TREE_REAL_CST (arg1
);
9173 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9174 if (TREE_CODE (arg0
) == NEGATE_EXPR
)
9175 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
,
9176 TREE_OPERAND (arg0
, 0),
9177 build_real (TREE_TYPE (arg1
),
9178 real_value_negate (&cst
)));
9180 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9181 /* a CMP (-0) -> a CMP 0 */
9182 if (REAL_VALUE_MINUS_ZERO (cst
))
9183 return fold_build2_loc (loc
, code
, type
, arg0
,
9184 build_real (TREE_TYPE (arg1
), dconst0
));
9186 /* x != NaN is always true, other ops are always false. */
9187 if (REAL_VALUE_ISNAN (cst
)
9188 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1
))))
9190 tem
= (code
== NE_EXPR
) ? integer_one_node
: integer_zero_node
;
9191 return omit_one_operand_loc (loc
, type
, tem
, arg0
);
9194 /* Fold comparisons against infinity. */
9195 if (REAL_VALUE_ISINF (cst
)
9196 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1
))))
9198 tem
= fold_inf_compare (loc
, code
, type
, arg0
, arg1
);
9199 if (tem
!= NULL_TREE
)
9204 /* If this is a comparison of a real constant with a PLUS_EXPR
9205 or a MINUS_EXPR of a real constant, we can convert it into a
9206 comparison with a revised real constant as long as no overflow
9207 occurs when unsafe_math_optimizations are enabled. */
9208 if (flag_unsafe_math_optimizations
9209 && TREE_CODE (arg1
) == REAL_CST
9210 && (TREE_CODE (arg0
) == PLUS_EXPR
9211 || TREE_CODE (arg0
) == MINUS_EXPR
)
9212 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
9213 && 0 != (tem
= const_binop (TREE_CODE (arg0
) == PLUS_EXPR
9214 ? MINUS_EXPR
: PLUS_EXPR
,
9215 arg1
, TREE_OPERAND (arg0
, 1)))
9216 && !TREE_OVERFLOW (tem
))
9217 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
9219 /* Likewise, we can simplify a comparison of a real constant with
9220 a MINUS_EXPR whose first operand is also a real constant, i.e.
9221 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9222 floating-point types only if -fassociative-math is set. */
9223 if (flag_associative_math
9224 && TREE_CODE (arg1
) == REAL_CST
9225 && TREE_CODE (arg0
) == MINUS_EXPR
9226 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == REAL_CST
9227 && 0 != (tem
= const_binop (MINUS_EXPR
, TREE_OPERAND (arg0
, 0),
9229 && !TREE_OVERFLOW (tem
))
9230 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
,
9231 TREE_OPERAND (arg0
, 1), tem
);
9233 /* Fold comparisons against built-in math functions. */
9234 if (TREE_CODE (arg1
) == REAL_CST
9235 && flag_unsafe_math_optimizations
9236 && ! flag_errno_math
)
9238 enum built_in_function fcode
= builtin_mathfn_code (arg0
);
9240 if (fcode
!= END_BUILTINS
)
9242 tem
= fold_mathfn_compare (loc
, fcode
, code
, type
, arg0
, arg1
);
9243 if (tem
!= NULL_TREE
)
9249 if (TREE_CODE (TREE_TYPE (arg0
)) == INTEGER_TYPE
9250 && CONVERT_EXPR_P (arg0
))
9252 /* If we are widening one operand of an integer comparison,
9253 see if the other operand is similarly being widened. Perhaps we
9254 can do the comparison in the narrower type. */
9255 tem
= fold_widened_comparison (loc
, code
, type
, arg0
, arg1
);
9259 /* Or if we are changing signedness. */
9260 tem
= fold_sign_changed_comparison (loc
, code
, type
, arg0
, arg1
);
9265 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9266 constant, we can simplify it. */
9267 if (TREE_CODE (arg1
) == INTEGER_CST
9268 && (TREE_CODE (arg0
) == MIN_EXPR
9269 || TREE_CODE (arg0
) == MAX_EXPR
)
9270 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
9272 tem
= optimize_minmax_comparison (loc
, code
, type
, op0
, op1
);
9277 /* Simplify comparison of something with itself. (For IEEE
9278 floating-point, we can only do some of these simplifications.) */
9279 if (operand_equal_p (arg0
, arg1
, 0))
9284 if (! FLOAT_TYPE_P (TREE_TYPE (arg0
))
9285 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
9286 return constant_boolean_node (1, type
);
9291 if (! FLOAT_TYPE_P (TREE_TYPE (arg0
))
9292 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
9293 return constant_boolean_node (1, type
);
9294 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
, arg1
);
9297 /* For NE, we can only do this simplification if integer
9298 or we don't honor IEEE floating point NaNs. */
9299 if (FLOAT_TYPE_P (TREE_TYPE (arg0
))
9300 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
9302 /* ... fall through ... */
9305 return constant_boolean_node (0, type
);
9311 /* If we are comparing an expression that just has comparisons
9312 of two integer values, arithmetic expressions of those comparisons,
9313 and constants, we can simplify it. There are only three cases
9314 to check: the two values can either be equal, the first can be
9315 greater, or the second can be greater. Fold the expression for
9316 those three values. Since each value must be 0 or 1, we have
9317 eight possibilities, each of which corresponds to the constant 0
9318 or 1 or one of the six possible comparisons.
9320 This handles common cases like (a > b) == 0 but also handles
9321 expressions like ((x > y) - (y > x)) > 0, which supposedly
9322 occur in macroized code. */
9324 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) != INTEGER_CST
)
9326 tree cval1
= 0, cval2
= 0;
9329 if (twoval_comparison_p (arg0
, &cval1
, &cval2
, &save_p
)
9330 /* Don't handle degenerate cases here; they should already
9331 have been handled anyway. */
9332 && cval1
!= 0 && cval2
!= 0
9333 && ! (TREE_CONSTANT (cval1
) && TREE_CONSTANT (cval2
))
9334 && TREE_TYPE (cval1
) == TREE_TYPE (cval2
)
9335 && INTEGRAL_TYPE_P (TREE_TYPE (cval1
))
9336 && TYPE_MAX_VALUE (TREE_TYPE (cval1
))
9337 && TYPE_MAX_VALUE (TREE_TYPE (cval2
))
9338 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1
)),
9339 TYPE_MAX_VALUE (TREE_TYPE (cval2
)), 0))
9341 tree maxval
= TYPE_MAX_VALUE (TREE_TYPE (cval1
));
9342 tree minval
= TYPE_MIN_VALUE (TREE_TYPE (cval1
));
9344 /* We can't just pass T to eval_subst in case cval1 or cval2
9345 was the same as ARG1. */
9348 = fold_build2_loc (loc
, code
, type
,
9349 eval_subst (loc
, arg0
, cval1
, maxval
,
9353 = fold_build2_loc (loc
, code
, type
,
9354 eval_subst (loc
, arg0
, cval1
, maxval
,
9358 = fold_build2_loc (loc
, code
, type
,
9359 eval_subst (loc
, arg0
, cval1
, minval
,
9363 /* All three of these results should be 0 or 1. Confirm they are.
9364 Then use those values to select the proper code to use. */
9366 if (TREE_CODE (high_result
) == INTEGER_CST
9367 && TREE_CODE (equal_result
) == INTEGER_CST
9368 && TREE_CODE (low_result
) == INTEGER_CST
)
9370 /* Make a 3-bit mask with the high-order bit being the
9371 value for `>', the next for '=', and the low for '<'. */
9372 switch ((integer_onep (high_result
) * 4)
9373 + (integer_onep (equal_result
) * 2)
9374 + integer_onep (low_result
))
9378 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
9399 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
9404 tem
= save_expr (build2 (code
, type
, cval1
, cval2
));
9405 SET_EXPR_LOCATION (tem
, loc
);
9408 return fold_build2_loc (loc
, code
, type
, cval1
, cval2
);
9413 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9414 into a single range test. */
9415 if ((TREE_CODE (arg0
) == TRUNC_DIV_EXPR
9416 || TREE_CODE (arg0
) == EXACT_DIV_EXPR
)
9417 && TREE_CODE (arg1
) == INTEGER_CST
9418 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
9419 && !integer_zerop (TREE_OPERAND (arg0
, 1))
9420 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1))
9421 && !TREE_OVERFLOW (arg1
))
9423 tem
= fold_div_compare (loc
, code
, type
, arg0
, arg1
);
9424 if (tem
!= NULL_TREE
)
9428 /* Fold ~X op ~Y as Y op X. */
9429 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9430 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
9432 tree cmp_type
= TREE_TYPE (TREE_OPERAND (arg0
, 0));
9433 return fold_build2_loc (loc
, code
, type
,
9434 fold_convert_loc (loc
, cmp_type
,
9435 TREE_OPERAND (arg1
, 0)),
9436 TREE_OPERAND (arg0
, 0));
9439 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9440 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9441 && (TREE_CODE (arg1
) == INTEGER_CST
|| TREE_CODE (arg1
) == VECTOR_CST
))
9443 tree cmp_type
= TREE_TYPE (TREE_OPERAND (arg0
, 0));
9444 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
,
9445 TREE_OPERAND (arg0
, 0),
9446 fold_build1_loc (loc
, BIT_NOT_EXPR
, cmp_type
,
9447 fold_convert_loc (loc
, cmp_type
, arg1
)));
9454 /* Subroutine of fold_binary. Optimize complex multiplications of the
9455 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9456 argument EXPR represents the expression "z" of type TYPE. */
9459 fold_mult_zconjz (location_t loc
, tree type
, tree expr
)
9461 tree itype
= TREE_TYPE (type
);
9462 tree rpart
, ipart
, tem
;
9464 if (TREE_CODE (expr
) == COMPLEX_EXPR
)
9466 rpart
= TREE_OPERAND (expr
, 0);
9467 ipart
= TREE_OPERAND (expr
, 1);
9469 else if (TREE_CODE (expr
) == COMPLEX_CST
)
9471 rpart
= TREE_REALPART (expr
);
9472 ipart
= TREE_IMAGPART (expr
);
9476 expr
= save_expr (expr
);
9477 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, itype
, expr
);
9478 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, itype
, expr
);
9481 rpart
= save_expr (rpart
);
9482 ipart
= save_expr (ipart
);
9483 tem
= fold_build2_loc (loc
, PLUS_EXPR
, itype
,
9484 fold_build2_loc (loc
, MULT_EXPR
, itype
, rpart
, rpart
),
9485 fold_build2_loc (loc
, MULT_EXPR
, itype
, ipart
, ipart
));
9486 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, tem
,
9487 build_zero_cst (itype
));
/* Subroutine of fold_binary.  If P is the value of EXPR, computes
   power-of-two M and (arbitrary) N such that M divides (P-N).  This condition
   guarantees that P and N have the same least significant log2(M) bits.
   N is not otherwise constrained.  In particular, N is not normalized to
   0 <= N < M as is common.  In general, the precise value of P is unknown.
   M is chosen as large as possible such that constant N can be determined.

   Returns M and sets *RESIDUE to N.

   If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
   account.  This is not always possible due to PR 35705.  */

static unsigned HOST_WIDE_INT
get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
				 bool allow_func_align)
{
  enum tree_code code;

  *residue = 0;

  code = TREE_CODE (expr);
  if (code == ADDR_EXPR)
    {
      unsigned int bitalign;
      /* The known alignment of the pointed-to object gives the modulus;
	 the known misalignment (in bits) gives the residue.  */
      get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
      *residue /= BITS_PER_UNIT;
      return bitalign / BITS_PER_UNIT;
    }
  else if (code == POINTER_PLUS_EXPR)
    {
      tree op0, op1;
      unsigned HOST_WIDE_INT modulus;
      enum tree_code inner_code;

      /* Recurse on the base pointer first.  */
      op0 = TREE_OPERAND (expr, 0);
      STRIP_NOPS (op0);
      modulus = get_pointer_modulus_and_residue (op0, residue,
						 allow_func_align);

      op1 = TREE_OPERAND (expr, 1);
      STRIP_NOPS (op1);
      inner_code = TREE_CODE (op1);
      if (inner_code == INTEGER_CST)
	{
	  /* A constant offset simply shifts the residue.  */
	  *residue += TREE_INT_CST_LOW (op1);
	  return modulus;
	}
      else if (inner_code == MULT_EXPR)
	{
	  op1 = TREE_OPERAND (op1, 1);
	  if (TREE_CODE (op1) == INTEGER_CST)
	    {
	      unsigned HOST_WIDE_INT align;

	      /* Compute the greatest power-of-2 divisor of op1.  */
	      align = TREE_INT_CST_LOW (op1);
	      align &= -align;

	      /* If align is non-zero and less than *modulus, replace
		 *modulus with align., If align is 0, then either op1 is 0
		 or the greatest power-of-2 divisor of op1 doesn't fit in an
		 unsigned HOST_WIDE_INT.  In either case, no additional
		 constraint is imposed.  */
	      if (align)
		modulus = MIN (modulus, align);

	      return modulus;
	    }
	}
    }

  /* If we get here, we were unable to determine anything useful about the
     expression.  */
  return 1;
}
/* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
   CONSTRUCTOR ARG into array ELTS and return true if successful.
   ELTS must have room for TYPE_VECTOR_SUBPARTS (TREE_TYPE (ARG))
   entries; missing trailing CONSTRUCTOR elements are zero-filled.  */

static bool
vec_cst_ctor_to_array (tree arg, tree *elts)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;

  if (TREE_CODE (arg) == VECTOR_CST)
    {
      for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
	elts[i] = VECTOR_CST_ELT (arg, i);
    }
  else if (TREE_CODE (arg) == CONSTRUCTOR)
    {
      constructor_elt *elt;

      /* Give up on excess elements or nested vector values, which we
	 cannot map 1:1 onto scalar lanes.  */
      FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
	if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
	  return false;
	else
	  elts[i] = elt->value;
    }
  else
    return false;

  /* Pad any remaining lanes with zero of the element type.  */
  for (; i < nelts; i++)
    elts[i]
      = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);

  return true;
}
/* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
   selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
   NULL_TREE otherwise.  */

static tree
fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
  tree *elts;
  bool need_ctor = false;

  gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
	      && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
  if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
      || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
    return NULL_TREE;

  /* ELTS layout: [0, nelts) holds ARG0's lanes, [nelts, 2*nelts) holds
     ARG1's lanes (so SEL indexes the concatenation directly), and
     [2*nelts, 3*nelts) receives the permuted result.  */
  elts = XALLOCAVEC (tree, nelts * 3);
  if (!vec_cst_ctor_to_array (arg0, elts)
      || !vec_cst_ctor_to_array (arg1, elts + nelts))
    return NULL_TREE;

  for (i = 0; i < nelts; i++)
    {
      /* A non-constant lane forces a CONSTRUCTOR result rather than
	 a VECTOR_CST.  */
      if (!CONSTANT_CLASS_P (elts[sel[i]]))
	need_ctor = true;
      elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
    }

  if (need_ctor)
    {
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, nelts);
      for (i = 0; i < nelts; i++)
	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
      return build_constructor (type, v);
    }
  else
    return build_vector (type, &elts[2 * nelts]);
}
/* Try to fold a pointer difference of type TYPE two address expressions of
   array references AREF0 and AREF1 using location LOC.  Return a
   simplified expression for the difference or NULL_TREE.  */

static tree
fold_addr_of_array_ref_difference (location_t loc, tree type,
				   tree aref0, tree aref1)
{
  tree base0 = TREE_OPERAND (aref0, 0);
  tree base1 = TREE_OPERAND (aref1, 0);
  tree base_offset = build_int_cst (type, 0);

  /* If the bases are array references as well, recurse.  If the bases
     are pointer indirections compute the difference of the pointers.
     If the bases are equal, we are set.  */
  if ((TREE_CODE (base0) == ARRAY_REF
       && TREE_CODE (base1) == ARRAY_REF
       && (base_offset
	   = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
      || (INDIRECT_REF_P (base0)
	  && INDIRECT_REF_P (base1)
	  && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
					     TREE_OPERAND (base0, 0),
					     TREE_OPERAND (base1, 0))))
      || operand_equal_p (base0, base1, 0))
    {
      /* Result is base_offset + (index0 - index1) * element_size.  */
      tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
      tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
      tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
      tree diff = build2 (MINUS_EXPR, type, op0, op1);
      return fold_build2_loc (loc, PLUS_EXPR, type,
			      base_offset,
			      fold_build2_loc (loc, MULT_EXPR, type,
					       diff, esz));
    }
  return NULL_TREE;
}
/* If the real or vector real constant CST of type TYPE has an exact
   inverse, return it, else return NULL.  For a vector, every lane must
   have an exact inverse or the whole fold is abandoned.  */

static tree
exact_inverse (tree type, tree cst)
{
  REAL_VALUE_TYPE r;
  tree unit_type, *elts;
  enum machine_mode mode;
  unsigned vec_nelts, i;

  switch (TREE_CODE (cst))
    {
    case REAL_CST:
      r = TREE_REAL_CST (cst);

      if (exact_real_inverse (TYPE_MODE (type), &r))
	return build_real (type, r);

      return NULL_TREE;

    case VECTOR_CST:
      vec_nelts = VECTOR_CST_NELTS (cst);
      elts = XALLOCAVEC (tree, vec_nelts);
      unit_type = TREE_TYPE (type);
      mode = TYPE_MODE (unit_type);

      for (i = 0; i < vec_nelts; i++)
	{
	  r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
	  if (!exact_real_inverse (mode, &r))
	    return NULL_TREE;
	  elts[i] = build_real (unit_type, r);
	}

      return build_vector (type, elts);

    default:
      return NULL_TREE;
    }
}
/* Mask out the tz least significant bits of X of type TYPE where
   tz is the number of trailing zeroes in Y.  */

static wide_int
mask_with_tz (tree type, const wide_int &x, const wide_int &y)
{
  int tz = wi::ctz (y);
  if (tz > 0)
    /* wi::mask with negate=true builds ~((1 << tz) - 1), i.e. clears
       the low TZ bits of X.  */
    return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
  return x;
}
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  /* First dispatch on the tree code class, then on individual codes
     that the class switch does not cover.  */
  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      /* The value of these is the value of the second operand.  */
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					strict_overflow_p);

    case CALL_EXPR:
      {
	tree fndecl = get_callee_fndecl (t);
	if (!fndecl) return false;
	/* A throwing operator new never returns a null pointer when
	   null-pointer checks may be deleted and -fcheck-new is off.  */
	if (flag_delete_null_pointer_checks && !flag_check_new
	    && DECL_IS_OPERATOR_NEW (fndecl)
	    && !TREE_NOTHROW (fndecl))
	  return true;
	/* Likewise functions declared with the returns_nonnull
	   attribute.  */
	if (flag_delete_null_pointer_checks
	    && lookup_attribute ("returns_nonnull",
		 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
	  return true;

	return alloca_call_p (t);
      }

    default:
      break;
    }
  return false;
}
9824 /* Return true when T is an address and is known to be nonzero.
9825 Handle warnings about undefined signed overflow. */
9828 tree_expr_nonzero_p (tree t
)
9830 bool ret
, strict_overflow_p
;
9832 strict_overflow_p
= false;
9833 ret
= tree_expr_nonzero_warnv_p (t
, &strict_overflow_p
);
9834 if (strict_overflow_p
)
9835 fold_overflow_warning (("assuming signed overflow does not occur when "
9836 "determining that expression is always "
9838 WARN_STRICT_OVERFLOW_MISC
);
9842 /* Fold a binary expression of code CODE and type TYPE with operands
9843 OP0 and OP1. LOC is the location of the resulting expression.
9844 Return the folded expression if folding is successful. Otherwise,
9845 return NULL_TREE. */
9848 fold_binary_loc (location_t loc
,
9849 enum tree_code code
, tree type
, tree op0
, tree op1
)
9851 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
9852 tree arg0
, arg1
, tem
;
9853 tree t1
= NULL_TREE
;
9854 bool strict_overflow_p
;
9857 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
9858 && TREE_CODE_LENGTH (code
) == 2
9860 && op1
!= NULL_TREE
);
9865 /* Strip any conversions that don't change the mode. This is
9866 safe for every expression, except for a comparison expression
9867 because its signedness is derived from its operands. So, in
9868 the latter case, only strip conversions that don't change the
9869 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9872 Note that this is done as an internal manipulation within the
9873 constant folder, in order to find the simplest representation
9874 of the arguments so that their form can be studied. In any
9875 cases, the appropriate type conversions should be put back in
9876 the tree that will get out of the constant folder. */
9878 if (kind
== tcc_comparison
|| code
== MIN_EXPR
|| code
== MAX_EXPR
)
9880 STRIP_SIGN_NOPS (arg0
);
9881 STRIP_SIGN_NOPS (arg1
);
9889 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9890 constant but we can't do arithmetic on them. */
9891 if ((TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
9892 || (TREE_CODE (arg0
) == REAL_CST
&& TREE_CODE (arg1
) == REAL_CST
)
9893 || (TREE_CODE (arg0
) == FIXED_CST
&& TREE_CODE (arg1
) == FIXED_CST
)
9894 || (TREE_CODE (arg0
) == FIXED_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
9895 || (TREE_CODE (arg0
) == COMPLEX_CST
&& TREE_CODE (arg1
) == COMPLEX_CST
)
9896 || (TREE_CODE (arg0
) == VECTOR_CST
&& TREE_CODE (arg1
) == VECTOR_CST
)
9897 || (TREE_CODE (arg0
) == VECTOR_CST
&& TREE_CODE (arg1
) == INTEGER_CST
))
9899 if (kind
== tcc_binary
)
9901 /* Make sure type and arg0 have the same saturating flag. */
9902 gcc_assert (TYPE_SATURATING (type
)
9903 == TYPE_SATURATING (TREE_TYPE (arg0
)));
9904 tem
= const_binop (code
, arg0
, arg1
);
9906 else if (kind
== tcc_comparison
)
9907 tem
= fold_relational_const (code
, type
, arg0
, arg1
);
9911 if (tem
!= NULL_TREE
)
9913 if (TREE_TYPE (tem
) != type
)
9914 tem
= fold_convert_loc (loc
, type
, tem
);
9919 /* If this is a commutative operation, and ARG0 is a constant, move it
9920 to ARG1 to reduce the number of tests below. */
9921 if (commutative_tree_code (code
)
9922 && tree_swap_operands_p (arg0
, arg1
, true))
9923 return fold_build2_loc (loc
, code
, type
, op1
, op0
);
9925 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9926 to ARG1 to reduce the number of tests below. */
9927 if (kind
== tcc_comparison
9928 && tree_swap_operands_p (arg0
, arg1
, true))
9929 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
, op1
, op0
);
9931 tem
= generic_simplify (loc
, code
, type
, op0
, op1
);
9935 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9937 First check for cases where an arithmetic operation is applied to a
9938 compound, conditional, or comparison operation. Push the arithmetic
9939 operation inside the compound or conditional to see if any folding
9940 can then be done. Convert comparison to conditional for this purpose.
9941 The also optimizes non-constant cases that used to be done in
9944 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9945 one of the operands is a comparison and the other is a comparison, a
9946 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9947 code below would make the expression more complex. Change it to a
9948 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9949 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9951 if ((code
== BIT_AND_EXPR
|| code
== BIT_IOR_EXPR
9952 || code
== EQ_EXPR
|| code
== NE_EXPR
)
9953 && TREE_CODE (type
) != VECTOR_TYPE
9954 && ((truth_value_p (TREE_CODE (arg0
))
9955 && (truth_value_p (TREE_CODE (arg1
))
9956 || (TREE_CODE (arg1
) == BIT_AND_EXPR
9957 && integer_onep (TREE_OPERAND (arg1
, 1)))))
9958 || (truth_value_p (TREE_CODE (arg1
))
9959 && (truth_value_p (TREE_CODE (arg0
))
9960 || (TREE_CODE (arg0
) == BIT_AND_EXPR
9961 && integer_onep (TREE_OPERAND (arg0
, 1)))))))
9963 tem
= fold_build2_loc (loc
, code
== BIT_AND_EXPR
? TRUTH_AND_EXPR
9964 : code
== BIT_IOR_EXPR
? TRUTH_OR_EXPR
9967 fold_convert_loc (loc
, boolean_type_node
, arg0
),
9968 fold_convert_loc (loc
, boolean_type_node
, arg1
));
9970 if (code
== EQ_EXPR
)
9971 tem
= invert_truthvalue_loc (loc
, tem
);
9973 return fold_convert_loc (loc
, type
, tem
);
9976 if (TREE_CODE_CLASS (code
) == tcc_binary
9977 || TREE_CODE_CLASS (code
) == tcc_comparison
)
9979 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
9981 tem
= fold_build2_loc (loc
, code
, type
,
9982 fold_convert_loc (loc
, TREE_TYPE (op0
),
9983 TREE_OPERAND (arg0
, 1)), op1
);
9984 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
9987 if (TREE_CODE (arg1
) == COMPOUND_EXPR
9988 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
9990 tem
= fold_build2_loc (loc
, code
, type
, op0
,
9991 fold_convert_loc (loc
, TREE_TYPE (op1
),
9992 TREE_OPERAND (arg1
, 1)));
9993 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg1
, 0),
9997 if (TREE_CODE (arg0
) == COND_EXPR
9998 || TREE_CODE (arg0
) == VEC_COND_EXPR
9999 || COMPARISON_CLASS_P (arg0
))
10001 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
10003 /*cond_first_p=*/1);
10004 if (tem
!= NULL_TREE
)
10008 if (TREE_CODE (arg1
) == COND_EXPR
10009 || TREE_CODE (arg1
) == VEC_COND_EXPR
10010 || COMPARISON_CLASS_P (arg1
))
10012 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
10014 /*cond_first_p=*/0);
10015 if (tem
!= NULL_TREE
)
10023 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10024 if (TREE_CODE (arg0
) == ADDR_EXPR
10025 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == MEM_REF
)
10027 tree iref
= TREE_OPERAND (arg0
, 0);
10028 return fold_build2 (MEM_REF
, type
,
10029 TREE_OPERAND (iref
, 0),
10030 int_const_binop (PLUS_EXPR
, arg1
,
10031 TREE_OPERAND (iref
, 1)));
10034 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10035 if (TREE_CODE (arg0
) == ADDR_EXPR
10036 && handled_component_p (TREE_OPERAND (arg0
, 0)))
10039 HOST_WIDE_INT coffset
;
10040 base
= get_addr_base_and_unit_offset (TREE_OPERAND (arg0
, 0),
10044 return fold_build2 (MEM_REF
, type
,
10045 build_fold_addr_expr (base
),
10046 int_const_binop (PLUS_EXPR
, arg1
,
10047 size_int (coffset
)));
10052 case POINTER_PLUS_EXPR
:
10053 /* 0 +p index -> (type)index */
10054 if (integer_zerop (arg0
))
10055 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
10057 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10058 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
10059 && INTEGRAL_TYPE_P (TREE_TYPE (arg0
)))
10060 return fold_convert_loc (loc
, type
,
10061 fold_build2_loc (loc
, PLUS_EXPR
, sizetype
,
10062 fold_convert_loc (loc
, sizetype
,
10064 fold_convert_loc (loc
, sizetype
,
10067 /* (PTR +p B) +p A -> PTR +p (B + A) */
10068 if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
)
10071 tree arg01
= fold_convert_loc (loc
, sizetype
, TREE_OPERAND (arg0
, 1));
10072 tree arg00
= TREE_OPERAND (arg0
, 0);
10073 inner
= fold_build2_loc (loc
, PLUS_EXPR
, sizetype
,
10074 arg01
, fold_convert_loc (loc
, sizetype
, arg1
));
10075 return fold_convert_loc (loc
, type
,
10076 fold_build_pointer_plus_loc (loc
,
10080 /* PTR_CST +p CST -> CST1 */
10081 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
10082 return fold_build2_loc (loc
, PLUS_EXPR
, type
, arg0
,
10083 fold_convert_loc (loc
, type
, arg1
));
10088 /* A + (-B) -> A - B */
10089 if (TREE_CODE (arg1
) == NEGATE_EXPR
10090 && (flag_sanitize
& SANITIZE_SI_OVERFLOW
) == 0)
10091 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
10092 fold_convert_loc (loc
, type
, arg0
),
10093 fold_convert_loc (loc
, type
,
10094 TREE_OPERAND (arg1
, 0)));
10095 /* (-A) + B -> B - A */
10096 if (TREE_CODE (arg0
) == NEGATE_EXPR
10097 && reorder_operands_p (TREE_OPERAND (arg0
, 0), arg1
)
10098 && (flag_sanitize
& SANITIZE_SI_OVERFLOW
) == 0)
10099 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
10100 fold_convert_loc (loc
, type
, arg1
),
10101 fold_convert_loc (loc
, type
,
10102 TREE_OPERAND (arg0
, 0)));
10104 if (INTEGRAL_TYPE_P (type
) || VECTOR_INTEGER_TYPE_P (type
))
10106 /* Convert ~A + 1 to -A. */
10107 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
10108 && integer_each_onep (arg1
))
10109 return fold_build1_loc (loc
, NEGATE_EXPR
, type
,
10110 fold_convert_loc (loc
, type
,
10111 TREE_OPERAND (arg0
, 0)));
10113 /* ~X + X is -1. */
10114 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
10115 && !TYPE_OVERFLOW_TRAPS (type
))
10117 tree tem
= TREE_OPERAND (arg0
, 0);
10120 if (operand_equal_p (tem
, arg1
, 0))
10122 t1
= build_all_ones_cst (type
);
10123 return omit_one_operand_loc (loc
, type
, t1
, arg1
);
10127 /* X + ~X is -1. */
10128 if (TREE_CODE (arg1
) == BIT_NOT_EXPR
10129 && !TYPE_OVERFLOW_TRAPS (type
))
10131 tree tem
= TREE_OPERAND (arg1
, 0);
10134 if (operand_equal_p (arg0
, tem
, 0))
10136 t1
= build_all_ones_cst (type
);
10137 return omit_one_operand_loc (loc
, type
, t1
, arg0
);
10141 /* X + (X / CST) * -CST is X % CST. */
10142 if (TREE_CODE (arg1
) == MULT_EXPR
10143 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == TRUNC_DIV_EXPR
10144 && operand_equal_p (arg0
,
10145 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0), 0))
10147 tree cst0
= TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1);
10148 tree cst1
= TREE_OPERAND (arg1
, 1);
10149 tree sum
= fold_binary_loc (loc
, PLUS_EXPR
, TREE_TYPE (cst1
),
10151 if (sum
&& integer_zerop (sum
))
10152 return fold_convert_loc (loc
, type
,
10153 fold_build2_loc (loc
, TRUNC_MOD_EXPR
,
10154 TREE_TYPE (arg0
), arg0
,
10159 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10160 one. Make sure the type is not saturating and has the signedness of
10161 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10162 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10163 if ((TREE_CODE (arg0
) == MULT_EXPR
10164 || TREE_CODE (arg1
) == MULT_EXPR
)
10165 && !TYPE_SATURATING (type
)
10166 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg0
))
10167 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg1
))
10168 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
10170 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
10175 if (! FLOAT_TYPE_P (type
))
10177 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10178 with a constant, and the two constants have no bits in common,
10179 we should treat this as a BIT_IOR_EXPR since this may produce more
10180 simplifications. */
10181 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10182 && TREE_CODE (arg1
) == BIT_AND_EXPR
10183 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
10184 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
10185 && wi::bit_and (TREE_OPERAND (arg0
, 1),
10186 TREE_OPERAND (arg1
, 1)) == 0)
10188 code
= BIT_IOR_EXPR
;
10192 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10193 (plus (plus (mult) (mult)) (foo)) so that we can
10194 take advantage of the factoring cases below. */
10195 if (TYPE_OVERFLOW_WRAPS (type
)
10196 && (((TREE_CODE (arg0
) == PLUS_EXPR
10197 || TREE_CODE (arg0
) == MINUS_EXPR
)
10198 && TREE_CODE (arg1
) == MULT_EXPR
)
10199 || ((TREE_CODE (arg1
) == PLUS_EXPR
10200 || TREE_CODE (arg1
) == MINUS_EXPR
)
10201 && TREE_CODE (arg0
) == MULT_EXPR
)))
10203 tree parg0
, parg1
, parg
, marg
;
10204 enum tree_code pcode
;
10206 if (TREE_CODE (arg1
) == MULT_EXPR
)
10207 parg
= arg0
, marg
= arg1
;
10209 parg
= arg1
, marg
= arg0
;
10210 pcode
= TREE_CODE (parg
);
10211 parg0
= TREE_OPERAND (parg
, 0);
10212 parg1
= TREE_OPERAND (parg
, 1);
10213 STRIP_NOPS (parg0
);
10214 STRIP_NOPS (parg1
);
10216 if (TREE_CODE (parg0
) == MULT_EXPR
10217 && TREE_CODE (parg1
) != MULT_EXPR
)
10218 return fold_build2_loc (loc
, pcode
, type
,
10219 fold_build2_loc (loc
, PLUS_EXPR
, type
,
10220 fold_convert_loc (loc
, type
,
10222 fold_convert_loc (loc
, type
,
10224 fold_convert_loc (loc
, type
, parg1
));
10225 if (TREE_CODE (parg0
) != MULT_EXPR
10226 && TREE_CODE (parg1
) == MULT_EXPR
)
10228 fold_build2_loc (loc
, PLUS_EXPR
, type
,
10229 fold_convert_loc (loc
, type
, parg0
),
10230 fold_build2_loc (loc
, pcode
, type
,
10231 fold_convert_loc (loc
, type
, marg
),
10232 fold_convert_loc (loc
, type
,
10238 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10239 if (fold_real_zero_addition_p (TREE_TYPE (arg0
), arg1
, 0))
10240 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10242 /* Likewise if the operands are reversed. */
10243 if (fold_real_zero_addition_p (TREE_TYPE (arg1
), arg0
, 0))
10244 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
10246 /* Convert X + -C into X - C. */
10247 if (TREE_CODE (arg1
) == REAL_CST
10248 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
)))
10250 tem
= fold_negate_const (arg1
, type
);
10251 if (!TREE_OVERFLOW (arg1
) || !flag_trapping_math
)
10252 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
10253 fold_convert_loc (loc
, type
, arg0
),
10254 fold_convert_loc (loc
, type
, tem
));
10257 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10258 to __complex__ ( x, y ). This is not the same for SNaNs or
10259 if signed zeros are involved. */
10260 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
10261 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
10262 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
10264 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
10265 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
10266 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
10267 bool arg0rz
= false, arg0iz
= false;
10268 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
10269 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
10271 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
10272 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
10273 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
10275 tree rp
= arg1r
? arg1r
10276 : build1 (REALPART_EXPR
, rtype
, arg1
);
10277 tree ip
= arg0i
? arg0i
10278 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
10279 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10281 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
10283 tree rp
= arg0r
? arg0r
10284 : build1 (REALPART_EXPR
, rtype
, arg0
);
10285 tree ip
= arg1i
? arg1i
10286 : build1 (IMAGPART_EXPR
, rtype
, arg1
);
10287 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10292 if (flag_unsafe_math_optimizations
10293 && (TREE_CODE (arg0
) == RDIV_EXPR
|| TREE_CODE (arg0
) == MULT_EXPR
)
10294 && (TREE_CODE (arg1
) == RDIV_EXPR
|| TREE_CODE (arg1
) == MULT_EXPR
)
10295 && (tem
= distribute_real_division (loc
, code
, type
, arg0
, arg1
)))
10298 /* Convert x+x into x*2.0. */
10299 if (operand_equal_p (arg0
, arg1
, 0)
10300 && SCALAR_FLOAT_TYPE_P (type
))
10301 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
,
10302 build_real (type
, dconst2
));
10304 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10305 We associate floats only if the user has specified
10306 -fassociative-math. */
10307 if (flag_associative_math
10308 && TREE_CODE (arg1
) == PLUS_EXPR
10309 && TREE_CODE (arg0
) != MULT_EXPR
)
10311 tree tree10
= TREE_OPERAND (arg1
, 0);
10312 tree tree11
= TREE_OPERAND (arg1
, 1);
10313 if (TREE_CODE (tree11
) == MULT_EXPR
10314 && TREE_CODE (tree10
) == MULT_EXPR
)
10317 tree0
= fold_build2_loc (loc
, PLUS_EXPR
, type
, arg0
, tree10
);
10318 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tree0
, tree11
);
10321 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10322 We associate floats only if the user has specified
10323 -fassociative-math. */
10324 if (flag_associative_math
10325 && TREE_CODE (arg0
) == PLUS_EXPR
10326 && TREE_CODE (arg1
) != MULT_EXPR
)
10328 tree tree00
= TREE_OPERAND (arg0
, 0);
10329 tree tree01
= TREE_OPERAND (arg0
, 1);
10330 if (TREE_CODE (tree01
) == MULT_EXPR
10331 && TREE_CODE (tree00
) == MULT_EXPR
)
10334 tree0
= fold_build2_loc (loc
, PLUS_EXPR
, type
, tree01
, arg1
);
10335 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tree00
, tree0
);
10341 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10342 is a rotate of A by C1 bits. */
10343 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10344 is a rotate of A by B bits. */
10346 enum tree_code code0
, code1
;
10348 code0
= TREE_CODE (arg0
);
10349 code1
= TREE_CODE (arg1
);
10350 if (((code0
== RSHIFT_EXPR
&& code1
== LSHIFT_EXPR
)
10351 || (code1
== RSHIFT_EXPR
&& code0
== LSHIFT_EXPR
))
10352 && operand_equal_p (TREE_OPERAND (arg0
, 0),
10353 TREE_OPERAND (arg1
, 0), 0)
10354 && (rtype
= TREE_TYPE (TREE_OPERAND (arg0
, 0)),
10355 TYPE_UNSIGNED (rtype
))
10356 /* Only create rotates in complete modes. Other cases are not
10357 expanded properly. */
10358 && (element_precision (rtype
)
10359 == element_precision (TYPE_MODE (rtype
))))
10361 tree tree01
, tree11
;
10362 enum tree_code code01
, code11
;
10364 tree01
= TREE_OPERAND (arg0
, 1);
10365 tree11
= TREE_OPERAND (arg1
, 1);
10366 STRIP_NOPS (tree01
);
10367 STRIP_NOPS (tree11
);
10368 code01
= TREE_CODE (tree01
);
10369 code11
= TREE_CODE (tree11
);
10370 if (code01
== INTEGER_CST
10371 && code11
== INTEGER_CST
10372 && (wi::to_widest (tree01
) + wi::to_widest (tree11
)
10373 == element_precision (TREE_TYPE (TREE_OPERAND (arg0
, 0)))))
10375 tem
= build2_loc (loc
, LROTATE_EXPR
,
10376 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
10377 TREE_OPERAND (arg0
, 0),
10378 code0
== LSHIFT_EXPR
? tree01
: tree11
);
10379 return fold_convert_loc (loc
, type
, tem
);
10381 else if (code11
== MINUS_EXPR
)
10383 tree tree110
, tree111
;
10384 tree110
= TREE_OPERAND (tree11
, 0);
10385 tree111
= TREE_OPERAND (tree11
, 1);
10386 STRIP_NOPS (tree110
);
10387 STRIP_NOPS (tree111
);
10388 if (TREE_CODE (tree110
) == INTEGER_CST
10389 && 0 == compare_tree_int (tree110
,
10391 (TREE_TYPE (TREE_OPERAND
10393 && operand_equal_p (tree01
, tree111
, 0))
10395 fold_convert_loc (loc
, type
,
10396 build2 ((code0
== LSHIFT_EXPR
10399 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
10400 TREE_OPERAND (arg0
, 0), tree01
));
10402 else if (code01
== MINUS_EXPR
)
10404 tree tree010
, tree011
;
10405 tree010
= TREE_OPERAND (tree01
, 0);
10406 tree011
= TREE_OPERAND (tree01
, 1);
10407 STRIP_NOPS (tree010
);
10408 STRIP_NOPS (tree011
);
10409 if (TREE_CODE (tree010
) == INTEGER_CST
10410 && 0 == compare_tree_int (tree010
,
10412 (TREE_TYPE (TREE_OPERAND
10414 && operand_equal_p (tree11
, tree011
, 0))
10415 return fold_convert_loc
10417 build2 ((code0
!= LSHIFT_EXPR
10420 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
10421 TREE_OPERAND (arg0
, 0), tree11
));
10427 /* In most languages, can't associate operations on floats through
10428 parentheses. Rather than remember where the parentheses were, we
10429 don't associate floats at all, unless the user has specified
10430 -fassociative-math.
10431 And, we need to make sure type is not saturating. */
10433 if ((! FLOAT_TYPE_P (type
) || flag_associative_math
)
10434 && !TYPE_SATURATING (type
))
10436 tree var0
, con0
, lit0
, minus_lit0
;
10437 tree var1
, con1
, lit1
, minus_lit1
;
10441 /* Split both trees into variables, constants, and literals. Then
10442 associate each group together, the constants with literals,
10443 then the result with variables. This increases the chances of
10444 literals being recombined later and of generating relocatable
10445 expressions for the sum of a constant and literal. */
10446 var0
= split_tree (arg0
, code
, &con0
, &lit0
, &minus_lit0
, 0);
10447 var1
= split_tree (arg1
, code
, &con1
, &lit1
, &minus_lit1
,
10448 code
== MINUS_EXPR
);
10450 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10451 if (code
== MINUS_EXPR
)
10454 /* With undefined overflow prefer doing association in a type
10455 which wraps on overflow, if that is one of the operand types. */
10456 if ((POINTER_TYPE_P (type
) && POINTER_TYPE_OVERFLOW_UNDEFINED
)
10457 || (INTEGRAL_TYPE_P (type
) && !TYPE_OVERFLOW_WRAPS (type
)))
10459 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
10460 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
10461 atype
= TREE_TYPE (arg0
);
10462 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
10463 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1
)))
10464 atype
= TREE_TYPE (arg1
);
10465 gcc_assert (TYPE_PRECISION (atype
) == TYPE_PRECISION (type
));
10468 /* With undefined overflow we can only associate constants with one
10469 variable, and constants whose association doesn't overflow. */
10470 if ((POINTER_TYPE_P (atype
) && POINTER_TYPE_OVERFLOW_UNDEFINED
)
10471 || (INTEGRAL_TYPE_P (atype
) && !TYPE_OVERFLOW_WRAPS (atype
)))
10478 if (TREE_CODE (tmp0
) == NEGATE_EXPR
)
10479 tmp0
= TREE_OPERAND (tmp0
, 0);
10480 if (CONVERT_EXPR_P (tmp0
)
10481 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0
, 0)))
10482 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0
, 0)))
10483 <= TYPE_PRECISION (atype
)))
10484 tmp0
= TREE_OPERAND (tmp0
, 0);
10485 if (TREE_CODE (tmp1
) == NEGATE_EXPR
)
10486 tmp1
= TREE_OPERAND (tmp1
, 0);
10487 if (CONVERT_EXPR_P (tmp1
)
10488 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1
, 0)))
10489 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1
, 0)))
10490 <= TYPE_PRECISION (atype
)))
10491 tmp1
= TREE_OPERAND (tmp1
, 0);
10492 /* The only case we can still associate with two variables
10493 is if they are the same, modulo negation and bit-pattern
10494 preserving conversions. */
10495 if (!operand_equal_p (tmp0
, tmp1
, 0))
10500 /* Only do something if we found more than two objects. Otherwise,
10501 nothing has changed and we risk infinite recursion. */
10503 && (2 < ((var0
!= 0) + (var1
!= 0)
10504 + (con0
!= 0) + (con1
!= 0)
10505 + (lit0
!= 0) + (lit1
!= 0)
10506 + (minus_lit0
!= 0) + (minus_lit1
!= 0))))
10508 bool any_overflows
= false;
10509 if (lit0
) any_overflows
|= TREE_OVERFLOW (lit0
);
10510 if (lit1
) any_overflows
|= TREE_OVERFLOW (lit1
);
10511 if (minus_lit0
) any_overflows
|= TREE_OVERFLOW (minus_lit0
);
10512 if (minus_lit1
) any_overflows
|= TREE_OVERFLOW (minus_lit1
);
10513 var0
= associate_trees (loc
, var0
, var1
, code
, atype
);
10514 con0
= associate_trees (loc
, con0
, con1
, code
, atype
);
10515 lit0
= associate_trees (loc
, lit0
, lit1
, code
, atype
);
10516 minus_lit0
= associate_trees (loc
, minus_lit0
, minus_lit1
,
10519 /* Preserve the MINUS_EXPR if the negative part of the literal is
10520 greater than the positive part. Otherwise, the multiplicative
10521 folding code (i.e extract_muldiv) may be fooled in case
10522 unsigned constants are subtracted, like in the following
10523 example: ((X*2 + 4) - 8U)/2. */
10524 if (minus_lit0
&& lit0
)
10526 if (TREE_CODE (lit0
) == INTEGER_CST
10527 && TREE_CODE (minus_lit0
) == INTEGER_CST
10528 && tree_int_cst_lt (lit0
, minus_lit0
))
10530 minus_lit0
= associate_trees (loc
, minus_lit0
, lit0
,
10531 MINUS_EXPR
, atype
);
10536 lit0
= associate_trees (loc
, lit0
, minus_lit0
,
10537 MINUS_EXPR
, atype
);
10542 /* Don't introduce overflows through reassociation. */
10544 && ((lit0
&& TREE_OVERFLOW (lit0
))
10545 || (minus_lit0
&& TREE_OVERFLOW (minus_lit0
))))
10552 fold_convert_loc (loc
, type
,
10553 associate_trees (loc
, var0
, minus_lit0
,
10554 MINUS_EXPR
, atype
));
10557 con0
= associate_trees (loc
, con0
, minus_lit0
,
10558 MINUS_EXPR
, atype
);
10560 fold_convert_loc (loc
, type
,
10561 associate_trees (loc
, var0
, con0
,
10562 PLUS_EXPR
, atype
));
10566 con0
= associate_trees (loc
, con0
, lit0
, code
, atype
);
10568 fold_convert_loc (loc
, type
, associate_trees (loc
, var0
, con0
,
10576 /* Pointer simplifications for subtraction, simple reassociations. */
10577 if (POINTER_TYPE_P (TREE_TYPE (arg1
)) && POINTER_TYPE_P (TREE_TYPE (arg0
)))
10579 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10580 if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
10581 && TREE_CODE (arg1
) == POINTER_PLUS_EXPR
)
10583 tree arg00
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10584 tree arg01
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
10585 tree arg10
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
10586 tree arg11
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
10587 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
10588 fold_build2_loc (loc
, MINUS_EXPR
, type
,
10590 fold_build2_loc (loc
, MINUS_EXPR
, type
,
10593 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10594 else if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
)
10596 tree arg00
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10597 tree arg01
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
10598 tree tmp
= fold_binary_loc (loc
, MINUS_EXPR
, type
, arg00
,
10599 fold_convert_loc (loc
, type
, arg1
));
10601 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tmp
, arg01
);
10603 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
10605 else if (TREE_CODE (arg1
) == POINTER_PLUS_EXPR
)
10607 tree arg10
= fold_convert_loc (loc
, type
,
10608 TREE_OPERAND (arg1
, 0));
10609 tree arg11
= fold_convert_loc (loc
, type
,
10610 TREE_OPERAND (arg1
, 1));
10611 tree tmp
= fold_binary_loc (loc
, MINUS_EXPR
, type
,
10612 fold_convert_loc (loc
, type
, arg0
),
10615 return fold_build2_loc (loc
, MINUS_EXPR
, type
, tmp
, arg11
);
10618 /* A - (-B) -> A + B */
10619 if (TREE_CODE (arg1
) == NEGATE_EXPR
)
10620 return fold_build2_loc (loc
, PLUS_EXPR
, type
, op0
,
10621 fold_convert_loc (loc
, type
,
10622 TREE_OPERAND (arg1
, 0)));
10623 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10624 if (TREE_CODE (arg0
) == NEGATE_EXPR
10625 && negate_expr_p (arg1
)
10626 && reorder_operands_p (arg0
, arg1
))
10627 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
10628 fold_convert_loc (loc
, type
,
10629 negate_expr (arg1
)),
10630 fold_convert_loc (loc
, type
,
10631 TREE_OPERAND (arg0
, 0)));
10632 /* Convert -A - 1 to ~A. */
10633 if (TREE_CODE (arg0
) == NEGATE_EXPR
10634 && integer_each_onep (arg1
)
10635 && !TYPE_OVERFLOW_TRAPS (type
))
10636 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
10637 fold_convert_loc (loc
, type
,
10638 TREE_OPERAND (arg0
, 0)));
10640 /* Convert -1 - A to ~A. */
10641 if (TREE_CODE (type
) != COMPLEX_TYPE
10642 && integer_all_onesp (arg0
))
10643 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, op1
);
10646 /* X - (X / Y) * Y is X % Y. */
10647 if ((INTEGRAL_TYPE_P (type
) || VECTOR_INTEGER_TYPE_P (type
))
10648 && TREE_CODE (arg1
) == MULT_EXPR
10649 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == TRUNC_DIV_EXPR
10650 && operand_equal_p (arg0
,
10651 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0), 0)
10652 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1),
10653 TREE_OPERAND (arg1
, 1), 0))
10655 fold_convert_loc (loc
, type
,
10656 fold_build2_loc (loc
, TRUNC_MOD_EXPR
, TREE_TYPE (arg0
),
10657 arg0
, TREE_OPERAND (arg1
, 1)));
10659 if (! FLOAT_TYPE_P (type
))
10661 if (integer_zerop (arg0
))
10662 return negate_expr (fold_convert_loc (loc
, type
, arg1
));
10664 /* Fold A - (A & B) into ~B & A. */
10665 if (!TREE_SIDE_EFFECTS (arg0
)
10666 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
10668 if (operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0))
10670 tree arg10
= fold_convert_loc (loc
, type
,
10671 TREE_OPERAND (arg1
, 0));
10672 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10673 fold_build1_loc (loc
, BIT_NOT_EXPR
,
10675 fold_convert_loc (loc
, type
, arg0
));
10677 if (operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10679 tree arg11
= fold_convert_loc (loc
,
10680 type
, TREE_OPERAND (arg1
, 1));
10681 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10682 fold_build1_loc (loc
, BIT_NOT_EXPR
,
10684 fold_convert_loc (loc
, type
, arg0
));
10688 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10689 any power of 2 minus 1. */
10690 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10691 && TREE_CODE (arg1
) == BIT_AND_EXPR
10692 && operand_equal_p (TREE_OPERAND (arg0
, 0),
10693 TREE_OPERAND (arg1
, 0), 0))
10695 tree mask0
= TREE_OPERAND (arg0
, 1);
10696 tree mask1
= TREE_OPERAND (arg1
, 1);
10697 tree tem
= fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, mask0
);
10699 if (operand_equal_p (tem
, mask1
, 0))
10701 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, type
,
10702 TREE_OPERAND (arg0
, 0), mask1
);
10703 return fold_build2_loc (loc
, MINUS_EXPR
, type
, tem
, mask1
);
10708 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10709 else if (fold_real_zero_addition_p (TREE_TYPE (arg0
), arg1
, 1))
10710 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10712 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10713 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10714 (-ARG1 + ARG0) reduces to -ARG1. */
10715 else if (fold_real_zero_addition_p (TREE_TYPE (arg1
), arg0
, 0))
10716 return negate_expr (fold_convert_loc (loc
, type
, arg1
));
10718 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10719 __complex__ ( x, -y ). This is not the same for SNaNs or if
10720 signed zeros are involved. */
10721 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
10722 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
10723 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
10725 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
10726 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
10727 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
10728 bool arg0rz
= false, arg0iz
= false;
10729 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
10730 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
10732 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
10733 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
10734 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
10736 tree rp
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
10738 : build1 (REALPART_EXPR
, rtype
, arg1
));
10739 tree ip
= arg0i
? arg0i
10740 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
10741 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10743 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
10745 tree rp
= arg0r
? arg0r
10746 : build1 (REALPART_EXPR
, rtype
, arg0
);
10747 tree ip
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
10749 : build1 (IMAGPART_EXPR
, rtype
, arg1
));
10750 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10755 /* A - B -> A + (-B) if B is easily negatable. */
10756 if (negate_expr_p (arg1
)
10757 && ((FLOAT_TYPE_P (type
)
10758 /* Avoid this transformation if B is a positive REAL_CST. */
10759 && (TREE_CODE (arg1
) != REAL_CST
10760 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
))))
10761 || INTEGRAL_TYPE_P (type
)))
10762 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
10763 fold_convert_loc (loc
, type
, arg0
),
10764 fold_convert_loc (loc
, type
,
10765 negate_expr (arg1
)));
10767 /* Try folding difference of addresses. */
10769 HOST_WIDE_INT diff
;
10771 if ((TREE_CODE (arg0
) == ADDR_EXPR
10772 || TREE_CODE (arg1
) == ADDR_EXPR
)
10773 && ptr_difference_const (arg0
, arg1
, &diff
))
10774 return build_int_cst_type (type
, diff
);
10777 /* Fold &a[i] - &a[j] to i-j. */
10778 if (TREE_CODE (arg0
) == ADDR_EXPR
10779 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == ARRAY_REF
10780 && TREE_CODE (arg1
) == ADDR_EXPR
10781 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == ARRAY_REF
)
10783 tree tem
= fold_addr_of_array_ref_difference (loc
, type
,
10784 TREE_OPERAND (arg0
, 0),
10785 TREE_OPERAND (arg1
, 0));
10790 if (FLOAT_TYPE_P (type
)
10791 && flag_unsafe_math_optimizations
10792 && (TREE_CODE (arg0
) == RDIV_EXPR
|| TREE_CODE (arg0
) == MULT_EXPR
)
10793 && (TREE_CODE (arg1
) == RDIV_EXPR
|| TREE_CODE (arg1
) == MULT_EXPR
)
10794 && (tem
= distribute_real_division (loc
, code
, type
, arg0
, arg1
)))
10797 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10798 one. Make sure the type is not saturating and has the signedness of
10799 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10800 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10801 if ((TREE_CODE (arg0
) == MULT_EXPR
10802 || TREE_CODE (arg1
) == MULT_EXPR
)
10803 && !TYPE_SATURATING (type
)
10804 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg0
))
10805 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg1
))
10806 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
10808 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
10816 /* (-A) * (-B) -> A * B */
10817 if (TREE_CODE (arg0
) == NEGATE_EXPR
&& negate_expr_p (arg1
))
10818 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10819 fold_convert_loc (loc
, type
,
10820 TREE_OPERAND (arg0
, 0)),
10821 fold_convert_loc (loc
, type
,
10822 negate_expr (arg1
)));
10823 if (TREE_CODE (arg1
) == NEGATE_EXPR
&& negate_expr_p (arg0
))
10824 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10825 fold_convert_loc (loc
, type
,
10826 negate_expr (arg0
)),
10827 fold_convert_loc (loc
, type
,
10828 TREE_OPERAND (arg1
, 0)));
10830 if (! FLOAT_TYPE_P (type
))
10832 /* Transform x * -1 into -x. Make sure to do the negation
10833 on the original operand with conversions not stripped
10834 because we can only strip non-sign-changing conversions. */
10835 if (integer_minus_onep (arg1
))
10836 return fold_convert_loc (loc
, type
, negate_expr (op0
));
10837 /* Transform x * -C into -x * C if x is easily negatable. */
10838 if (TREE_CODE (arg1
) == INTEGER_CST
10839 && tree_int_cst_sgn (arg1
) == -1
10840 && negate_expr_p (arg0
)
10841 && (tem
= negate_expr (arg1
)) != arg1
10842 && !TREE_OVERFLOW (tem
))
10843 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10844 fold_convert_loc (loc
, type
,
10845 negate_expr (arg0
)),
10848 /* (a * (1 << b)) is (a << b) */
10849 if (TREE_CODE (arg1
) == LSHIFT_EXPR
10850 && integer_onep (TREE_OPERAND (arg1
, 0)))
10851 return fold_build2_loc (loc
, LSHIFT_EXPR
, type
, op0
,
10852 TREE_OPERAND (arg1
, 1));
10853 if (TREE_CODE (arg0
) == LSHIFT_EXPR
10854 && integer_onep (TREE_OPERAND (arg0
, 0)))
10855 return fold_build2_loc (loc
, LSHIFT_EXPR
, type
, op1
,
10856 TREE_OPERAND (arg0
, 1));
10858 /* (A + A) * C -> A * 2 * C */
10859 if (TREE_CODE (arg0
) == PLUS_EXPR
10860 && TREE_CODE (arg1
) == INTEGER_CST
10861 && operand_equal_p (TREE_OPERAND (arg0
, 0),
10862 TREE_OPERAND (arg0
, 1), 0))
10863 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10864 omit_one_operand_loc (loc
, type
,
10865 TREE_OPERAND (arg0
, 0),
10866 TREE_OPERAND (arg0
, 1)),
10867 fold_build2_loc (loc
, MULT_EXPR
, type
,
10868 build_int_cst (type
, 2) , arg1
));
10870 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10871 sign-changing only. */
10872 if (TREE_CODE (arg1
) == INTEGER_CST
10873 && TREE_CODE (arg0
) == EXACT_DIV_EXPR
10874 && operand_equal_p (arg1
, TREE_OPERAND (arg0
, 1), 0))
10875 return fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10877 strict_overflow_p
= false;
10878 if (TREE_CODE (arg1
) == INTEGER_CST
10879 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
10880 &strict_overflow_p
)))
10882 if (strict_overflow_p
)
10883 fold_overflow_warning (("assuming signed overflow does not "
10884 "occur when simplifying "
10886 WARN_STRICT_OVERFLOW_MISC
);
10887 return fold_convert_loc (loc
, type
, tem
);
10890 /* Optimize z * conj(z) for integer complex numbers. */
10891 if (TREE_CODE (arg0
) == CONJ_EXPR
10892 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10893 return fold_mult_zconjz (loc
, type
, arg1
);
10894 if (TREE_CODE (arg1
) == CONJ_EXPR
10895 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10896 return fold_mult_zconjz (loc
, type
, arg0
);
10900 /* Maybe fold x * 0 to 0. The expressions aren't the same
10901 when x is NaN, since x * 0 is also NaN. Nor are they the
10902 same in modes with signed zeros, since multiplying a
10903 negative value by 0 gives -0, not +0. */
10904 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
)))
10905 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
10906 && real_zerop (arg1
))
10907 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
10908 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10909 Likewise for complex arithmetic with signed zeros. */
10910 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
10911 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
10912 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
10913 && real_onep (arg1
))
10914 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10916 /* Transform x * -1.0 into -x. */
10917 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
10918 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
10919 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
10920 && real_minus_onep (arg1
))
10921 return fold_convert_loc (loc
, type
, negate_expr (arg0
));
10923 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10924 the result for floating point types due to rounding so it is applied
10925 only if -fassociative-math was specified. */
10926 if (flag_associative_math
10927 && TREE_CODE (arg0
) == RDIV_EXPR
10928 && TREE_CODE (arg1
) == REAL_CST
10929 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == REAL_CST
)
10931 tree tem
= const_binop (MULT_EXPR
, TREE_OPERAND (arg0
, 0),
10934 return fold_build2_loc (loc
, RDIV_EXPR
, type
, tem
,
10935 TREE_OPERAND (arg0
, 1));
10938 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10939 if (operand_equal_p (arg0
, arg1
, 0))
10941 tree tem
= fold_strip_sign_ops (arg0
);
10942 if (tem
!= NULL_TREE
)
10944 tem
= fold_convert_loc (loc
, type
, tem
);
10945 return fold_build2_loc (loc
, MULT_EXPR
, type
, tem
, tem
);
10949 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10950 This is not the same for NaNs or if signed zeros are
10952 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
)))
10953 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
10954 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
))
10955 && TREE_CODE (arg1
) == COMPLEX_CST
10956 && real_zerop (TREE_REALPART (arg1
)))
10958 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
10959 if (real_onep (TREE_IMAGPART (arg1
)))
10961 fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
10962 negate_expr (fold_build1_loc (loc
, IMAGPART_EXPR
,
10964 fold_build1_loc (loc
, REALPART_EXPR
, rtype
, arg0
));
10965 else if (real_minus_onep (TREE_IMAGPART (arg1
)))
10967 fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
10968 fold_build1_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
),
10969 negate_expr (fold_build1_loc (loc
, REALPART_EXPR
,
10973 /* Optimize z * conj(z) for floating point complex numbers.
10974 Guarded by flag_unsafe_math_optimizations as non-finite
10975 imaginary components don't produce scalar results. */
10976 if (flag_unsafe_math_optimizations
10977 && TREE_CODE (arg0
) == CONJ_EXPR
10978 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10979 return fold_mult_zconjz (loc
, type
, arg1
);
10980 if (flag_unsafe_math_optimizations
10981 && TREE_CODE (arg1
) == CONJ_EXPR
10982 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10983 return fold_mult_zconjz (loc
, type
, arg0
);
10985 if (flag_unsafe_math_optimizations
)
10987 enum built_in_function fcode0
= builtin_mathfn_code (arg0
);
10988 enum built_in_function fcode1
= builtin_mathfn_code (arg1
);
10990 /* Optimizations of root(...)*root(...). */
10991 if (fcode0
== fcode1
&& BUILTIN_ROOT_P (fcode0
))
10994 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
10995 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
10997 /* Optimize sqrt(x)*sqrt(x) as x. */
10998 if (BUILTIN_SQRT_P (fcode0
)
10999 && operand_equal_p (arg00
, arg10
, 0)
11000 && ! HONOR_SNANS (TYPE_MODE (type
)))
11003 /* Optimize root(x)*root(y) as root(x*y). */
11004 rootfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
11005 arg
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg00
, arg10
);
11006 return build_call_expr_loc (loc
, rootfn
, 1, arg
);
11009 /* Optimize expN(x)*expN(y) as expN(x+y). */
11010 if (fcode0
== fcode1
&& BUILTIN_EXPONENT_P (fcode0
))
11012 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
11013 tree arg
= fold_build2_loc (loc
, PLUS_EXPR
, type
,
11014 CALL_EXPR_ARG (arg0
, 0),
11015 CALL_EXPR_ARG (arg1
, 0));
11016 return build_call_expr_loc (loc
, expfn
, 1, arg
);
11019 /* Optimizations of pow(...)*pow(...). */
11020 if ((fcode0
== BUILT_IN_POW
&& fcode1
== BUILT_IN_POW
)
11021 || (fcode0
== BUILT_IN_POWF
&& fcode1
== BUILT_IN_POWF
)
11022 || (fcode0
== BUILT_IN_POWL
&& fcode1
== BUILT_IN_POWL
))
11024 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
11025 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
11026 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
11027 tree arg11
= CALL_EXPR_ARG (arg1
, 1);
11029 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11030 if (operand_equal_p (arg01
, arg11
, 0))
11032 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
11033 tree arg
= fold_build2_loc (loc
, MULT_EXPR
, type
,
11035 return build_call_expr_loc (loc
, powfn
, 2, arg
, arg01
);
11038 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11039 if (operand_equal_p (arg00
, arg10
, 0))
11041 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
11042 tree arg
= fold_build2_loc (loc
, PLUS_EXPR
, type
,
11044 return build_call_expr_loc (loc
, powfn
, 2, arg00
, arg
);
11048 /* Optimize tan(x)*cos(x) as sin(x). */
11049 if (((fcode0
== BUILT_IN_TAN
&& fcode1
== BUILT_IN_COS
)
11050 || (fcode0
== BUILT_IN_TANF
&& fcode1
== BUILT_IN_COSF
)
11051 || (fcode0
== BUILT_IN_TANL
&& fcode1
== BUILT_IN_COSL
)
11052 || (fcode0
== BUILT_IN_COS
&& fcode1
== BUILT_IN_TAN
)
11053 || (fcode0
== BUILT_IN_COSF
&& fcode1
== BUILT_IN_TANF
)
11054 || (fcode0
== BUILT_IN_COSL
&& fcode1
== BUILT_IN_TANL
))
11055 && operand_equal_p (CALL_EXPR_ARG (arg0
, 0),
11056 CALL_EXPR_ARG (arg1
, 0), 0))
11058 tree sinfn
= mathfn_built_in (type
, BUILT_IN_SIN
);
11060 if (sinfn
!= NULL_TREE
)
11061 return build_call_expr_loc (loc
, sinfn
, 1,
11062 CALL_EXPR_ARG (arg0
, 0));
11065 /* Optimize x*pow(x,c) as pow(x,c+1). */
11066 if (fcode1
== BUILT_IN_POW
11067 || fcode1
== BUILT_IN_POWF
11068 || fcode1
== BUILT_IN_POWL
)
11070 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
11071 tree arg11
= CALL_EXPR_ARG (arg1
, 1);
11072 if (TREE_CODE (arg11
) == REAL_CST
11073 && !TREE_OVERFLOW (arg11
)
11074 && operand_equal_p (arg0
, arg10
, 0))
11076 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
11080 c
= TREE_REAL_CST (arg11
);
11081 real_arithmetic (&c
, PLUS_EXPR
, &c
, &dconst1
);
11082 arg
= build_real (type
, c
);
11083 return build_call_expr_loc (loc
, powfn
, 2, arg0
, arg
);
11087 /* Optimize pow(x,c)*x as pow(x,c+1). */
11088 if (fcode0
== BUILT_IN_POW
11089 || fcode0
== BUILT_IN_POWF
11090 || fcode0
== BUILT_IN_POWL
)
11092 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
11093 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
11094 if (TREE_CODE (arg01
) == REAL_CST
11095 && !TREE_OVERFLOW (arg01
)
11096 && operand_equal_p (arg1
, arg00
, 0))
11098 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
11102 c
= TREE_REAL_CST (arg01
);
11103 real_arithmetic (&c
, PLUS_EXPR
, &c
, &dconst1
);
11104 arg
= build_real (type
, c
);
11105 return build_call_expr_loc (loc
, powfn
, 2, arg1
, arg
);
11109 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11110 if (!in_gimple_form
11112 && operand_equal_p (arg0
, arg1
, 0))
11114 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
11118 tree arg
= build_real (type
, dconst2
);
11119 return build_call_expr_loc (loc
, powfn
, 2, arg0
, arg
);
11128 if (operand_equal_p (arg0
, arg1
, 0))
11129 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11131 /* ~X | X is -1. */
11132 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11133 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11135 t1
= build_zero_cst (type
);
11136 t1
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
, t1
);
11137 return omit_one_operand_loc (loc
, type
, t1
, arg1
);
11140 /* X | ~X is -1. */
11141 if (TREE_CODE (arg1
) == BIT_NOT_EXPR
11142 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11144 t1
= build_zero_cst (type
);
11145 t1
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
, t1
);
11146 return omit_one_operand_loc (loc
, type
, t1
, arg0
);
11149 /* Canonicalize (X & C1) | C2. */
11150 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11151 && TREE_CODE (arg1
) == INTEGER_CST
11152 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11154 int width
= TYPE_PRECISION (type
), w
;
11155 wide_int c1
= TREE_OPERAND (arg0
, 1);
11156 wide_int c2
= arg1
;
11158 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11159 if ((c1
& c2
) == c1
)
11160 return omit_one_operand_loc (loc
, type
, arg1
,
11161 TREE_OPERAND (arg0
, 0));
11163 wide_int msk
= wi::mask (width
, false,
11164 TYPE_PRECISION (TREE_TYPE (arg1
)));
11166 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11167 if (msk
.and_not (c1
| c2
) == 0)
11168 return fold_build2_loc (loc
, BIT_IOR_EXPR
, type
,
11169 TREE_OPERAND (arg0
, 0), arg1
);
11171 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11172 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11173 mode which allows further optimizations. */
11176 wide_int c3
= c1
.and_not (c2
);
11177 for (w
= BITS_PER_UNIT
; w
<= width
; w
<<= 1)
11179 wide_int mask
= wi::mask (w
, false,
11180 TYPE_PRECISION (type
));
11181 if (((c1
| c2
) & mask
) == mask
&& c1
.and_not (mask
) == 0)
11189 return fold_build2_loc (loc
, BIT_IOR_EXPR
, type
,
11190 fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11191 TREE_OPERAND (arg0
, 0),
11192 wide_int_to_tree (type
,
11197 /* (X & Y) | Y is (X, Y). */
11198 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11199 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11200 return omit_one_operand_loc (loc
, type
, arg1
, TREE_OPERAND (arg0
, 0));
11201 /* (X & Y) | X is (Y, X). */
11202 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11203 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
11204 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
11205 return omit_one_operand_loc (loc
, type
, arg1
, TREE_OPERAND (arg0
, 1));
11206 /* X | (X & Y) is (Y, X). */
11207 if (TREE_CODE (arg1
) == BIT_AND_EXPR
11208 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0)
11209 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 1)))
11210 return omit_one_operand_loc (loc
, type
, arg0
, TREE_OPERAND (arg1
, 1));
11211 /* X | (Y & X) is (Y, X). */
11212 if (TREE_CODE (arg1
) == BIT_AND_EXPR
11213 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
11214 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
11215 return omit_one_operand_loc (loc
, type
, arg0
, TREE_OPERAND (arg1
, 0));
11217 /* (X & ~Y) | (~X & Y) is X ^ Y */
11218 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11219 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
11221 tree a0
, a1
, l0
, l1
, n0
, n1
;
11223 a0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
11224 a1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
11226 l0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11227 l1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11229 n0
= fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, l0
);
11230 n1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, l1
);
11232 if ((operand_equal_p (n0
, a0
, 0)
11233 && operand_equal_p (n1
, a1
, 0))
11234 || (operand_equal_p (n0
, a1
, 0)
11235 && operand_equal_p (n1
, a0
, 0)))
11236 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
, l0
, n1
);
11239 t1
= distribute_bit_expr (loc
, code
, type
, arg0
, arg1
);
11240 if (t1
!= NULL_TREE
)
11243 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11245 This results in more efficient code for machines without a NAND
11246 instruction. Combine will canonicalize to the first form
11247 which will allow use of NAND instructions provided by the
11248 backend if they exist. */
11249 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11250 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
11253 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
11254 build2 (BIT_AND_EXPR
, type
,
11255 fold_convert_loc (loc
, type
,
11256 TREE_OPERAND (arg0
, 0)),
11257 fold_convert_loc (loc
, type
,
11258 TREE_OPERAND (arg1
, 0))));
11261 /* See if this can be simplified into a rotate first. If that
11262 is unsuccessful continue in the association code. */
11266 if (integer_all_onesp (arg1
))
11267 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, op0
);
11269 /* ~X ^ X is -1. */
11270 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11271 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11273 t1
= build_zero_cst (type
);
11274 t1
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
, t1
);
11275 return omit_one_operand_loc (loc
, type
, t1
, arg1
);
11278 /* X ^ ~X is -1. */
11279 if (TREE_CODE (arg1
) == BIT_NOT_EXPR
11280 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11282 t1
= build_zero_cst (type
);
11283 t1
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
, t1
);
11284 return omit_one_operand_loc (loc
, type
, t1
, arg0
);
11287 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11288 with a constant, and the two constants have no bits in common,
11289 we should treat this as a BIT_IOR_EXPR since this may produce more
11290 simplifications. */
11291 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11292 && TREE_CODE (arg1
) == BIT_AND_EXPR
11293 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
11294 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
11295 && wi::bit_and (TREE_OPERAND (arg0
, 1),
11296 TREE_OPERAND (arg1
, 1)) == 0)
11298 code
= BIT_IOR_EXPR
;
11302 /* (X | Y) ^ X -> Y & ~ X*/
11303 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
11304 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11306 tree t2
= TREE_OPERAND (arg0
, 1);
11307 t1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg1
),
11309 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11310 fold_convert_loc (loc
, type
, t2
),
11311 fold_convert_loc (loc
, type
, t1
));
11315 /* (Y | X) ^ X -> Y & ~ X*/
11316 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
11317 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11319 tree t2
= TREE_OPERAND (arg0
, 0);
11320 t1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg1
),
11322 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11323 fold_convert_loc (loc
, type
, t2
),
11324 fold_convert_loc (loc
, type
, t1
));
11328 /* X ^ (X | Y) -> Y & ~ X*/
11329 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
11330 && operand_equal_p (TREE_OPERAND (arg1
, 0), arg0
, 0))
11332 tree t2
= TREE_OPERAND (arg1
, 1);
11333 t1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg0
),
11335 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11336 fold_convert_loc (loc
, type
, t2
),
11337 fold_convert_loc (loc
, type
, t1
));
11341 /* X ^ (Y | X) -> Y & ~ X*/
11342 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
11343 && operand_equal_p (TREE_OPERAND (arg1
, 1), arg0
, 0))
11345 tree t2
= TREE_OPERAND (arg1
, 0);
11346 t1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg0
),
11348 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11349 fold_convert_loc (loc
, type
, t2
),
11350 fold_convert_loc (loc
, type
, t1
));
11354 /* Convert ~X ^ ~Y to X ^ Y. */
11355 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11356 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
11357 return fold_build2_loc (loc
, code
, type
,
11358 fold_convert_loc (loc
, type
,
11359 TREE_OPERAND (arg0
, 0)),
11360 fold_convert_loc (loc
, type
,
11361 TREE_OPERAND (arg1
, 0)));
11363 /* Convert ~X ^ C to X ^ ~C. */
11364 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11365 && TREE_CODE (arg1
) == INTEGER_CST
)
11366 return fold_build2_loc (loc
, code
, type
,
11367 fold_convert_loc (loc
, type
,
11368 TREE_OPERAND (arg0
, 0)),
11369 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, arg1
));
11371 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11372 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11373 && INTEGRAL_TYPE_P (type
)
11374 && integer_onep (TREE_OPERAND (arg0
, 1))
11375 && integer_onep (arg1
))
11376 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
,
11377 build_zero_cst (TREE_TYPE (arg0
)));
11379 /* Fold (X & Y) ^ Y as ~X & Y. */
11380 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11381 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11383 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11384 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11385 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11386 fold_convert_loc (loc
, type
, arg1
));
11388 /* Fold (X & Y) ^ X as ~Y & X. */
11389 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11390 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
11391 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
11393 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11394 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11395 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11396 fold_convert_loc (loc
, type
, arg1
));
11398 /* Fold X ^ (X & Y) as X & ~Y. */
11399 if (TREE_CODE (arg1
) == BIT_AND_EXPR
11400 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11402 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
11403 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11404 fold_convert_loc (loc
, type
, arg0
),
11405 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
));
11407 /* Fold X ^ (Y & X) as ~Y & X. */
11408 if (TREE_CODE (arg1
) == BIT_AND_EXPR
11409 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
11410 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
11412 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
11413 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11414 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11415 fold_convert_loc (loc
, type
, arg0
));
11418 /* See if this can be simplified into a rotate first. If that
11419 is unsuccessful continue in the association code. */
11423 if (integer_all_onesp (arg1
))
11424 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11425 if (operand_equal_p (arg0
, arg1
, 0))
11426 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11428 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11429 if ((TREE_CODE (arg0
) == BIT_NOT_EXPR
11430 || TREE_CODE (arg0
) == TRUTH_NOT_EXPR
11431 || (TREE_CODE (arg0
) == EQ_EXPR
11432 && integer_zerop (TREE_OPERAND (arg0
, 1))))
11433 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11434 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg1
);
11436 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11437 if ((TREE_CODE (arg1
) == BIT_NOT_EXPR
11438 || TREE_CODE (arg1
) == TRUTH_NOT_EXPR
11439 || (TREE_CODE (arg1
) == EQ_EXPR
11440 && integer_zerop (TREE_OPERAND (arg1
, 1))))
11441 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11442 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
11444 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11445 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
11446 && TREE_CODE (arg1
) == INTEGER_CST
11447 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11449 tree tmp1
= fold_convert_loc (loc
, type
, arg1
);
11450 tree tmp2
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11451 tree tmp3
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11452 tmp2
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
, tmp2
, tmp1
);
11453 tmp3
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
, tmp3
, tmp1
);
11455 fold_convert_loc (loc
, type
,
11456 fold_build2_loc (loc
, BIT_IOR_EXPR
,
11457 type
, tmp2
, tmp3
));
11460 /* (X | Y) & Y is (X, Y). */
11461 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
11462 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11463 return omit_one_operand_loc (loc
, type
, arg1
, TREE_OPERAND (arg0
, 0));
11464 /* (X | Y) & X is (Y, X). */
11465 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
11466 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
11467 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
11468 return omit_one_operand_loc (loc
, type
, arg1
, TREE_OPERAND (arg0
, 1));
11469 /* X & (X | Y) is (Y, X). */
11470 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
11471 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0)
11472 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 1)))
11473 return omit_one_operand_loc (loc
, type
, arg0
, TREE_OPERAND (arg1
, 1));
11474 /* X & (Y | X) is (Y, X). */
11475 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
11476 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
11477 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
11478 return omit_one_operand_loc (loc
, type
, arg0
, TREE_OPERAND (arg1
, 0));
11480 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11481 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11482 && INTEGRAL_TYPE_P (type
)
11483 && integer_onep (TREE_OPERAND (arg0
, 1))
11484 && integer_onep (arg1
))
11487 tem
= TREE_OPERAND (arg0
, 0);
11488 tem2
= fold_convert_loc (loc
, TREE_TYPE (tem
), arg1
);
11489 tem2
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
),
11491 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem2
,
11492 build_zero_cst (TREE_TYPE (tem
)));
11494 /* Fold ~X & 1 as (X & 1) == 0. */
11495 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11496 && INTEGRAL_TYPE_P (type
)
11497 && integer_onep (arg1
))
11500 tem
= TREE_OPERAND (arg0
, 0);
11501 tem2
= fold_convert_loc (loc
, TREE_TYPE (tem
), arg1
);
11502 tem2
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
),
11504 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem2
,
11505 build_zero_cst (TREE_TYPE (tem
)));
11507 /* Fold !X & 1 as X == 0. */
11508 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
11509 && integer_onep (arg1
))
11511 tem
= TREE_OPERAND (arg0
, 0);
11512 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem
,
11513 build_zero_cst (TREE_TYPE (tem
)));
11516 /* Fold (X ^ Y) & Y as ~X & Y. */
11517 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11518 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11520 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11521 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11522 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11523 fold_convert_loc (loc
, type
, arg1
));
11525 /* Fold (X ^ Y) & X as ~Y & X. */
11526 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11527 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
11528 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
11530 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11531 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11532 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11533 fold_convert_loc (loc
, type
, arg1
));
11535 /* Fold X & (X ^ Y) as X & ~Y. */
11536 if (TREE_CODE (arg1
) == BIT_XOR_EXPR
11537 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11539 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
11540 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11541 fold_convert_loc (loc
, type
, arg0
),
11542 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
));
11544 /* Fold X & (Y ^ X) as ~Y & X. */
11545 if (TREE_CODE (arg1
) == BIT_XOR_EXPR
11546 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
11547 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
11549 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
11550 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11551 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11552 fold_convert_loc (loc
, type
, arg0
));
11555 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11556 multiple of 1 << CST. */
11557 if (TREE_CODE (arg1
) == INTEGER_CST
)
11559 wide_int cst1
= arg1
;
11560 wide_int ncst1
= -cst1
;
11561 if ((cst1
& ncst1
) == ncst1
11562 && multiple_of_p (type
, arg0
,
11563 wide_int_to_tree (TREE_TYPE (arg1
), ncst1
)))
11564 return fold_convert_loc (loc
, type
, arg0
);
11567 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11569 if (TREE_CODE (arg1
) == INTEGER_CST
11570 && TREE_CODE (arg0
) == MULT_EXPR
11571 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11573 wide_int warg1
= arg1
;
11574 wide_int masked
= mask_with_tz (type
, warg1
, TREE_OPERAND (arg0
, 1));
11577 return omit_two_operands_loc (loc
, type
, build_zero_cst (type
),
11579 else if (masked
!= warg1
)
11581 /* Avoid the transform if arg1 is a mask of some
11582 mode which allows further optimizations. */
11583 int pop
= wi::popcount (warg1
);
11584 if (!(pop
>= BITS_PER_UNIT
11585 && exact_log2 (pop
) != -1
11586 && wi::mask (pop
, false, warg1
.get_precision ()) == warg1
))
11587 return fold_build2_loc (loc
, code
, type
, op0
,
11588 wide_int_to_tree (type
, masked
));
11592 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11593 ((A & N) + B) & M -> (A + B) & M
11594 Similarly if (N & M) == 0,
11595 ((A | N) + B) & M -> (A + B) & M
11596 and for - instead of + (or unary - instead of +)
11597 and/or ^ instead of |.
11598 If B is constant and (B & M) == 0, fold into A & M. */
11599 if (TREE_CODE (arg1
) == INTEGER_CST
)
11601 wide_int cst1
= arg1
;
11602 if ((~cst1
!= 0) && (cst1
& (cst1
+ 1)) == 0
11603 && INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
11604 && (TREE_CODE (arg0
) == PLUS_EXPR
11605 || TREE_CODE (arg0
) == MINUS_EXPR
11606 || TREE_CODE (arg0
) == NEGATE_EXPR
)
11607 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
))
11608 || TREE_CODE (TREE_TYPE (arg0
)) == INTEGER_TYPE
))
11614 /* Now we know that arg0 is (C + D) or (C - D) or
11615 -C and arg1 (M) is == (1LL << cst) - 1.
11616 Store C into PMOP[0] and D into PMOP[1]. */
11617 pmop
[0] = TREE_OPERAND (arg0
, 0);
11619 if (TREE_CODE (arg0
) != NEGATE_EXPR
)
11621 pmop
[1] = TREE_OPERAND (arg0
, 1);
11625 if ((wi::max_value (TREE_TYPE (arg0
)) & cst1
) != cst1
)
11628 for (; which
>= 0; which
--)
11629 switch (TREE_CODE (pmop
[which
]))
11634 if (TREE_CODE (TREE_OPERAND (pmop
[which
], 1))
11637 cst0
= TREE_OPERAND (pmop
[which
], 1);
11639 if (TREE_CODE (pmop
[which
]) == BIT_AND_EXPR
)
11644 else if (cst0
!= 0)
11646 /* If C or D is of the form (A & N) where
11647 (N & M) == M, or of the form (A | N) or
11648 (A ^ N) where (N & M) == 0, replace it with A. */
11649 pmop
[which
] = TREE_OPERAND (pmop
[which
], 0);
11652 /* If C or D is a N where (N & M) == 0, it can be
11653 omitted (assumed 0). */
11654 if ((TREE_CODE (arg0
) == PLUS_EXPR
11655 || (TREE_CODE (arg0
) == MINUS_EXPR
&& which
== 0))
11656 && (cst1
& pmop
[which
]) == 0)
11657 pmop
[which
] = NULL
;
11663 /* Only build anything new if we optimized one or both arguments
11665 if (pmop
[0] != TREE_OPERAND (arg0
, 0)
11666 || (TREE_CODE (arg0
) != NEGATE_EXPR
11667 && pmop
[1] != TREE_OPERAND (arg0
, 1)))
11669 tree utype
= TREE_TYPE (arg0
);
11670 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
11672 /* Perform the operations in a type that has defined
11673 overflow behavior. */
11674 utype
= unsigned_type_for (TREE_TYPE (arg0
));
11675 if (pmop
[0] != NULL
)
11676 pmop
[0] = fold_convert_loc (loc
, utype
, pmop
[0]);
11677 if (pmop
[1] != NULL
)
11678 pmop
[1] = fold_convert_loc (loc
, utype
, pmop
[1]);
11681 if (TREE_CODE (arg0
) == NEGATE_EXPR
)
11682 tem
= fold_build1_loc (loc
, NEGATE_EXPR
, utype
, pmop
[0]);
11683 else if (TREE_CODE (arg0
) == PLUS_EXPR
)
11685 if (pmop
[0] != NULL
&& pmop
[1] != NULL
)
11686 tem
= fold_build2_loc (loc
, PLUS_EXPR
, utype
,
11688 else if (pmop
[0] != NULL
)
11690 else if (pmop
[1] != NULL
)
11693 return build_int_cst (type
, 0);
11695 else if (pmop
[0] == NULL
)
11696 tem
= fold_build1_loc (loc
, NEGATE_EXPR
, utype
, pmop
[1]);
11698 tem
= fold_build2_loc (loc
, MINUS_EXPR
, utype
,
11700 /* TEM is now the new binary +, - or unary - replacement. */
11701 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, utype
, tem
,
11702 fold_convert_loc (loc
, utype
, arg1
));
11703 return fold_convert_loc (loc
, type
, tem
);
11708 t1
= distribute_bit_expr (loc
, code
, type
, arg0
, arg1
);
11709 if (t1
!= NULL_TREE
)
11711 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11712 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) == NOP_EXPR
11713 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0
, 0))))
11715 prec
= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0
, 0)));
11717 wide_int mask
= wide_int::from (arg1
, prec
, UNSIGNED
);
11720 fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11723 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11725 This results in more efficient code for machines without a NOR
11726 instruction. Combine will canonicalize to the first form
11727 which will allow use of NOR instructions provided by the
11728 backend if they exist. */
11729 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11730 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
11732 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
11733 build2 (BIT_IOR_EXPR
, type
,
11734 fold_convert_loc (loc
, type
,
11735 TREE_OPERAND (arg0
, 0)),
11736 fold_convert_loc (loc
, type
,
11737 TREE_OPERAND (arg1
, 0))));
11740 /* If arg0 is derived from the address of an object or function, we may
11741 be able to fold this expression using the object or function's
11743 if (POINTER_TYPE_P (TREE_TYPE (arg0
)) && tree_fits_uhwi_p (arg1
))
11745 unsigned HOST_WIDE_INT modulus
, residue
;
11746 unsigned HOST_WIDE_INT low
= tree_to_uhwi (arg1
);
11748 modulus
= get_pointer_modulus_and_residue (arg0
, &residue
,
11749 integer_onep (arg1
));
11751 /* This works because modulus is a power of 2. If this weren't the
11752 case, we'd have to replace it by its greatest power-of-2
11753 divisor: modulus & -modulus. */
11755 return build_int_cst (type
, residue
& low
);
11758 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11759 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11760 if the new mask might be further optimized. */
11761 if ((TREE_CODE (arg0
) == LSHIFT_EXPR
11762 || TREE_CODE (arg0
) == RSHIFT_EXPR
)
11763 && TYPE_PRECISION (TREE_TYPE (arg0
)) <= HOST_BITS_PER_WIDE_INT
11764 && TREE_CODE (arg1
) == INTEGER_CST
11765 && tree_fits_uhwi_p (TREE_OPERAND (arg0
, 1))
11766 && tree_to_uhwi (TREE_OPERAND (arg0
, 1)) > 0
11767 && (tree_to_uhwi (TREE_OPERAND (arg0
, 1))
11768 < TYPE_PRECISION (TREE_TYPE (arg0
))))
11770 unsigned int shiftc
= tree_to_uhwi (TREE_OPERAND (arg0
, 1));
11771 unsigned HOST_WIDE_INT mask
= TREE_INT_CST_LOW (arg1
);
11772 unsigned HOST_WIDE_INT newmask
, zerobits
= 0;
11773 tree shift_type
= TREE_TYPE (arg0
);
11775 if (TREE_CODE (arg0
) == LSHIFT_EXPR
)
11776 zerobits
= ((((unsigned HOST_WIDE_INT
) 1) << shiftc
) - 1);
11777 else if (TREE_CODE (arg0
) == RSHIFT_EXPR
11778 && TYPE_PRECISION (TREE_TYPE (arg0
))
11779 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0
))))
11781 prec
= TYPE_PRECISION (TREE_TYPE (arg0
));
11782 tree arg00
= TREE_OPERAND (arg0
, 0);
11783 /* See if more bits can be proven as zero because of
11785 if (TREE_CODE (arg00
) == NOP_EXPR
11786 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00
, 0))))
11788 tree inner_type
= TREE_TYPE (TREE_OPERAND (arg00
, 0));
11789 if (TYPE_PRECISION (inner_type
)
11790 == GET_MODE_PRECISION (TYPE_MODE (inner_type
))
11791 && TYPE_PRECISION (inner_type
) < prec
)
11793 prec
= TYPE_PRECISION (inner_type
);
11794 /* See if we can shorten the right shift. */
11796 shift_type
= inner_type
;
11797 /* Otherwise X >> C1 is all zeros, so we'll optimize
11798 it into (X, 0) later on by making sure zerobits
11802 zerobits
= ~(unsigned HOST_WIDE_INT
) 0;
11805 zerobits
>>= HOST_BITS_PER_WIDE_INT
- shiftc
;
11806 zerobits
<<= prec
- shiftc
;
11808 /* For arithmetic shift if sign bit could be set, zerobits
11809 can contain actually sign bits, so no transformation is
11810 possible, unless MASK masks them all away. In that
11811 case the shift needs to be converted into logical shift. */
11812 if (!TYPE_UNSIGNED (TREE_TYPE (arg0
))
11813 && prec
== TYPE_PRECISION (TREE_TYPE (arg0
)))
11815 if ((mask
& zerobits
) == 0)
11816 shift_type
= unsigned_type_for (TREE_TYPE (arg0
));
11822 /* ((X << 16) & 0xff00) is (X, 0). */
11823 if ((mask
& zerobits
) == mask
)
11824 return omit_one_operand_loc (loc
, type
,
11825 build_int_cst (type
, 0), arg0
);
11827 newmask
= mask
| zerobits
;
11828 if (newmask
!= mask
&& (newmask
& (newmask
+ 1)) == 0)
11830 /* Only do the transformation if NEWMASK is some integer
11832 for (prec
= BITS_PER_UNIT
;
11833 prec
< HOST_BITS_PER_WIDE_INT
; prec
<<= 1)
11834 if (newmask
== (((unsigned HOST_WIDE_INT
) 1) << prec
) - 1)
11836 if (prec
< HOST_BITS_PER_WIDE_INT
11837 || newmask
== ~(unsigned HOST_WIDE_INT
) 0)
11841 if (shift_type
!= TREE_TYPE (arg0
))
11843 tem
= fold_build2_loc (loc
, TREE_CODE (arg0
), shift_type
,
11844 fold_convert_loc (loc
, shift_type
,
11845 TREE_OPERAND (arg0
, 0)),
11846 TREE_OPERAND (arg0
, 1));
11847 tem
= fold_convert_loc (loc
, type
, tem
);
11851 newmaskt
= build_int_cst_type (TREE_TYPE (op1
), newmask
);
11852 if (!tree_int_cst_equal (newmaskt
, arg1
))
11853 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
, tem
, newmaskt
);
11861 /* Don't touch a floating-point divide by zero unless the mode
11862 of the constant can represent infinity. */
11863 if (TREE_CODE (arg1
) == REAL_CST
11864 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1
)))
11865 && real_zerop (arg1
))
11868 /* Optimize A / A to 1.0 if we don't care about
11869 NaNs or Infinities. Skip the transformation
11870 for non-real operands. */
11871 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0
))
11872 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
)))
11873 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0
)))
11874 && operand_equal_p (arg0
, arg1
, 0))
11876 tree r
= build_real (TREE_TYPE (arg0
), dconst1
);
11878 return omit_two_operands_loc (loc
, type
, r
, arg0
, arg1
);
11881 /* The complex version of the above A / A optimization. */
11882 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
))
11883 && operand_equal_p (arg0
, arg1
, 0))
11885 tree elem_type
= TREE_TYPE (TREE_TYPE (arg0
));
11886 if (! HONOR_NANS (TYPE_MODE (elem_type
))
11887 && ! HONOR_INFINITIES (TYPE_MODE (elem_type
)))
11889 tree r
= build_real (elem_type
, dconst1
);
11890 /* omit_two_operands will call fold_convert for us. */
11891 return omit_two_operands_loc (loc
, type
, r
, arg0
, arg1
);
11895 /* (-A) / (-B) -> A / B */
11896 if (TREE_CODE (arg0
) == NEGATE_EXPR
&& negate_expr_p (arg1
))
11897 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11898 TREE_OPERAND (arg0
, 0),
11899 negate_expr (arg1
));
11900 if (TREE_CODE (arg1
) == NEGATE_EXPR
&& negate_expr_p (arg0
))
11901 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11902 negate_expr (arg0
),
11903 TREE_OPERAND (arg1
, 0));
11905 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11906 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
11907 && real_onep (arg1
))
11908 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11910 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11911 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
11912 && real_minus_onep (arg1
))
11913 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
,
11914 negate_expr (arg0
)));
11916 /* If ARG1 is a constant, we can convert this to a multiply by the
11917 reciprocal. This does not have the same rounding properties,
11918 so only do this if -freciprocal-math. We can actually
11919 always safely do it if ARG1 is a power of two, but it's hard to
11920 tell if it is or not in a portable manner. */
11922 && (TREE_CODE (arg1
) == REAL_CST
11923 || (TREE_CODE (arg1
) == COMPLEX_CST
11924 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1
)))
11925 || (TREE_CODE (arg1
) == VECTOR_CST
11926 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1
)))))
11928 if (flag_reciprocal_math
11929 && 0 != (tem
= const_binop (code
, build_one_cst (type
), arg1
)))
11930 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, tem
);
11931 /* Find the reciprocal if optimizing and the result is exact.
11932 TODO: Complex reciprocal not implemented. */
11933 if (TREE_CODE (arg1
) != COMPLEX_CST
)
11935 tree inverse
= exact_inverse (TREE_TYPE (arg0
), arg1
);
11938 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, inverse
);
11941 /* Convert A/B/C to A/(B*C). */
11942 if (flag_reciprocal_math
11943 && TREE_CODE (arg0
) == RDIV_EXPR
)
11944 return fold_build2_loc (loc
, RDIV_EXPR
, type
, TREE_OPERAND (arg0
, 0),
11945 fold_build2_loc (loc
, MULT_EXPR
, type
,
11946 TREE_OPERAND (arg0
, 1), arg1
));
11948 /* Convert A/(B/C) to (A/B)*C. */
11949 if (flag_reciprocal_math
11950 && TREE_CODE (arg1
) == RDIV_EXPR
)
11951 return fold_build2_loc (loc
, MULT_EXPR
, type
,
11952 fold_build2_loc (loc
, RDIV_EXPR
, type
, arg0
,
11953 TREE_OPERAND (arg1
, 0)),
11954 TREE_OPERAND (arg1
, 1));
11956 /* Convert C1/(X*C2) into (C1/C2)/X. */
11957 if (flag_reciprocal_math
11958 && TREE_CODE (arg1
) == MULT_EXPR
11959 && TREE_CODE (arg0
) == REAL_CST
11960 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == REAL_CST
)
11962 tree tem
= const_binop (RDIV_EXPR
, arg0
,
11963 TREE_OPERAND (arg1
, 1));
11965 return fold_build2_loc (loc
, RDIV_EXPR
, type
, tem
,
11966 TREE_OPERAND (arg1
, 0));
11969 if (flag_unsafe_math_optimizations
)
11971 enum built_in_function fcode0
= builtin_mathfn_code (arg0
);
11972 enum built_in_function fcode1
= builtin_mathfn_code (arg1
);
11974 /* Optimize sin(x)/cos(x) as tan(x). */
11975 if (((fcode0
== BUILT_IN_SIN
&& fcode1
== BUILT_IN_COS
)
11976 || (fcode0
== BUILT_IN_SINF
&& fcode1
== BUILT_IN_COSF
)
11977 || (fcode0
== BUILT_IN_SINL
&& fcode1
== BUILT_IN_COSL
))
11978 && operand_equal_p (CALL_EXPR_ARG (arg0
, 0),
11979 CALL_EXPR_ARG (arg1
, 0), 0))
11981 tree tanfn
= mathfn_built_in (type
, BUILT_IN_TAN
);
11983 if (tanfn
!= NULL_TREE
)
11984 return build_call_expr_loc (loc
, tanfn
, 1, CALL_EXPR_ARG (arg0
, 0));
11987 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11988 if (((fcode0
== BUILT_IN_COS
&& fcode1
== BUILT_IN_SIN
)
11989 || (fcode0
== BUILT_IN_COSF
&& fcode1
== BUILT_IN_SINF
)
11990 || (fcode0
== BUILT_IN_COSL
&& fcode1
== BUILT_IN_SINL
))
11991 && operand_equal_p (CALL_EXPR_ARG (arg0
, 0),
11992 CALL_EXPR_ARG (arg1
, 0), 0))
11994 tree tanfn
= mathfn_built_in (type
, BUILT_IN_TAN
);
11996 if (tanfn
!= NULL_TREE
)
11998 tree tmp
= build_call_expr_loc (loc
, tanfn
, 1,
11999 CALL_EXPR_ARG (arg0
, 0));
12000 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
12001 build_real (type
, dconst1
), tmp
);
12005 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
12006 NaNs or Infinities. */
12007 if (((fcode0
== BUILT_IN_SIN
&& fcode1
== BUILT_IN_TAN
)
12008 || (fcode0
== BUILT_IN_SINF
&& fcode1
== BUILT_IN_TANF
)
12009 || (fcode0
== BUILT_IN_SINL
&& fcode1
== BUILT_IN_TANL
)))
12011 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
12012 tree arg01
= CALL_EXPR_ARG (arg1
, 0);
12014 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00
)))
12015 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00
)))
12016 && operand_equal_p (arg00
, arg01
, 0))
12018 tree cosfn
= mathfn_built_in (type
, BUILT_IN_COS
);
12020 if (cosfn
!= NULL_TREE
)
12021 return build_call_expr_loc (loc
, cosfn
, 1, arg00
);
12025 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
12026 NaNs or Infinities. */
12027 if (((fcode0
== BUILT_IN_TAN
&& fcode1
== BUILT_IN_SIN
)
12028 || (fcode0
== BUILT_IN_TANF
&& fcode1
== BUILT_IN_SINF
)
12029 || (fcode0
== BUILT_IN_TANL
&& fcode1
== BUILT_IN_SINL
)))
12031 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
12032 tree arg01
= CALL_EXPR_ARG (arg1
, 0);
12034 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00
)))
12035 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00
)))
12036 && operand_equal_p (arg00
, arg01
, 0))
12038 tree cosfn
= mathfn_built_in (type
, BUILT_IN_COS
);
12040 if (cosfn
!= NULL_TREE
)
12042 tree tmp
= build_call_expr_loc (loc
, cosfn
, 1, arg00
);
12043 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
12044 build_real (type
, dconst1
),
12050 /* Optimize pow(x,c)/x as pow(x,c-1). */
12051 if (fcode0
== BUILT_IN_POW
12052 || fcode0
== BUILT_IN_POWF
12053 || fcode0
== BUILT_IN_POWL
)
12055 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
12056 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
12057 if (TREE_CODE (arg01
) == REAL_CST
12058 && !TREE_OVERFLOW (arg01
)
12059 && operand_equal_p (arg1
, arg00
, 0))
12061 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
12065 c
= TREE_REAL_CST (arg01
);
12066 real_arithmetic (&c
, MINUS_EXPR
, &c
, &dconst1
);
12067 arg
= build_real (type
, c
);
12068 return build_call_expr_loc (loc
, powfn
, 2, arg1
, arg
);
12072 /* Optimize a/root(b/c) into a*root(c/b). */
12073 if (BUILTIN_ROOT_P (fcode1
))
12075 tree rootarg
= CALL_EXPR_ARG (arg1
, 0);
12077 if (TREE_CODE (rootarg
) == RDIV_EXPR
)
12079 tree rootfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
12080 tree b
= TREE_OPERAND (rootarg
, 0);
12081 tree c
= TREE_OPERAND (rootarg
, 1);
12083 tree tmp
= fold_build2_loc (loc
, RDIV_EXPR
, type
, c
, b
);
12085 tmp
= build_call_expr_loc (loc
, rootfn
, 1, tmp
);
12086 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, tmp
);
12090 /* Optimize x/expN(y) into x*expN(-y). */
12091 if (BUILTIN_EXPONENT_P (fcode1
))
12093 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
12094 tree arg
= negate_expr (CALL_EXPR_ARG (arg1
, 0));
12095 arg1
= build_call_expr_loc (loc
,
12097 fold_convert_loc (loc
, type
, arg
));
12098 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, arg1
);
12101 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12102 if (fcode1
== BUILT_IN_POW
12103 || fcode1
== BUILT_IN_POWF
12104 || fcode1
== BUILT_IN_POWL
)
12106 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
12107 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
12108 tree arg11
= CALL_EXPR_ARG (arg1
, 1);
12109 tree neg11
= fold_convert_loc (loc
, type
,
12110 negate_expr (arg11
));
12111 arg1
= build_call_expr_loc (loc
, powfn
, 2, arg10
, neg11
);
12112 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, arg1
);
12117 case TRUNC_DIV_EXPR
:
12118 /* Optimize (X & (-A)) / A where A is a power of 2,
12120 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12121 && !TYPE_UNSIGNED (type
) && TREE_CODE (arg1
) == INTEGER_CST
12122 && integer_pow2p (arg1
) && tree_int_cst_sgn (arg1
) > 0)
12124 tree sum
= fold_binary_loc (loc
, PLUS_EXPR
, TREE_TYPE (arg1
),
12125 arg1
, TREE_OPERAND (arg0
, 1));
12126 if (sum
&& integer_zerop (sum
)) {
12127 tree pow2
= build_int_cst (integer_type_node
,
12128 wi::exact_log2 (arg1
));
12129 return fold_build2_loc (loc
, RSHIFT_EXPR
, type
,
12130 TREE_OPERAND (arg0
, 0), pow2
);
12136 case FLOOR_DIV_EXPR
:
12137 /* Simplify A / (B << N) where A and B are positive and B is
12138 a power of 2, to A >> (N + log2(B)). */
12139 strict_overflow_p
= false;
12140 if (TREE_CODE (arg1
) == LSHIFT_EXPR
12141 && (TYPE_UNSIGNED (type
)
12142 || tree_expr_nonnegative_warnv_p (op0
, &strict_overflow_p
)))
12144 tree sval
= TREE_OPERAND (arg1
, 0);
12145 if (integer_pow2p (sval
) && tree_int_cst_sgn (sval
) > 0)
12147 tree sh_cnt
= TREE_OPERAND (arg1
, 1);
12148 tree pow2
= build_int_cst (TREE_TYPE (sh_cnt
),
12149 wi::exact_log2 (sval
));
12151 if (strict_overflow_p
)
12152 fold_overflow_warning (("assuming signed overflow does not "
12153 "occur when simplifying A / (B << N)"),
12154 WARN_STRICT_OVERFLOW_MISC
);
12156 sh_cnt
= fold_build2_loc (loc
, PLUS_EXPR
, TREE_TYPE (sh_cnt
),
12158 return fold_build2_loc (loc
, RSHIFT_EXPR
, type
,
12159 fold_convert_loc (loc
, type
, arg0
), sh_cnt
);
12163 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12164 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12165 if (INTEGRAL_TYPE_P (type
)
12166 && TYPE_UNSIGNED (type
)
12167 && code
== FLOOR_DIV_EXPR
)
12168 return fold_build2_loc (loc
, TRUNC_DIV_EXPR
, type
, op0
, op1
);
12172 case ROUND_DIV_EXPR
:
12173 case CEIL_DIV_EXPR
:
12174 case EXACT_DIV_EXPR
:
12175 if (integer_zerop (arg1
))
12177 /* X / -1 is -X. */
12178 if (!TYPE_UNSIGNED (type
)
12179 && TREE_CODE (arg1
) == INTEGER_CST
12180 && wi::eq_p (arg1
, -1))
12181 return fold_convert_loc (loc
, type
, negate_expr (arg0
));
12183 /* Convert -A / -B to A / B when the type is signed and overflow is
12185 if ((!INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
12186 && TREE_CODE (arg0
) == NEGATE_EXPR
12187 && negate_expr_p (arg1
))
12189 if (INTEGRAL_TYPE_P (type
))
12190 fold_overflow_warning (("assuming signed overflow does not occur "
12191 "when distributing negation across "
12193 WARN_STRICT_OVERFLOW_MISC
);
12194 return fold_build2_loc (loc
, code
, type
,
12195 fold_convert_loc (loc
, type
,
12196 TREE_OPERAND (arg0
, 0)),
12197 fold_convert_loc (loc
, type
,
12198 negate_expr (arg1
)));
12200 if ((!INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
12201 && TREE_CODE (arg1
) == NEGATE_EXPR
12202 && negate_expr_p (arg0
))
12204 if (INTEGRAL_TYPE_P (type
))
12205 fold_overflow_warning (("assuming signed overflow does not occur "
12206 "when distributing negation across "
12208 WARN_STRICT_OVERFLOW_MISC
);
12209 return fold_build2_loc (loc
, code
, type
,
12210 fold_convert_loc (loc
, type
,
12211 negate_expr (arg0
)),
12212 fold_convert_loc (loc
, type
,
12213 TREE_OPERAND (arg1
, 0)));
12216 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12217 operation, EXACT_DIV_EXPR.
12219 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12220 At one time others generated faster code, it's not clear if they do
12221 after the last round to changes to the DIV code in expmed.c. */
12222 if ((code
== CEIL_DIV_EXPR
|| code
== FLOOR_DIV_EXPR
)
12223 && multiple_of_p (type
, arg0
, arg1
))
12224 return fold_build2_loc (loc
, EXACT_DIV_EXPR
, type
, arg0
, arg1
);
12226 strict_overflow_p
= false;
12227 if (TREE_CODE (arg1
) == INTEGER_CST
12228 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
12229 &strict_overflow_p
)))
12231 if (strict_overflow_p
)
12232 fold_overflow_warning (("assuming signed overflow does not occur "
12233 "when simplifying division"),
12234 WARN_STRICT_OVERFLOW_MISC
);
12235 return fold_convert_loc (loc
, type
, tem
);
12240 case CEIL_MOD_EXPR
:
12241 case FLOOR_MOD_EXPR
:
12242 case ROUND_MOD_EXPR
:
12243 case TRUNC_MOD_EXPR
:
12244 /* X % -1 is zero. */
12245 if (!TYPE_UNSIGNED (type
)
12246 && TREE_CODE (arg1
) == INTEGER_CST
12247 && wi::eq_p (arg1
, -1))
12248 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
12250 /* X % -C is the same as X % C. */
12251 if (code
== TRUNC_MOD_EXPR
12252 && TYPE_SIGN (type
) == SIGNED
12253 && TREE_CODE (arg1
) == INTEGER_CST
12254 && !TREE_OVERFLOW (arg1
)
12255 && wi::neg_p (arg1
)
12256 && !TYPE_OVERFLOW_TRAPS (type
)
12257 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12258 && !sign_bit_p (arg1
, arg1
))
12259 return fold_build2_loc (loc
, code
, type
,
12260 fold_convert_loc (loc
, type
, arg0
),
12261 fold_convert_loc (loc
, type
,
12262 negate_expr (arg1
)));
12264 /* X % -Y is the same as X % Y. */
12265 if (code
== TRUNC_MOD_EXPR
12266 && !TYPE_UNSIGNED (type
)
12267 && TREE_CODE (arg1
) == NEGATE_EXPR
12268 && !TYPE_OVERFLOW_TRAPS (type
))
12269 return fold_build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, arg0
),
12270 fold_convert_loc (loc
, type
,
12271 TREE_OPERAND (arg1
, 0)));
12273 strict_overflow_p
= false;
12274 if (TREE_CODE (arg1
) == INTEGER_CST
12275 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
12276 &strict_overflow_p
)))
12278 if (strict_overflow_p
)
12279 fold_overflow_warning (("assuming signed overflow does not occur "
12280 "when simplifying modulus"),
12281 WARN_STRICT_OVERFLOW_MISC
);
12282 return fold_convert_loc (loc
, type
, tem
);
12285 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12286 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12287 if ((code
== TRUNC_MOD_EXPR
|| code
== FLOOR_MOD_EXPR
)
12288 && (TYPE_UNSIGNED (type
)
12289 || tree_expr_nonnegative_warnv_p (op0
, &strict_overflow_p
)))
12292 /* Also optimize A % (C << N) where C is a power of 2,
12293 to A & ((C << N) - 1). */
12294 if (TREE_CODE (arg1
) == LSHIFT_EXPR
)
12295 c
= TREE_OPERAND (arg1
, 0);
12297 if (integer_pow2p (c
) && tree_int_cst_sgn (c
) > 0)
12300 = fold_build2_loc (loc
, MINUS_EXPR
, TREE_TYPE (arg1
), arg1
,
12301 build_int_cst (TREE_TYPE (arg1
), 1));
12302 if (strict_overflow_p
)
12303 fold_overflow_warning (("assuming signed overflow does not "
12304 "occur when simplifying "
12305 "X % (power of two)"),
12306 WARN_STRICT_OVERFLOW_MISC
);
12307 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
12308 fold_convert_loc (loc
, type
, arg0
),
12309 fold_convert_loc (loc
, type
, mask
));
12317 if (integer_all_onesp (arg0
))
12318 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12322 /* Optimize -1 >> x for arithmetic right shifts. */
12323 if (integer_all_onesp (arg0
) && !TYPE_UNSIGNED (type
)
12324 && tree_expr_nonnegative_p (arg1
))
12325 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12326 /* ... fall through ... */
12330 if (integer_zerop (arg1
))
12331 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12332 if (integer_zerop (arg0
))
12333 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12335 /* Prefer vector1 << scalar to vector1 << vector2
12336 if vector2 is uniform. */
12337 if (VECTOR_TYPE_P (TREE_TYPE (arg1
))
12338 && (tem
= uniform_vector_p (arg1
)) != NULL_TREE
)
12339 return fold_build2_loc (loc
, code
, type
, op0
, tem
);
12341 /* Since negative shift count is not well-defined,
12342 don't try to compute it in the compiler. */
12343 if (TREE_CODE (arg1
) == INTEGER_CST
&& tree_int_cst_sgn (arg1
) < 0)
12346 prec
= element_precision (type
);
12348 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12349 if (TREE_CODE (op0
) == code
&& tree_fits_uhwi_p (arg1
)
12350 && tree_to_uhwi (arg1
) < prec
12351 && tree_fits_uhwi_p (TREE_OPERAND (arg0
, 1))
12352 && tree_to_uhwi (TREE_OPERAND (arg0
, 1)) < prec
)
12354 unsigned int low
= (tree_to_uhwi (TREE_OPERAND (arg0
, 1))
12355 + tree_to_uhwi (arg1
));
12357 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12358 being well defined. */
12361 if (code
== LROTATE_EXPR
|| code
== RROTATE_EXPR
)
12363 else if (TYPE_UNSIGNED (type
) || code
== LSHIFT_EXPR
)
12364 return omit_one_operand_loc (loc
, type
, build_zero_cst (type
),
12365 TREE_OPERAND (arg0
, 0));
12370 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
12371 build_int_cst (TREE_TYPE (arg1
), low
));
12374 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12375 into x & ((unsigned)-1 >> c) for unsigned types. */
12376 if (((code
== LSHIFT_EXPR
&& TREE_CODE (arg0
) == RSHIFT_EXPR
)
12377 || (TYPE_UNSIGNED (type
)
12378 && code
== RSHIFT_EXPR
&& TREE_CODE (arg0
) == LSHIFT_EXPR
))
12379 && tree_fits_uhwi_p (arg1
)
12380 && tree_to_uhwi (arg1
) < prec
12381 && tree_fits_uhwi_p (TREE_OPERAND (arg0
, 1))
12382 && tree_to_uhwi (TREE_OPERAND (arg0
, 1)) < prec
)
12384 HOST_WIDE_INT low0
= tree_to_uhwi (TREE_OPERAND (arg0
, 1));
12385 HOST_WIDE_INT low1
= tree_to_uhwi (arg1
);
12391 arg00
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
12393 lshift
= build_minus_one_cst (type
);
12394 lshift
= const_binop (code
, lshift
, arg1
);
12396 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
, arg00
, lshift
);
12400 /* Rewrite an LROTATE_EXPR by a constant into an
12401 RROTATE_EXPR by a new constant. */
12402 if (code
== LROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
)
12404 tree tem
= build_int_cst (TREE_TYPE (arg1
), prec
);
12405 tem
= const_binop (MINUS_EXPR
, tem
, arg1
);
12406 return fold_build2_loc (loc
, RROTATE_EXPR
, type
, op0
, tem
);
12409 /* If we have a rotate of a bit operation with the rotate count and
12410 the second operand of the bit operation both constant,
12411 permute the two operations. */
12412 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
12413 && (TREE_CODE (arg0
) == BIT_AND_EXPR
12414 || TREE_CODE (arg0
) == BIT_IOR_EXPR
12415 || TREE_CODE (arg0
) == BIT_XOR_EXPR
)
12416 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12417 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
,
12418 fold_build2_loc (loc
, code
, type
,
12419 TREE_OPERAND (arg0
, 0), arg1
),
12420 fold_build2_loc (loc
, code
, type
,
12421 TREE_OPERAND (arg0
, 1), arg1
));
12423 /* Two consecutive rotates adding up to the some integer
12424 multiple of the precision of the type can be ignored. */
12425 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
12426 && TREE_CODE (arg0
) == RROTATE_EXPR
12427 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
12428 && wi::umod_trunc (wi::add (arg1
, TREE_OPERAND (arg0
, 1)),
12430 return TREE_OPERAND (arg0
, 0);
12432 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12433 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12434 if the latter can be further optimized. */
12435 if ((code
== LSHIFT_EXPR
|| code
== RSHIFT_EXPR
)
12436 && TREE_CODE (arg0
) == BIT_AND_EXPR
12437 && TREE_CODE (arg1
) == INTEGER_CST
12438 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12440 tree mask
= fold_build2_loc (loc
, code
, type
,
12441 fold_convert_loc (loc
, type
,
12442 TREE_OPERAND (arg0
, 1)),
12444 tree shift
= fold_build2_loc (loc
, code
, type
,
12445 fold_convert_loc (loc
, type
,
12446 TREE_OPERAND (arg0
, 0)),
12448 tem
= fold_binary_loc (loc
, BIT_AND_EXPR
, type
, shift
, mask
);
12456 if (operand_equal_p (arg0
, arg1
, 0))
12457 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12458 if (INTEGRAL_TYPE_P (type
)
12459 && operand_equal_p (arg1
, TYPE_MIN_VALUE (type
), OEP_ONLY_CONST
))
12460 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
12461 tem
= fold_minmax (loc
, MIN_EXPR
, type
, arg0
, arg1
);
12467 if (operand_equal_p (arg0
, arg1
, 0))
12468 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12469 if (INTEGRAL_TYPE_P (type
)
12470 && TYPE_MAX_VALUE (type
)
12471 && operand_equal_p (arg1
, TYPE_MAX_VALUE (type
), OEP_ONLY_CONST
))
12472 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
12473 tem
= fold_minmax (loc
, MAX_EXPR
, type
, arg0
, arg1
);
12478 case TRUTH_ANDIF_EXPR
:
12479 /* Note that the operands of this must be ints
12480 and their values must be 0 or 1.
12481 ("true" is a fixed value perhaps depending on the language.) */
12482 /* If first arg is constant zero, return it. */
12483 if (integer_zerop (arg0
))
12484 return fold_convert_loc (loc
, type
, arg0
);
12485 case TRUTH_AND_EXPR
:
12486 /* If either arg is constant true, drop it. */
12487 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
12488 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
12489 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
)
12490 /* Preserve sequence points. */
12491 && (code
!= TRUTH_ANDIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
12492 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12493 /* If second arg is constant zero, result is zero, but first arg
12494 must be evaluated. */
12495 if (integer_zerop (arg1
))
12496 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
12497 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12498 case will be handled here. */
12499 if (integer_zerop (arg0
))
12500 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12502 /* !X && X is always false. */
12503 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
12504 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
12505 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg1
);
12506 /* X && !X is always false. */
12507 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
12508 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
12509 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
12511 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12512 means A >= Y && A != MAX, but in this case we know that
12515 if (!TREE_SIDE_EFFECTS (arg0
)
12516 && !TREE_SIDE_EFFECTS (arg1
))
12518 tem
= fold_to_nonsharp_ineq_using_bound (loc
, arg0
, arg1
);
12519 if (tem
&& !operand_equal_p (tem
, arg0
, 0))
12520 return fold_build2_loc (loc
, code
, type
, tem
, arg1
);
12522 tem
= fold_to_nonsharp_ineq_using_bound (loc
, arg1
, arg0
);
12523 if (tem
&& !operand_equal_p (tem
, arg1
, 0))
12524 return fold_build2_loc (loc
, code
, type
, arg0
, tem
);
12527 if ((tem
= fold_truth_andor (loc
, code
, type
, arg0
, arg1
, op0
, op1
))
12533 case TRUTH_ORIF_EXPR
:
12534 /* Note that the operands of this must be ints
12535 and their values must be 0 or true.
12536 ("true" is a fixed value perhaps depending on the language.) */
12537 /* If first arg is constant true, return it. */
12538 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
12539 return fold_convert_loc (loc
, type
, arg0
);
12540 case TRUTH_OR_EXPR
:
12541 /* If either arg is constant zero, drop it. */
12542 if (TREE_CODE (arg0
) == INTEGER_CST
&& integer_zerop (arg0
))
12543 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
12544 if (TREE_CODE (arg1
) == INTEGER_CST
&& integer_zerop (arg1
)
12545 /* Preserve sequence points. */
12546 && (code
!= TRUTH_ORIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
12547 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12548 /* If second arg is constant true, result is true, but we must
12549 evaluate first arg. */
12550 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
))
12551 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
12552 /* Likewise for first arg, but note this only occurs here for
12554 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
12555 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12557 /* !X || X is always true. */
12558 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
12559 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
12560 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg1
);
12561 /* X || !X is always true. */
12562 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
12563 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
12564 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
12566 /* (X && !Y) || (!X && Y) is X ^ Y */
12567 if (TREE_CODE (arg0
) == TRUTH_AND_EXPR
12568 && TREE_CODE (arg1
) == TRUTH_AND_EXPR
)
12570 tree a0
, a1
, l0
, l1
, n0
, n1
;
12572 a0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
12573 a1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
12575 l0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
12576 l1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
12578 n0
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, l0
);
12579 n1
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, l1
);
12581 if ((operand_equal_p (n0
, a0
, 0)
12582 && operand_equal_p (n1
, a1
, 0))
12583 || (operand_equal_p (n0
, a1
, 0)
12584 && operand_equal_p (n1
, a0
, 0)))
12585 return fold_build2_loc (loc
, TRUTH_XOR_EXPR
, type
, l0
, n1
);
12588 if ((tem
= fold_truth_andor (loc
, code
, type
, arg0
, arg1
, op0
, op1
))
12594 case TRUTH_XOR_EXPR
:
12595 /* If the second arg is constant zero, drop it. */
12596 if (integer_zerop (arg1
))
12597 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12598 /* If the second arg is constant true, this is a logical inversion. */
12599 if (integer_onep (arg1
))
12601 tem
= invert_truthvalue_loc (loc
, arg0
);
12602 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
12604 /* Identical arguments cancel to zero. */
12605 if (operand_equal_p (arg0
, arg1
, 0))
12606 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
12608 /* !X ^ X is always true. */
12609 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
12610 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
12611 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg1
);
12613 /* X ^ !X is always true. */
12614 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
12615 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
12616 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
12625 tem
= fold_comparison (loc
, code
, type
, op0
, op1
);
12626 if (tem
!= NULL_TREE
)
12629 /* bool_var != 0 becomes bool_var. */
12630 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_zerop (arg1
)
12631 && code
== NE_EXPR
)
12632 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12634 /* bool_var == 1 becomes bool_var. */
12635 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_onep (arg1
)
12636 && code
== EQ_EXPR
)
12637 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12639 /* bool_var != 1 becomes !bool_var. */
12640 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_onep (arg1
)
12641 && code
== NE_EXPR
)
12642 return fold_convert_loc (loc
, type
,
12643 fold_build1_loc (loc
, TRUTH_NOT_EXPR
,
12644 TREE_TYPE (arg0
), arg0
));
12646 /* bool_var == 0 becomes !bool_var. */
12647 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_zerop (arg1
)
12648 && code
== EQ_EXPR
)
12649 return fold_convert_loc (loc
, type
,
12650 fold_build1_loc (loc
, TRUTH_NOT_EXPR
,
12651 TREE_TYPE (arg0
), arg0
));
12653 /* !exp != 0 becomes !exp */
12654 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
&& integer_zerop (arg1
)
12655 && code
== NE_EXPR
)
12656 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12658 /* If this is an equality comparison of the address of two non-weak,
12659 unaliased symbols neither of which are extern (since we do not
12660 have access to attributes for externs), then we know the result. */
12661 if (TREE_CODE (arg0
) == ADDR_EXPR
12662 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0
, 0))
12663 && ! DECL_WEAK (TREE_OPERAND (arg0
, 0))
12664 && ! lookup_attribute ("alias",
12665 DECL_ATTRIBUTES (TREE_OPERAND (arg0
, 0)))
12666 && ! DECL_EXTERNAL (TREE_OPERAND (arg0
, 0))
12667 && TREE_CODE (arg1
) == ADDR_EXPR
12668 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1
, 0))
12669 && ! DECL_WEAK (TREE_OPERAND (arg1
, 0))
12670 && ! lookup_attribute ("alias",
12671 DECL_ATTRIBUTES (TREE_OPERAND (arg1
, 0)))
12672 && ! DECL_EXTERNAL (TREE_OPERAND (arg1
, 0)))
12674 /* We know that we're looking at the address of two
12675 non-weak, unaliased, static _DECL nodes.
12677 It is both wasteful and incorrect to call operand_equal_p
12678 to compare the two ADDR_EXPR nodes. It is wasteful in that
12679 all we need to do is test pointer equality for the arguments
12680 to the two ADDR_EXPR nodes. It is incorrect to use
12681 operand_equal_p as that function is NOT equivalent to a
12682 C equality test. It can in fact return false for two
12683 objects which would test as equal using the C equality
12685 bool equal
= TREE_OPERAND (arg0
, 0) == TREE_OPERAND (arg1
, 0);
12686 return constant_boolean_node (equal
12687 ? code
== EQ_EXPR
: code
!= EQ_EXPR
,
12691 /* Similarly for a NEGATE_EXPR. */
12692 if (TREE_CODE (arg0
) == NEGATE_EXPR
12693 && TREE_CODE (arg1
) == INTEGER_CST
12694 && 0 != (tem
= negate_expr (fold_convert_loc (loc
, TREE_TYPE (arg0
),
12696 && TREE_CODE (tem
) == INTEGER_CST
12697 && !TREE_OVERFLOW (tem
))
12698 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
12700 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12701 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12702 && TREE_CODE (arg1
) == INTEGER_CST
12703 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12704 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
12705 fold_build2_loc (loc
, BIT_XOR_EXPR
, TREE_TYPE (arg0
),
12706 fold_convert_loc (loc
,
12709 TREE_OPERAND (arg0
, 1)));
12711 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12712 if ((TREE_CODE (arg0
) == PLUS_EXPR
12713 || TREE_CODE (arg0
) == POINTER_PLUS_EXPR
12714 || TREE_CODE (arg0
) == MINUS_EXPR
)
12715 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0
,
12718 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
12719 || POINTER_TYPE_P (TREE_TYPE (arg0
))))
12721 tree val
= TREE_OPERAND (arg0
, 1);
12722 return omit_two_operands_loc (loc
, type
,
12723 fold_build2_loc (loc
, code
, type
,
12725 build_int_cst (TREE_TYPE (val
),
12727 TREE_OPERAND (arg0
, 0), arg1
);
12730 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12731 if (TREE_CODE (arg0
) == MINUS_EXPR
12732 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == INTEGER_CST
12733 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0
,
12736 && wi::extract_uhwi (TREE_OPERAND (arg0
, 0), 0, 1) == 1)
12738 return omit_two_operands_loc (loc
, type
,
12740 ? boolean_true_node
: boolean_false_node
,
12741 TREE_OPERAND (arg0
, 1), arg1
);
12744 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12745 if (TREE_CODE (arg0
) == ABS_EXPR
12746 && (integer_zerop (arg1
) || real_zerop (arg1
)))
12747 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), arg1
);
12749 /* If this is an EQ or NE comparison with zero and ARG0 is
12750 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12751 two operations, but the latter can be done in one less insn
12752 on machines that have only two-operand insns or on which a
12753 constant cannot be the first operand. */
12754 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12755 && integer_zerop (arg1
))
12757 tree arg00
= TREE_OPERAND (arg0
, 0);
12758 tree arg01
= TREE_OPERAND (arg0
, 1);
12759 if (TREE_CODE (arg00
) == LSHIFT_EXPR
12760 && integer_onep (TREE_OPERAND (arg00
, 0)))
12762 tree tem
= fold_build2_loc (loc
, RSHIFT_EXPR
, TREE_TYPE (arg00
),
12763 arg01
, TREE_OPERAND (arg00
, 1));
12764 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
), tem
,
12765 build_int_cst (TREE_TYPE (arg0
), 1));
12766 return fold_build2_loc (loc
, code
, type
,
12767 fold_convert_loc (loc
, TREE_TYPE (arg1
), tem
),
12770 else if (TREE_CODE (arg01
) == LSHIFT_EXPR
12771 && integer_onep (TREE_OPERAND (arg01
, 0)))
12773 tree tem
= fold_build2_loc (loc
, RSHIFT_EXPR
, TREE_TYPE (arg01
),
12774 arg00
, TREE_OPERAND (arg01
, 1));
12775 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
), tem
,
12776 build_int_cst (TREE_TYPE (arg0
), 1));
12777 return fold_build2_loc (loc
, code
, type
,
12778 fold_convert_loc (loc
, TREE_TYPE (arg1
), tem
),
12783 /* If this is an NE or EQ comparison of zero against the result of a
12784 signed MOD operation whose second operand is a power of 2, make
12785 the MOD operation unsigned since it is simpler and equivalent. */
12786 if (integer_zerop (arg1
)
12787 && !TYPE_UNSIGNED (TREE_TYPE (arg0
))
12788 && (TREE_CODE (arg0
) == TRUNC_MOD_EXPR
12789 || TREE_CODE (arg0
) == CEIL_MOD_EXPR
12790 || TREE_CODE (arg0
) == FLOOR_MOD_EXPR
12791 || TREE_CODE (arg0
) == ROUND_MOD_EXPR
)
12792 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
12794 tree newtype
= unsigned_type_for (TREE_TYPE (arg0
));
12795 tree newmod
= fold_build2_loc (loc
, TREE_CODE (arg0
), newtype
,
12796 fold_convert_loc (loc
, newtype
,
12797 TREE_OPERAND (arg0
, 0)),
12798 fold_convert_loc (loc
, newtype
,
12799 TREE_OPERAND (arg0
, 1)));
12801 return fold_build2_loc (loc
, code
, type
, newmod
,
12802 fold_convert_loc (loc
, newtype
, arg1
));
12805 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12806 C1 is a valid shift constant, and C2 is a power of two, i.e.
12808 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12809 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == RSHIFT_EXPR
12810 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1))
12812 && integer_pow2p (TREE_OPERAND (arg0
, 1))
12813 && integer_zerop (arg1
))
12815 tree itype
= TREE_TYPE (arg0
);
12816 tree arg001
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1);
12817 prec
= TYPE_PRECISION (itype
);
12819 /* Check for a valid shift count. */
12820 if (wi::ltu_p (arg001
, prec
))
12822 tree arg01
= TREE_OPERAND (arg0
, 1);
12823 tree arg000
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
12824 unsigned HOST_WIDE_INT log2
= tree_log2 (arg01
);
12825 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12826 can be rewritten as (X & (C2 << C1)) != 0. */
12827 if ((log2
+ TREE_INT_CST_LOW (arg001
)) < prec
)
12829 tem
= fold_build2_loc (loc
, LSHIFT_EXPR
, itype
, arg01
, arg001
);
12830 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, itype
, arg000
, tem
);
12831 return fold_build2_loc (loc
, code
, type
, tem
,
12832 fold_convert_loc (loc
, itype
, arg1
));
12834 /* Otherwise, for signed (arithmetic) shifts,
12835 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12836 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12837 else if (!TYPE_UNSIGNED (itype
))
12838 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
, type
,
12839 arg000
, build_int_cst (itype
, 0));
12840 /* Otherwise, of unsigned (logical) shifts,
12841 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12842 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12844 return omit_one_operand_loc (loc
, type
,
12845 code
== EQ_EXPR
? integer_one_node
12846 : integer_zero_node
,
12851 /* If we have (A & C) == C where C is a power of 2, convert this into
12852 (A & C) != 0. Similarly for NE_EXPR. */
12853 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12854 && integer_pow2p (TREE_OPERAND (arg0
, 1))
12855 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
12856 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
12857 arg0
, fold_convert_loc (loc
, TREE_TYPE (arg0
),
12858 integer_zero_node
));
12860 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12861 bit, then fold the expression into A < 0 or A >= 0. */
12862 tem
= fold_single_bit_test_into_sign_test (loc
, code
, arg0
, arg1
, type
);
12866 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12867 Similarly for NE_EXPR. */
12868 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12869 && TREE_CODE (arg1
) == INTEGER_CST
12870 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12872 tree notc
= fold_build1_loc (loc
, BIT_NOT_EXPR
,
12873 TREE_TYPE (TREE_OPERAND (arg0
, 1)),
12874 TREE_OPERAND (arg0
, 1));
12876 = fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
12877 fold_convert_loc (loc
, TREE_TYPE (arg0
), arg1
),
12879 tree rslt
= code
== EQ_EXPR
? integer_zero_node
: integer_one_node
;
12880 if (integer_nonzerop (dandnotc
))
12881 return omit_one_operand_loc (loc
, type
, rslt
, arg0
);
12884 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12885 Similarly for NE_EXPR. */
12886 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
12887 && TREE_CODE (arg1
) == INTEGER_CST
12888 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12890 tree notd
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg1
), arg1
);
12892 = fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
12893 TREE_OPERAND (arg0
, 1),
12894 fold_convert_loc (loc
, TREE_TYPE (arg0
), notd
));
12895 tree rslt
= code
== EQ_EXPR
? integer_zero_node
: integer_one_node
;
12896 if (integer_nonzerop (candnotd
))
12897 return omit_one_operand_loc (loc
, type
, rslt
, arg0
);
12900 /* If this is a comparison of a field, we may be able to simplify it. */
12901 if ((TREE_CODE (arg0
) == COMPONENT_REF
12902 || TREE_CODE (arg0
) == BIT_FIELD_REF
)
12903 /* Handle the constant case even without -O
12904 to make sure the warnings are given. */
12905 && (optimize
|| TREE_CODE (arg1
) == INTEGER_CST
))
12907 t1
= optimize_bit_field_compare (loc
, code
, type
, arg0
, arg1
);
12912 /* Optimize comparisons of strlen vs zero to a compare of the
12913 first character of the string vs zero. To wit,
12914 strlen(ptr) == 0 => *ptr == 0
12915 strlen(ptr) != 0 => *ptr != 0
12916 Other cases should reduce to one of these two (or a constant)
12917 due to the return value of strlen being unsigned. */
12918 if (TREE_CODE (arg0
) == CALL_EXPR
12919 && integer_zerop (arg1
))
12921 tree fndecl
= get_callee_fndecl (arg0
);
12924 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
12925 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STRLEN
12926 && call_expr_nargs (arg0
) == 1
12927 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0
, 0))) == POINTER_TYPE
)
12929 tree iref
= build_fold_indirect_ref_loc (loc
,
12930 CALL_EXPR_ARG (arg0
, 0));
12931 return fold_build2_loc (loc
, code
, type
, iref
,
12932 build_int_cst (TREE_TYPE (iref
), 0));
12936 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12937 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12938 if (TREE_CODE (arg0
) == RSHIFT_EXPR
12939 && integer_zerop (arg1
)
12940 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12942 tree arg00
= TREE_OPERAND (arg0
, 0);
12943 tree arg01
= TREE_OPERAND (arg0
, 1);
12944 tree itype
= TREE_TYPE (arg00
);
12945 if (wi::eq_p (arg01
, TYPE_PRECISION (itype
) - 1))
12947 if (TYPE_UNSIGNED (itype
))
12949 itype
= signed_type_for (itype
);
12950 arg00
= fold_convert_loc (loc
, itype
, arg00
);
12952 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
,
12953 type
, arg00
, build_zero_cst (itype
));
12957 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12958 if (integer_zerop (arg1
)
12959 && TREE_CODE (arg0
) == BIT_XOR_EXPR
)
12960 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
12961 TREE_OPERAND (arg0
, 1));
12963 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12964 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12965 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
12966 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
12967 build_zero_cst (TREE_TYPE (arg0
)));
12968 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12969 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12970 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
12971 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
12972 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 1),
12973 build_zero_cst (TREE_TYPE (arg0
)));
12975 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12976 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12977 && TREE_CODE (arg1
) == INTEGER_CST
12978 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12979 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
12980 fold_build2_loc (loc
, BIT_XOR_EXPR
, TREE_TYPE (arg1
),
12981 TREE_OPERAND (arg0
, 1), arg1
));
12983 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12984 (X & C) == 0 when C is a single bit. */
12985 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12986 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_NOT_EXPR
12987 && integer_zerop (arg1
)
12988 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
12990 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
12991 TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0),
12992 TREE_OPERAND (arg0
, 1));
12993 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
,
12995 fold_convert_loc (loc
, TREE_TYPE (arg0
),
12999 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13000 constant C is a power of two, i.e. a single bit. */
13001 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
13002 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
13003 && integer_zerop (arg1
)
13004 && integer_pow2p (TREE_OPERAND (arg0
, 1))
13005 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
13006 TREE_OPERAND (arg0
, 1), OEP_ONLY_CONST
))
13008 tree arg00
= TREE_OPERAND (arg0
, 0);
13009 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
13010 arg00
, build_int_cst (TREE_TYPE (arg00
), 0));
13013 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13014 when is C is a power of two, i.e. a single bit. */
13015 if (TREE_CODE (arg0
) == BIT_AND_EXPR
13016 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_XOR_EXPR
13017 && integer_zerop (arg1
)
13018 && integer_pow2p (TREE_OPERAND (arg0
, 1))
13019 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
13020 TREE_OPERAND (arg0
, 1), OEP_ONLY_CONST
))
13022 tree arg000
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
13023 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg000
),
13024 arg000
, TREE_OPERAND (arg0
, 1));
13025 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
13026 tem
, build_int_cst (TREE_TYPE (tem
), 0));
13029 if (integer_zerop (arg1
)
13030 && tree_expr_nonzero_p (arg0
))
13032 tree res
= constant_boolean_node (code
==NE_EXPR
, type
);
13033 return omit_one_operand_loc (loc
, type
, res
, arg0
);
13036 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13037 if (TREE_CODE (arg0
) == NEGATE_EXPR
13038 && TREE_CODE (arg1
) == NEGATE_EXPR
)
13039 return fold_build2_loc (loc
, code
, type
,
13040 TREE_OPERAND (arg0
, 0),
13041 fold_convert_loc (loc
, TREE_TYPE (arg0
),
13042 TREE_OPERAND (arg1
, 0)));
13044 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
13045 if (TREE_CODE (arg0
) == BIT_AND_EXPR
13046 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
13048 tree arg00
= TREE_OPERAND (arg0
, 0);
13049 tree arg01
= TREE_OPERAND (arg0
, 1);
13050 tree arg10
= TREE_OPERAND (arg1
, 0);
13051 tree arg11
= TREE_OPERAND (arg1
, 1);
13052 tree itype
= TREE_TYPE (arg0
);
13054 if (operand_equal_p (arg01
, arg11
, 0))
13055 return fold_build2_loc (loc
, code
, type
,
13056 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
13057 fold_build2_loc (loc
,
13058 BIT_XOR_EXPR
, itype
,
13061 build_zero_cst (itype
));
13063 if (operand_equal_p (arg01
, arg10
, 0))
13064 return fold_build2_loc (loc
, code
, type
,
13065 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
13066 fold_build2_loc (loc
,
13067 BIT_XOR_EXPR
, itype
,
13070 build_zero_cst (itype
));
13072 if (operand_equal_p (arg00
, arg11
, 0))
13073 return fold_build2_loc (loc
, code
, type
,
13074 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
13075 fold_build2_loc (loc
,
13076 BIT_XOR_EXPR
, itype
,
13079 build_zero_cst (itype
));
13081 if (operand_equal_p (arg00
, arg10
, 0))
13082 return fold_build2_loc (loc
, code
, type
,
13083 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
13084 fold_build2_loc (loc
,
13085 BIT_XOR_EXPR
, itype
,
13088 build_zero_cst (itype
));
13091 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
13092 && TREE_CODE (arg1
) == BIT_XOR_EXPR
)
13094 tree arg00
= TREE_OPERAND (arg0
, 0);
13095 tree arg01
= TREE_OPERAND (arg0
, 1);
13096 tree arg10
= TREE_OPERAND (arg1
, 0);
13097 tree arg11
= TREE_OPERAND (arg1
, 1);
13098 tree itype
= TREE_TYPE (arg0
);
13100 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13101 operand_equal_p guarantees no side-effects so we don't need
13102 to use omit_one_operand on Z. */
13103 if (operand_equal_p (arg01
, arg11
, 0))
13104 return fold_build2_loc (loc
, code
, type
, arg00
,
13105 fold_convert_loc (loc
, TREE_TYPE (arg00
),
13107 if (operand_equal_p (arg01
, arg10
, 0))
13108 return fold_build2_loc (loc
, code
, type
, arg00
,
13109 fold_convert_loc (loc
, TREE_TYPE (arg00
),
13111 if (operand_equal_p (arg00
, arg11
, 0))
13112 return fold_build2_loc (loc
, code
, type
, arg01
,
13113 fold_convert_loc (loc
, TREE_TYPE (arg01
),
13115 if (operand_equal_p (arg00
, arg10
, 0))
13116 return fold_build2_loc (loc
, code
, type
, arg01
,
13117 fold_convert_loc (loc
, TREE_TYPE (arg01
),
13120 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13121 if (TREE_CODE (arg01
) == INTEGER_CST
13122 && TREE_CODE (arg11
) == INTEGER_CST
)
13124 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg01
,
13125 fold_convert_loc (loc
, itype
, arg11
));
13126 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg00
, tem
);
13127 return fold_build2_loc (loc
, code
, type
, tem
,
13128 fold_convert_loc (loc
, itype
, arg10
));
13132 /* Attempt to simplify equality/inequality comparisons of complex
13133 values. Only lower the comparison if the result is known or
13134 can be simplified to a single scalar comparison. */
13135 if ((TREE_CODE (arg0
) == COMPLEX_EXPR
13136 || TREE_CODE (arg0
) == COMPLEX_CST
)
13137 && (TREE_CODE (arg1
) == COMPLEX_EXPR
13138 || TREE_CODE (arg1
) == COMPLEX_CST
))
13140 tree real0
, imag0
, real1
, imag1
;
13143 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
13145 real0
= TREE_OPERAND (arg0
, 0);
13146 imag0
= TREE_OPERAND (arg0
, 1);
13150 real0
= TREE_REALPART (arg0
);
13151 imag0
= TREE_IMAGPART (arg0
);
13154 if (TREE_CODE (arg1
) == COMPLEX_EXPR
)
13156 real1
= TREE_OPERAND (arg1
, 0);
13157 imag1
= TREE_OPERAND (arg1
, 1);
13161 real1
= TREE_REALPART (arg1
);
13162 imag1
= TREE_IMAGPART (arg1
);
13165 rcond
= fold_binary_loc (loc
, code
, type
, real0
, real1
);
13166 if (rcond
&& TREE_CODE (rcond
) == INTEGER_CST
)
13168 if (integer_zerop (rcond
))
13170 if (code
== EQ_EXPR
)
13171 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
13173 return fold_build2_loc (loc
, NE_EXPR
, type
, imag0
, imag1
);
13177 if (code
== NE_EXPR
)
13178 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
13180 return fold_build2_loc (loc
, EQ_EXPR
, type
, imag0
, imag1
);
13184 icond
= fold_binary_loc (loc
, code
, type
, imag0
, imag1
);
13185 if (icond
&& TREE_CODE (icond
) == INTEGER_CST
)
13187 if (integer_zerop (icond
))
13189 if (code
== EQ_EXPR
)
13190 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
13192 return fold_build2_loc (loc
, NE_EXPR
, type
, real0
, real1
);
13196 if (code
== NE_EXPR
)
13197 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
13199 return fold_build2_loc (loc
, EQ_EXPR
, type
, real0
, real1
);
13210 tem
= fold_comparison (loc
, code
, type
, op0
, op1
);
13211 if (tem
!= NULL_TREE
)
13214 /* Transform comparisons of the form X +- C CMP X. */
13215 if ((TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
13216 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
13217 && ((TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
13218 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
))))
13219 || (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
13220 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))))
13222 tree arg01
= TREE_OPERAND (arg0
, 1);
13223 enum tree_code code0
= TREE_CODE (arg0
);
13226 if (TREE_CODE (arg01
) == REAL_CST
)
13227 is_positive
= REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01
)) ? -1 : 1;
13229 is_positive
= tree_int_cst_sgn (arg01
);
13231 /* (X - c) > X becomes false. */
13232 if (code
== GT_EXPR
13233 && ((code0
== MINUS_EXPR
&& is_positive
>= 0)
13234 || (code0
== PLUS_EXPR
&& is_positive
<= 0)))
13236 if (TREE_CODE (arg01
) == INTEGER_CST
13237 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
13238 fold_overflow_warning (("assuming signed overflow does not "
13239 "occur when assuming that (X - c) > X "
13240 "is always false"),
13241 WARN_STRICT_OVERFLOW_ALL
);
13242 return constant_boolean_node (0, type
);
13245 /* Likewise (X + c) < X becomes false. */
13246 if (code
== LT_EXPR
13247 && ((code0
== PLUS_EXPR
&& is_positive
>= 0)
13248 || (code0
== MINUS_EXPR
&& is_positive
<= 0)))
13250 if (TREE_CODE (arg01
) == INTEGER_CST
13251 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
13252 fold_overflow_warning (("assuming signed overflow does not "
13253 "occur when assuming that "
13254 "(X + c) < X is always false"),
13255 WARN_STRICT_OVERFLOW_ALL
);
13256 return constant_boolean_node (0, type
);
13259 /* Convert (X - c) <= X to true. */
13260 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
)))
13262 && ((code0
== MINUS_EXPR
&& is_positive
>= 0)
13263 || (code0
== PLUS_EXPR
&& is_positive
<= 0)))
13265 if (TREE_CODE (arg01
) == INTEGER_CST
13266 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
13267 fold_overflow_warning (("assuming signed overflow does not "
13268 "occur when assuming that "
13269 "(X - c) <= X is always true"),
13270 WARN_STRICT_OVERFLOW_ALL
);
13271 return constant_boolean_node (1, type
);
13274 /* Convert (X + c) >= X to true. */
13275 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
)))
13277 && ((code0
== PLUS_EXPR
&& is_positive
>= 0)
13278 || (code0
== MINUS_EXPR
&& is_positive
<= 0)))
13280 if (TREE_CODE (arg01
) == INTEGER_CST
13281 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
13282 fold_overflow_warning (("assuming signed overflow does not "
13283 "occur when assuming that "
13284 "(X + c) >= X is always true"),
13285 WARN_STRICT_OVERFLOW_ALL
);
13286 return constant_boolean_node (1, type
);
13289 if (TREE_CODE (arg01
) == INTEGER_CST
)
13291 /* Convert X + c > X and X - c < X to true for integers. */
13292 if (code
== GT_EXPR
13293 && ((code0
== PLUS_EXPR
&& is_positive
> 0)
13294 || (code0
== MINUS_EXPR
&& is_positive
< 0)))
13296 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
13297 fold_overflow_warning (("assuming signed overflow does "
13298 "not occur when assuming that "
13299 "(X + c) > X is always true"),
13300 WARN_STRICT_OVERFLOW_ALL
);
13301 return constant_boolean_node (1, type
);
13304 if (code
== LT_EXPR
13305 && ((code0
== MINUS_EXPR
&& is_positive
> 0)
13306 || (code0
== PLUS_EXPR
&& is_positive
< 0)))
13308 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
13309 fold_overflow_warning (("assuming signed overflow does "
13310 "not occur when assuming that "
13311 "(X - c) < X is always true"),
13312 WARN_STRICT_OVERFLOW_ALL
);
13313 return constant_boolean_node (1, type
);
13316 /* Convert X + c <= X and X - c >= X to false for integers. */
13317 if (code
== LE_EXPR
13318 && ((code0
== PLUS_EXPR
&& is_positive
> 0)
13319 || (code0
== MINUS_EXPR
&& is_positive
< 0)))
13321 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
13322 fold_overflow_warning (("assuming signed overflow does "
13323 "not occur when assuming that "
13324 "(X + c) <= X is always false"),
13325 WARN_STRICT_OVERFLOW_ALL
);
13326 return constant_boolean_node (0, type
);
13329 if (code
== GE_EXPR
13330 && ((code0
== MINUS_EXPR
&& is_positive
> 0)
13331 || (code0
== PLUS_EXPR
&& is_positive
< 0)))
13333 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
13334 fold_overflow_warning (("assuming signed overflow does "
13335 "not occur when assuming that "
13336 "(X - c) >= X is always false"),
13337 WARN_STRICT_OVERFLOW_ALL
);
13338 return constant_boolean_node (0, type
);
13343 /* Comparisons with the highest or lowest possible integer of
13344 the specified precision will have known values. */
13346 tree arg1_type
= TREE_TYPE (arg1
);
13347 unsigned int prec
= TYPE_PRECISION (arg1_type
);
13349 if (TREE_CODE (arg1
) == INTEGER_CST
13350 && (INTEGRAL_TYPE_P (arg1_type
) || POINTER_TYPE_P (arg1_type
)))
13352 wide_int max
= wi::max_value (arg1_type
);
13353 wide_int signed_max
= wi::max_value (prec
, SIGNED
);
13354 wide_int min
= wi::min_value (arg1_type
);
13356 if (wi::eq_p (arg1
, max
))
13360 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
13363 return fold_build2_loc (loc
, EQ_EXPR
, type
, op0
, op1
);
13366 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
13369 return fold_build2_loc (loc
, NE_EXPR
, type
, op0
, op1
);
13371 /* The GE_EXPR and LT_EXPR cases above are not normally
13372 reached because of previous transformations. */
13377 else if (wi::eq_p (arg1
, max
- 1))
13381 arg1
= const_binop (PLUS_EXPR
, arg1
,
13382 build_int_cst (TREE_TYPE (arg1
), 1));
13383 return fold_build2_loc (loc
, EQ_EXPR
, type
,
13384 fold_convert_loc (loc
,
13385 TREE_TYPE (arg1
), arg0
),
13388 arg1
= const_binop (PLUS_EXPR
, arg1
,
13389 build_int_cst (TREE_TYPE (arg1
), 1));
13390 return fold_build2_loc (loc
, NE_EXPR
, type
,
13391 fold_convert_loc (loc
, TREE_TYPE (arg1
),
13397 else if (wi::eq_p (arg1
, min
))
13401 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
13404 return fold_build2_loc (loc
, EQ_EXPR
, type
, op0
, op1
);
13407 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
13410 return fold_build2_loc (loc
, NE_EXPR
, type
, op0
, op1
);
13415 else if (wi::eq_p (arg1
, min
+ 1))
13419 arg1
= const_binop (MINUS_EXPR
, arg1
,
13420 build_int_cst (TREE_TYPE (arg1
), 1));
13421 return fold_build2_loc (loc
, NE_EXPR
, type
,
13422 fold_convert_loc (loc
,
13423 TREE_TYPE (arg1
), arg0
),
13426 arg1
= const_binop (MINUS_EXPR
, arg1
,
13427 build_int_cst (TREE_TYPE (arg1
), 1));
13428 return fold_build2_loc (loc
, EQ_EXPR
, type
,
13429 fold_convert_loc (loc
, TREE_TYPE (arg1
),
13436 else if (wi::eq_p (arg1
, signed_max
)
13437 && TYPE_UNSIGNED (arg1_type
)
13438 /* We will flip the signedness of the comparison operator
13439 associated with the mode of arg1, so the sign bit is
13440 specified by this mode. Check that arg1 is the signed
13441 max associated with this sign bit. */
13442 && prec
== GET_MODE_PRECISION (TYPE_MODE (arg1_type
))
13443 /* signed_type does not work on pointer types. */
13444 && INTEGRAL_TYPE_P (arg1_type
))
13446 /* The following case also applies to X < signed_max+1
13447 and X >= signed_max+1 because previous transformations. */
13448 if (code
== LE_EXPR
|| code
== GT_EXPR
)
13450 tree st
= signed_type_for (arg1_type
);
13451 return fold_build2_loc (loc
,
13452 code
== LE_EXPR
? GE_EXPR
: LT_EXPR
,
13453 type
, fold_convert_loc (loc
, st
, arg0
),
13454 build_int_cst (st
, 0));
13460 /* If we are comparing an ABS_EXPR with a constant, we can
13461 convert all the cases into explicit comparisons, but they may
13462 well not be faster than doing the ABS and one comparison.
13463 But ABS (X) <= C is a range comparison, which becomes a subtraction
13464 and a comparison, and is probably faster. */
13465 if (code
== LE_EXPR
13466 && TREE_CODE (arg1
) == INTEGER_CST
13467 && TREE_CODE (arg0
) == ABS_EXPR
13468 && ! TREE_SIDE_EFFECTS (arg0
)
13469 && (0 != (tem
= negate_expr (arg1
)))
13470 && TREE_CODE (tem
) == INTEGER_CST
13471 && !TREE_OVERFLOW (tem
))
13472 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
13473 build2 (GE_EXPR
, type
,
13474 TREE_OPERAND (arg0
, 0), tem
),
13475 build2 (LE_EXPR
, type
,
13476 TREE_OPERAND (arg0
, 0), arg1
));
13478 /* Convert ABS_EXPR<x> >= 0 to true. */
13479 strict_overflow_p
= false;
13480 if (code
== GE_EXPR
13481 && (integer_zerop (arg1
)
13482 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
)))
13483 && real_zerop (arg1
)))
13484 && tree_expr_nonnegative_warnv_p (arg0
, &strict_overflow_p
))
13486 if (strict_overflow_p
)
13487 fold_overflow_warning (("assuming signed overflow does not occur "
13488 "when simplifying comparison of "
13489 "absolute value and zero"),
13490 WARN_STRICT_OVERFLOW_CONDITIONAL
);
13491 return omit_one_operand_loc (loc
, type
,
13492 constant_boolean_node (true, type
),
13496 /* Convert ABS_EXPR<x> < 0 to false. */
13497 strict_overflow_p
= false;
13498 if (code
== LT_EXPR
13499 && (integer_zerop (arg1
) || real_zerop (arg1
))
13500 && tree_expr_nonnegative_warnv_p (arg0
, &strict_overflow_p
))
13502 if (strict_overflow_p
)
13503 fold_overflow_warning (("assuming signed overflow does not occur "
13504 "when simplifying comparison of "
13505 "absolute value and zero"),
13506 WARN_STRICT_OVERFLOW_CONDITIONAL
);
13507 return omit_one_operand_loc (loc
, type
,
13508 constant_boolean_node (false, type
),
13512 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13513 and similarly for >= into !=. */
13514 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
13515 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
13516 && TREE_CODE (arg1
) == LSHIFT_EXPR
13517 && integer_onep (TREE_OPERAND (arg1
, 0)))
13518 return build2_loc (loc
, code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
13519 build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
13520 TREE_OPERAND (arg1
, 1)),
13521 build_zero_cst (TREE_TYPE (arg0
)));
13523 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
13524 otherwise Y might be >= # of bits in X's type and thus e.g.
13525 (unsigned char) (1 << Y) for Y 15 might be 0.
13526 If the cast is widening, then 1 << Y should have unsigned type,
13527 otherwise if Y is number of bits in the signed shift type minus 1,
13528 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
13529 31 might be 0xffffffff80000000. */
13530 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
13531 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
13532 && CONVERT_EXPR_P (arg1
)
13533 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == LSHIFT_EXPR
13534 && (TYPE_PRECISION (TREE_TYPE (arg1
))
13535 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1
, 0))))
13536 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1
, 0)))
13537 || (TYPE_PRECISION (TREE_TYPE (arg1
))
13538 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1
, 0)))))
13539 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0)))
13541 tem
= build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
13542 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1));
13543 return build2_loc (loc
, code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
13544 fold_convert_loc (loc
, TREE_TYPE (arg0
), tem
),
13545 build_zero_cst (TREE_TYPE (arg0
)));
13550 case UNORDERED_EXPR
:
13558 if (TREE_CODE (arg0
) == REAL_CST
&& TREE_CODE (arg1
) == REAL_CST
)
13560 t1
= fold_relational_const (code
, type
, arg0
, arg1
);
13561 if (t1
!= NULL_TREE
)
13565 /* If the first operand is NaN, the result is constant. */
13566 if (TREE_CODE (arg0
) == REAL_CST
13567 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0
))
13568 && (code
!= LTGT_EXPR
|| ! flag_trapping_math
))
13570 t1
= (code
== ORDERED_EXPR
|| code
== LTGT_EXPR
)
13571 ? integer_zero_node
13572 : integer_one_node
;
13573 return omit_one_operand_loc (loc
, type
, t1
, arg1
);
13576 /* If the second operand is NaN, the result is constant. */
13577 if (TREE_CODE (arg1
) == REAL_CST
13578 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1
))
13579 && (code
!= LTGT_EXPR
|| ! flag_trapping_math
))
13581 t1
= (code
== ORDERED_EXPR
|| code
== LTGT_EXPR
)
13582 ? integer_zero_node
13583 : integer_one_node
;
13584 return omit_one_operand_loc (loc
, type
, t1
, arg0
);
13587 /* Simplify unordered comparison of something with itself. */
13588 if ((code
== UNLE_EXPR
|| code
== UNGE_EXPR
|| code
== UNEQ_EXPR
)
13589 && operand_equal_p (arg0
, arg1
, 0))
13590 return constant_boolean_node (1, type
);
13592 if (code
== LTGT_EXPR
13593 && !flag_trapping_math
13594 && operand_equal_p (arg0
, arg1
, 0))
13595 return constant_boolean_node (0, type
);
13597 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13599 tree targ0
= strip_float_extensions (arg0
);
13600 tree targ1
= strip_float_extensions (arg1
);
13601 tree newtype
= TREE_TYPE (targ0
);
13603 if (TYPE_PRECISION (TREE_TYPE (targ1
)) > TYPE_PRECISION (newtype
))
13604 newtype
= TREE_TYPE (targ1
);
13606 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (TREE_TYPE (arg0
)))
13607 return fold_build2_loc (loc
, code
, type
,
13608 fold_convert_loc (loc
, newtype
, targ0
),
13609 fold_convert_loc (loc
, newtype
, targ1
));
13614 case COMPOUND_EXPR
:
13615 /* When pedantic, a compound expression can be neither an lvalue
13616 nor an integer constant expression. */
13617 if (TREE_SIDE_EFFECTS (arg0
) || TREE_CONSTANT (arg1
))
13619 /* Don't let (0, 0) be null pointer constant. */
13620 tem
= integer_zerop (arg1
) ? build1 (NOP_EXPR
, type
, arg1
)
13621 : fold_convert_loc (loc
, type
, arg1
);
13622 return pedantic_non_lvalue_loc (loc
, tem
);
13625 if ((TREE_CODE (arg0
) == REAL_CST
13626 && TREE_CODE (arg1
) == REAL_CST
)
13627 || (TREE_CODE (arg0
) == INTEGER_CST
13628 && TREE_CODE (arg1
) == INTEGER_CST
))
13629 return build_complex (type
, arg0
, arg1
);
13630 if (TREE_CODE (arg0
) == REALPART_EXPR
13631 && TREE_CODE (arg1
) == IMAGPART_EXPR
13632 && TREE_TYPE (TREE_OPERAND (arg0
, 0)) == type
13633 && operand_equal_p (TREE_OPERAND (arg0
, 0),
13634 TREE_OPERAND (arg1
, 0), 0))
13635 return omit_one_operand_loc (loc
, type
, TREE_OPERAND (arg0
, 0),
13636 TREE_OPERAND (arg1
, 0));
13640 /* An ASSERT_EXPR should never be passed to fold_binary. */
13641 gcc_unreachable ();
13643 case VEC_PACK_TRUNC_EXPR
:
13644 case VEC_PACK_FIX_TRUNC_EXPR
:
13646 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
13649 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)) == nelts
/ 2
13650 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)) == nelts
/ 2);
13651 if (TREE_CODE (arg0
) != VECTOR_CST
|| TREE_CODE (arg1
) != VECTOR_CST
)
13654 elts
= XALLOCAVEC (tree
, nelts
);
13655 if (!vec_cst_ctor_to_array (arg0
, elts
)
13656 || !vec_cst_ctor_to_array (arg1
, elts
+ nelts
/ 2))
13659 for (i
= 0; i
< nelts
; i
++)
13661 elts
[i
] = fold_convert_const (code
== VEC_PACK_TRUNC_EXPR
13662 ? NOP_EXPR
: FIX_TRUNC_EXPR
,
13663 TREE_TYPE (type
), elts
[i
]);
13664 if (elts
[i
] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[i
]))
13668 return build_vector (type
, elts
);
13671 case VEC_WIDEN_MULT_LO_EXPR
:
13672 case VEC_WIDEN_MULT_HI_EXPR
:
13673 case VEC_WIDEN_MULT_EVEN_EXPR
:
13674 case VEC_WIDEN_MULT_ODD_EXPR
:
13676 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
);
13677 unsigned int out
, ofs
, scale
;
13680 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)) == nelts
* 2
13681 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)) == nelts
* 2);
13682 if (TREE_CODE (arg0
) != VECTOR_CST
|| TREE_CODE (arg1
) != VECTOR_CST
)
13685 elts
= XALLOCAVEC (tree
, nelts
* 4);
13686 if (!vec_cst_ctor_to_array (arg0
, elts
)
13687 || !vec_cst_ctor_to_array (arg1
, elts
+ nelts
* 2))
13690 if (code
== VEC_WIDEN_MULT_LO_EXPR
)
13691 scale
= 0, ofs
= BYTES_BIG_ENDIAN
? nelts
: 0;
13692 else if (code
== VEC_WIDEN_MULT_HI_EXPR
)
13693 scale
= 0, ofs
= BYTES_BIG_ENDIAN
? 0 : nelts
;
13694 else if (code
== VEC_WIDEN_MULT_EVEN_EXPR
)
13695 scale
= 1, ofs
= 0;
13696 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13697 scale
= 1, ofs
= 1;
13699 for (out
= 0; out
< nelts
; out
++)
13701 unsigned int in1
= (out
<< scale
) + ofs
;
13702 unsigned int in2
= in1
+ nelts
* 2;
13705 t1
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), elts
[in1
]);
13706 t2
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), elts
[in2
]);
13708 if (t1
== NULL_TREE
|| t2
== NULL_TREE
)
13710 elts
[out
] = const_binop (MULT_EXPR
, t1
, t2
);
13711 if (elts
[out
] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[out
]))
13715 return build_vector (type
, elts
);
13720 } /* switch (code) */
/* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
   a LABEL_EXPR; otherwise return NULL_TREE.  Do not check the subtrees
   of GOTO_EXPR.  */

static tree
contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      /* Found a label: report it to the caller by returning it.  */
      return *tp;

    case GOTO_EXPR:
      /* Do not descend into the operands of a goto; a label mentioned
	 there is a use, not a definition reachable from outside.  */
      *walk_subtrees = 0;

      /* ... fall through ... */

    default:
      return NULL_TREE;
    }
}
/* Return whether the sub-tree ST contains a label which is accessible from
   outside the sub-tree.  */

static bool
contains_label_p (tree st)
{
  /* walk_tree_without_duplicates returns the first LABEL_EXPR found by
     the contains_label_1 callback, or NULL_TREE if there is none.  */
  return
   (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
}
13755 /* Fold a ternary expression of code CODE and type TYPE with operands
13756 OP0, OP1, and OP2. Return the folded expression if folding is
13757 successful. Otherwise, return NULL_TREE. */
13760 fold_ternary_loc (location_t loc
, enum tree_code code
, tree type
,
13761 tree op0
, tree op1
, tree op2
)
13764 tree arg0
= NULL_TREE
, arg1
= NULL_TREE
, arg2
= NULL_TREE
;
13765 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
13767 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
13768 && TREE_CODE_LENGTH (code
) == 3);
13770 /* If this is a commutative operation, and OP0 is a constant, move it
13771 to OP1 to reduce the number of tests below. */
13772 if (commutative_ternary_tree_code (code
)
13773 && tree_swap_operands_p (op0
, op1
, true))
13774 return fold_build3_loc (loc
, code
, type
, op1
, op0
, op2
);
13776 tem
= generic_simplify (loc
, code
, type
, op0
, op1
, op2
);
13780 /* Strip any conversions that don't change the mode. This is safe
13781 for every expression, except for a comparison expression because
13782 its signedness is derived from its operands. So, in the latter
13783 case, only strip conversions that don't change the signedness.
13785 Note that this is done as an internal manipulation within the
13786 constant folder, in order to find the simplest representation of
13787 the arguments so that their form can be studied. In any cases,
13788 the appropriate type conversions should be put back in the tree
13789 that will get out of the constant folder. */
13810 case COMPONENT_REF
:
13811 if (TREE_CODE (arg0
) == CONSTRUCTOR
13812 && ! type_contains_placeholder_p (TREE_TYPE (arg0
)))
13814 unsigned HOST_WIDE_INT idx
;
13816 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0
), idx
, field
, value
)
13823 case VEC_COND_EXPR
:
13824 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13825 so all simple results must be passed through pedantic_non_lvalue. */
13826 if (TREE_CODE (arg0
) == INTEGER_CST
)
13828 tree unused_op
= integer_zerop (arg0
) ? op1
: op2
;
13829 tem
= integer_zerop (arg0
) ? op2
: op1
;
13830 /* Only optimize constant conditions when the selected branch
13831 has the same type as the COND_EXPR. This avoids optimizing
13832 away "c ? x : throw", where the throw has a void type.
13833 Avoid throwing away that operand which contains label. */
13834 if ((!TREE_SIDE_EFFECTS (unused_op
)
13835 || !contains_label_p (unused_op
))
13836 && (! VOID_TYPE_P (TREE_TYPE (tem
))
13837 || VOID_TYPE_P (type
)))
13838 return pedantic_non_lvalue_loc (loc
, tem
);
13841 else if (TREE_CODE (arg0
) == VECTOR_CST
)
13843 if (integer_all_onesp (arg0
))
13844 return pedantic_omit_one_operand_loc (loc
, type
, arg1
, arg2
);
13845 if (integer_zerop (arg0
))
13846 return pedantic_omit_one_operand_loc (loc
, type
, arg2
, arg1
);
13848 if ((TREE_CODE (arg1
) == VECTOR_CST
13849 || TREE_CODE (arg1
) == CONSTRUCTOR
)
13850 && (TREE_CODE (arg2
) == VECTOR_CST
13851 || TREE_CODE (arg2
) == CONSTRUCTOR
))
13853 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
13854 unsigned char *sel
= XALLOCAVEC (unsigned char, nelts
);
13855 gcc_assert (nelts
== VECTOR_CST_NELTS (arg0
));
13856 for (i
= 0; i
< nelts
; i
++)
13858 tree val
= VECTOR_CST_ELT (arg0
, i
);
13859 if (integer_all_onesp (val
))
13861 else if (integer_zerop (val
))
13862 sel
[i
] = nelts
+ i
;
13863 else /* Currently unreachable. */
13866 tree t
= fold_vec_perm (type
, arg1
, arg2
, sel
);
13867 if (t
!= NULL_TREE
)
13872 if (operand_equal_p (arg1
, op2
, 0))
13873 return pedantic_omit_one_operand_loc (loc
, type
, arg1
, arg0
);
13875 /* If we have A op B ? A : C, we may be able to convert this to a
13876 simpler expression, depending on the operation and the values
13877 of B and C. Signed zeros prevent all of these transformations,
13878 for reasons given above each one.
13880 Also try swapping the arguments and inverting the conditional. */
13881 if (COMPARISON_CLASS_P (arg0
)
13882 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
13883 arg1
, TREE_OPERAND (arg0
, 1))
13884 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1
))))
13886 tem
= fold_cond_expr_with_comparison (loc
, type
, arg0
, op1
, op2
);
13891 if (COMPARISON_CLASS_P (arg0
)
13892 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
13894 TREE_OPERAND (arg0
, 1))
13895 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2
))))
13897 location_t loc0
= expr_location_or (arg0
, loc
);
13898 tem
= fold_invert_truthvalue (loc0
, arg0
);
13899 if (tem
&& COMPARISON_CLASS_P (tem
))
13901 tem
= fold_cond_expr_with_comparison (loc
, type
, tem
, op2
, op1
);
13907 /* If the second operand is simpler than the third, swap them
13908 since that produces better jump optimization results. */
13909 if (truth_value_p (TREE_CODE (arg0
))
13910 && tree_swap_operands_p (op1
, op2
, false))
13912 location_t loc0
= expr_location_or (arg0
, loc
);
13913 /* See if this can be inverted. If it can't, possibly because
13914 it was a floating-point inequality comparison, don't do
13916 tem
= fold_invert_truthvalue (loc0
, arg0
);
13918 return fold_build3_loc (loc
, code
, type
, tem
, op2
, op1
);
13921 /* Convert A ? 1 : 0 to simply A. */
13922 if ((code
== VEC_COND_EXPR
? integer_all_onesp (op1
)
13923 : (integer_onep (op1
)
13924 && !VECTOR_TYPE_P (type
)))
13925 && integer_zerop (op2
)
13926 /* If we try to convert OP0 to our type, the
13927 call to fold will try to move the conversion inside
13928 a COND, which will recurse. In that case, the COND_EXPR
13929 is probably the best choice, so leave it alone. */
13930 && type
== TREE_TYPE (arg0
))
13931 return pedantic_non_lvalue_loc (loc
, arg0
);
13933 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13934 over COND_EXPR in cases such as floating point comparisons. */
13935 if (integer_zerop (op1
)
13936 && (code
== VEC_COND_EXPR
? integer_all_onesp (op2
)
13937 : (integer_onep (op2
)
13938 && !VECTOR_TYPE_P (type
)))
13939 && truth_value_p (TREE_CODE (arg0
)))
13940 return pedantic_non_lvalue_loc (loc
,
13941 fold_convert_loc (loc
, type
,
13942 invert_truthvalue_loc (loc
,
13945 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13946 if (TREE_CODE (arg0
) == LT_EXPR
13947 && integer_zerop (TREE_OPERAND (arg0
, 1))
13948 && integer_zerop (op2
)
13949 && (tem
= sign_bit_p (TREE_OPERAND (arg0
, 0), arg1
)))
13951 /* sign_bit_p looks through both zero and sign extensions,
13952 but for this optimization only sign extensions are
13954 tree tem2
= TREE_OPERAND (arg0
, 0);
13955 while (tem
!= tem2
)
13957 if (TREE_CODE (tem2
) != NOP_EXPR
13958 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2
, 0))))
13963 tem2
= TREE_OPERAND (tem2
, 0);
13965 /* sign_bit_p only checks ARG1 bits within A's precision.
13966 If <sign bit of A> has wider type than A, bits outside
13967 of A's precision in <sign bit of A> need to be checked.
13968 If they are all 0, this optimization needs to be done
13969 in unsigned A's type, if they are all 1 in signed A's type,
13970 otherwise this can't be done. */
13972 && TYPE_PRECISION (TREE_TYPE (tem
))
13973 < TYPE_PRECISION (TREE_TYPE (arg1
))
13974 && TYPE_PRECISION (TREE_TYPE (tem
))
13975 < TYPE_PRECISION (type
))
13977 int inner_width
, outer_width
;
13980 inner_width
= TYPE_PRECISION (TREE_TYPE (tem
));
13981 outer_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
13982 if (outer_width
> TYPE_PRECISION (type
))
13983 outer_width
= TYPE_PRECISION (type
);
13985 wide_int mask
= wi::shifted_mask
13986 (inner_width
, outer_width
- inner_width
, false,
13987 TYPE_PRECISION (TREE_TYPE (arg1
)));
13989 wide_int common
= mask
& arg1
;
13990 if (common
== mask
)
13992 tem_type
= signed_type_for (TREE_TYPE (tem
));
13993 tem
= fold_convert_loc (loc
, tem_type
, tem
);
13995 else if (common
== 0)
13997 tem_type
= unsigned_type_for (TREE_TYPE (tem
));
13998 tem
= fold_convert_loc (loc
, tem_type
, tem
);
14006 fold_convert_loc (loc
, type
,
14007 fold_build2_loc (loc
, BIT_AND_EXPR
,
14008 TREE_TYPE (tem
), tem
,
14009 fold_convert_loc (loc
,
14014 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14015 already handled above. */
14016 if (TREE_CODE (arg0
) == BIT_AND_EXPR
14017 && integer_onep (TREE_OPERAND (arg0
, 1))
14018 && integer_zerop (op2
)
14019 && integer_pow2p (arg1
))
14021 tree tem
= TREE_OPERAND (arg0
, 0);
14023 if (TREE_CODE (tem
) == RSHIFT_EXPR
14024 && tree_fits_uhwi_p (TREE_OPERAND (tem
, 1))
14025 && (unsigned HOST_WIDE_INT
) tree_log2 (arg1
) ==
14026 tree_to_uhwi (TREE_OPERAND (tem
, 1)))
14027 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
14028 TREE_OPERAND (tem
, 0), arg1
);
14031 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14032 is probably obsolete because the first operand should be a
14033 truth value (that's why we have the two cases above), but let's
14034 leave it in until we can confirm this for all front-ends. */
14035 if (integer_zerop (op2
)
14036 && TREE_CODE (arg0
) == NE_EXPR
14037 && integer_zerop (TREE_OPERAND (arg0
, 1))
14038 && integer_pow2p (arg1
)
14039 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
14040 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
14041 arg1
, OEP_ONLY_CONST
))
14042 return pedantic_non_lvalue_loc (loc
,
14043 fold_convert_loc (loc
, type
,
14044 TREE_OPERAND (arg0
, 0)));
14046 /* Disable the transformations below for vectors, since
14047 fold_binary_op_with_conditional_arg may undo them immediately,
14048 yielding an infinite loop. */
14049 if (code
== VEC_COND_EXPR
)
14052 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14053 if (integer_zerop (op2
)
14054 && truth_value_p (TREE_CODE (arg0
))
14055 && truth_value_p (TREE_CODE (arg1
))
14056 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
14057 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
? BIT_AND_EXPR
14058 : TRUTH_ANDIF_EXPR
,
14059 type
, fold_convert_loc (loc
, type
, arg0
), arg1
);
14061 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14062 if (code
== VEC_COND_EXPR
? integer_all_onesp (op2
) : integer_onep (op2
)
14063 && truth_value_p (TREE_CODE (arg0
))
14064 && truth_value_p (TREE_CODE (arg1
))
14065 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
14067 location_t loc0
= expr_location_or (arg0
, loc
);
14068 /* Only perform transformation if ARG0 is easily inverted. */
14069 tem
= fold_invert_truthvalue (loc0
, arg0
);
14071 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
14074 type
, fold_convert_loc (loc
, type
, tem
),
14078 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14079 if (integer_zerop (arg1
)
14080 && truth_value_p (TREE_CODE (arg0
))
14081 && truth_value_p (TREE_CODE (op2
))
14082 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
14084 location_t loc0
= expr_location_or (arg0
, loc
);
14085 /* Only perform transformation if ARG0 is easily inverted. */
14086 tem
= fold_invert_truthvalue (loc0
, arg0
);
14088 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
14089 ? BIT_AND_EXPR
: TRUTH_ANDIF_EXPR
,
14090 type
, fold_convert_loc (loc
, type
, tem
),
14094 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14095 if (code
== VEC_COND_EXPR
? integer_all_onesp (arg1
) : integer_onep (arg1
)
14096 && truth_value_p (TREE_CODE (arg0
))
14097 && truth_value_p (TREE_CODE (op2
))
14098 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
14099 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
14100 ? BIT_IOR_EXPR
: TRUTH_ORIF_EXPR
,
14101 type
, fold_convert_loc (loc
, type
, arg0
), op2
);
14106 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14107 of fold_ternary on them. */
14108 gcc_unreachable ();
14110 case BIT_FIELD_REF
:
14111 if ((TREE_CODE (arg0
) == VECTOR_CST
14112 || (TREE_CODE (arg0
) == CONSTRUCTOR
14113 && TREE_CODE (TREE_TYPE (arg0
)) == VECTOR_TYPE
))
14114 && (type
== TREE_TYPE (TREE_TYPE (arg0
))
14115 || (TREE_CODE (type
) == VECTOR_TYPE
14116 && TREE_TYPE (type
) == TREE_TYPE (TREE_TYPE (arg0
)))))
14118 tree eltype
= TREE_TYPE (TREE_TYPE (arg0
));
14119 unsigned HOST_WIDE_INT width
= tree_to_uhwi (TYPE_SIZE (eltype
));
14120 unsigned HOST_WIDE_INT n
= tree_to_uhwi (arg1
);
14121 unsigned HOST_WIDE_INT idx
= tree_to_uhwi (op2
);
14124 && (idx
% width
) == 0
14125 && (n
% width
) == 0
14126 && ((idx
+ n
) / width
) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)))
14131 if (TREE_CODE (arg0
) == VECTOR_CST
)
14134 return VECTOR_CST_ELT (arg0
, idx
);
14136 tree
*vals
= XALLOCAVEC (tree
, n
);
14137 for (unsigned i
= 0; i
< n
; ++i
)
14138 vals
[i
] = VECTOR_CST_ELT (arg0
, idx
+ i
);
14139 return build_vector (type
, vals
);
14142 /* Constructor elements can be subvectors. */
14143 unsigned HOST_WIDE_INT k
= 1;
14144 if (CONSTRUCTOR_NELTS (arg0
) != 0)
14146 tree cons_elem
= TREE_TYPE (CONSTRUCTOR_ELT (arg0
, 0)->value
);
14147 if (TREE_CODE (cons_elem
) == VECTOR_TYPE
)
14148 k
= TYPE_VECTOR_SUBPARTS (cons_elem
);
14151 /* We keep an exact subset of the constructor elements. */
14152 if ((idx
% k
) == 0 && (n
% k
) == 0)
14154 if (CONSTRUCTOR_NELTS (arg0
) == 0)
14155 return build_constructor (type
, NULL
);
14160 if (idx
< CONSTRUCTOR_NELTS (arg0
))
14161 return CONSTRUCTOR_ELT (arg0
, idx
)->value
;
14162 return build_zero_cst (type
);
14165 vec
<constructor_elt
, va_gc
> *vals
;
14166 vec_alloc (vals
, n
);
14167 for (unsigned i
= 0;
14168 i
< n
&& idx
+ i
< CONSTRUCTOR_NELTS (arg0
);
14170 CONSTRUCTOR_APPEND_ELT (vals
, NULL_TREE
,
14172 (arg0
, idx
+ i
)->value
);
14173 return build_constructor (type
, vals
);
14175 /* The bitfield references a single constructor element. */
14176 else if (idx
+ n
<= (idx
/ k
+ 1) * k
)
14178 if (CONSTRUCTOR_NELTS (arg0
) <= idx
/ k
)
14179 return build_zero_cst (type
);
14181 return CONSTRUCTOR_ELT (arg0
, idx
/ k
)->value
;
14183 return fold_build3_loc (loc
, code
, type
,
14184 CONSTRUCTOR_ELT (arg0
, idx
/ k
)->value
, op1
,
14185 build_int_cst (TREE_TYPE (op2
), (idx
% k
) * width
));
14190 /* A bit-field-ref that referenced the full argument can be stripped. */
14191 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
14192 && TYPE_PRECISION (TREE_TYPE (arg0
)) == tree_to_uhwi (arg1
)
14193 && integer_zerop (op2
))
14194 return fold_convert_loc (loc
, type
, arg0
);
14196 /* On constants we can use native encode/interpret to constant
14197 fold (nearly) all BIT_FIELD_REFs. */
14198 if (CONSTANT_CLASS_P (arg0
)
14199 && can_native_interpret_type_p (type
)
14200 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0
)))
14201 /* This limitation should not be necessary, we just need to
14202 round this up to mode size. */
14203 && tree_to_uhwi (op1
) % BITS_PER_UNIT
== 0
14204 /* Need bit-shifting of the buffer to relax the following. */
14205 && tree_to_uhwi (op2
) % BITS_PER_UNIT
== 0)
14207 unsigned HOST_WIDE_INT bitpos
= tree_to_uhwi (op2
);
14208 unsigned HOST_WIDE_INT bitsize
= tree_to_uhwi (op1
);
14209 unsigned HOST_WIDE_INT clen
;
14210 clen
= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0
)));
14211 /* ??? We cannot tell native_encode_expr to start at
14212 some random byte only. So limit us to a reasonable amount
14216 unsigned char *b
= XALLOCAVEC (unsigned char, clen
);
14217 unsigned HOST_WIDE_INT len
= native_encode_expr (arg0
, b
, clen
);
14219 && len
* BITS_PER_UNIT
>= bitpos
+ bitsize
)
14221 tree v
= native_interpret_expr (type
,
14222 b
+ bitpos
/ BITS_PER_UNIT
,
14223 bitsize
/ BITS_PER_UNIT
);
14233 /* For integers we can decompose the FMA if possible. */
14234 if (TREE_CODE (arg0
) == INTEGER_CST
14235 && TREE_CODE (arg1
) == INTEGER_CST
)
14236 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
14237 const_binop (MULT_EXPR
, arg0
, arg1
), arg2
);
14238 if (integer_zerop (arg2
))
14239 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, arg1
);
14241 return fold_fma (loc
, type
, arg0
, arg1
, arg2
);
14243 case VEC_PERM_EXPR
:
14244 if (TREE_CODE (arg2
) == VECTOR_CST
)
14246 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
, mask
;
14247 unsigned char *sel
= XALLOCAVEC (unsigned char, nelts
);
14248 bool need_mask_canon
= false;
14249 bool all_in_vec0
= true;
14250 bool all_in_vec1
= true;
14251 bool maybe_identity
= true;
14252 bool single_arg
= (op0
== op1
);
14253 bool changed
= false;
14255 mask
= single_arg
? (nelts
- 1) : (2 * nelts
- 1);
14256 gcc_assert (nelts
== VECTOR_CST_NELTS (arg2
));
14257 for (i
= 0; i
< nelts
; i
++)
14259 tree val
= VECTOR_CST_ELT (arg2
, i
);
14260 if (TREE_CODE (val
) != INTEGER_CST
)
14263 /* Make sure that the perm value is in an acceptable
14266 if (wi::gtu_p (t
, mask
))
14268 need_mask_canon
= true;
14269 sel
[i
] = t
.to_uhwi () & mask
;
14272 sel
[i
] = t
.to_uhwi ();
14274 if (sel
[i
] < nelts
)
14275 all_in_vec1
= false;
14277 all_in_vec0
= false;
14279 if ((sel
[i
] & (nelts
-1)) != i
)
14280 maybe_identity
= false;
14283 if (maybe_identity
)
14293 else if (all_in_vec1
)
14296 for (i
= 0; i
< nelts
; i
++)
14298 need_mask_canon
= true;
14301 if ((TREE_CODE (op0
) == VECTOR_CST
14302 || TREE_CODE (op0
) == CONSTRUCTOR
)
14303 && (TREE_CODE (op1
) == VECTOR_CST
14304 || TREE_CODE (op1
) == CONSTRUCTOR
))
14306 tree t
= fold_vec_perm (type
, op0
, op1
, sel
);
14307 if (t
!= NULL_TREE
)
14311 if (op0
== op1
&& !single_arg
)
14314 if (need_mask_canon
&& arg2
== op2
)
14316 tree
*tsel
= XALLOCAVEC (tree
, nelts
);
14317 tree eltype
= TREE_TYPE (TREE_TYPE (arg2
));
14318 for (i
= 0; i
< nelts
; i
++)
14319 tsel
[i
] = build_int_cst (eltype
, sel
[i
]);
14320 op2
= build_vector (TREE_TYPE (arg2
), tsel
);
14325 return build3_loc (loc
, VEC_PERM_EXPR
, type
, op0
, op1
, op2
);
14331 } /* switch (code) */
14334 /* Perform constant folding and related simplification of EXPR.
14335 The related simplifications include x*1 => x, x*0 => 0, etc.,
14336 and application of the associative law.
14337 NOP_EXPR conversions may be removed freely (as long as we
14338 are careful not to change the type of the overall expression).
14339 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14340 but we can constant-fold them if they have constant operands. */
14342 #ifdef ENABLE_FOLD_CHECKING
14343 # define fold(x) fold_1 (x)
14344 static tree
fold_1 (tree
);
14350 const tree t
= expr
;
14351 enum tree_code code
= TREE_CODE (t
);
14352 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
14354 location_t loc
= EXPR_LOCATION (expr
);
14356 /* Return right away if a constant. */
14357 if (kind
== tcc_constant
)
14360 /* CALL_EXPR-like objects with variable numbers of operands are
14361 treated specially. */
14362 if (kind
== tcc_vl_exp
)
14364 if (code
== CALL_EXPR
)
14366 tem
= fold_call_expr (loc
, expr
, false);
14367 return tem
? tem
: expr
;
14372 if (IS_EXPR_CODE_CLASS (kind
))
14374 tree type
= TREE_TYPE (t
);
14375 tree op0
, op1
, op2
;
14377 switch (TREE_CODE_LENGTH (code
))
14380 op0
= TREE_OPERAND (t
, 0);
14381 tem
= fold_unary_loc (loc
, code
, type
, op0
);
14382 return tem
? tem
: expr
;
14384 op0
= TREE_OPERAND (t
, 0);
14385 op1
= TREE_OPERAND (t
, 1);
14386 tem
= fold_binary_loc (loc
, code
, type
, op0
, op1
);
14387 return tem
? tem
: expr
;
14389 op0
= TREE_OPERAND (t
, 0);
14390 op1
= TREE_OPERAND (t
, 1);
14391 op2
= TREE_OPERAND (t
, 2);
14392 tem
= fold_ternary_loc (loc
, code
, type
, op0
, op1
, op2
);
14393 return tem
? tem
: expr
;
14403 tree op0
= TREE_OPERAND (t
, 0);
14404 tree op1
= TREE_OPERAND (t
, 1);
14406 if (TREE_CODE (op1
) == INTEGER_CST
14407 && TREE_CODE (op0
) == CONSTRUCTOR
14408 && ! type_contains_placeholder_p (TREE_TYPE (op0
)))
14410 vec
<constructor_elt
, va_gc
> *elts
= CONSTRUCTOR_ELTS (op0
);
14411 unsigned HOST_WIDE_INT end
= vec_safe_length (elts
);
14412 unsigned HOST_WIDE_INT begin
= 0;
14414 /* Find a matching index by means of a binary search. */
14415 while (begin
!= end
)
14417 unsigned HOST_WIDE_INT middle
= (begin
+ end
) / 2;
14418 tree index
= (*elts
)[middle
].index
;
14420 if (TREE_CODE (index
) == INTEGER_CST
14421 && tree_int_cst_lt (index
, op1
))
14422 begin
= middle
+ 1;
14423 else if (TREE_CODE (index
) == INTEGER_CST
14424 && tree_int_cst_lt (op1
, index
))
14426 else if (TREE_CODE (index
) == RANGE_EXPR
14427 && tree_int_cst_lt (TREE_OPERAND (index
, 1), op1
))
14428 begin
= middle
+ 1;
14429 else if (TREE_CODE (index
) == RANGE_EXPR
14430 && tree_int_cst_lt (op1
, TREE_OPERAND (index
, 0)))
14433 return (*elts
)[middle
].value
;
14440 /* Return a VECTOR_CST if possible. */
14443 tree type
= TREE_TYPE (t
);
14444 if (TREE_CODE (type
) != VECTOR_TYPE
)
14447 tree
*vec
= XALLOCAVEC (tree
, TYPE_VECTOR_SUBPARTS (type
));
14448 unsigned HOST_WIDE_INT idx
, pos
= 0;
14451 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t
), idx
, value
)
14453 if (!CONSTANT_CLASS_P (value
))
14455 if (TREE_CODE (value
) == VECTOR_CST
)
14457 for (unsigned i
= 0; i
< VECTOR_CST_NELTS (value
); ++i
)
14458 vec
[pos
++] = VECTOR_CST_ELT (value
, i
);
14461 vec
[pos
++] = value
;
14463 for (; pos
< TYPE_VECTOR_SUBPARTS (type
); ++pos
)
14464 vec
[pos
] = build_zero_cst (TREE_TYPE (type
));
14466 return build_vector (type
, vec
);
14470 return fold (DECL_INITIAL (t
));
14474 } /* switch (code) */
14477 #ifdef ENABLE_FOLD_CHECKING
14480 static void fold_checksum_tree (const_tree
, struct md5_ctx
*,
14481 hash_table
<pointer_hash
<const tree_node
> > *);
14482 static void fold_check_failed (const_tree
, const_tree
);
14483 void print_fold_checksum (const_tree
);
14485 /* When --enable-checking=fold, compute a digest of expr before
14486 and after actual fold call to see if fold did not accidentally
14487 change original expr. */
14493 struct md5_ctx ctx
;
14494 unsigned char checksum_before
[16], checksum_after
[16];
14495 hash_table
<pointer_hash
<const tree_node
> > ht (32);
14497 md5_init_ctx (&ctx
);
14498 fold_checksum_tree (expr
, &ctx
, &ht
);
14499 md5_finish_ctx (&ctx
, checksum_before
);
14502 ret
= fold_1 (expr
);
14504 md5_init_ctx (&ctx
);
14505 fold_checksum_tree (expr
, &ctx
, &ht
);
14506 md5_finish_ctx (&ctx
, checksum_after
);
14508 if (memcmp (checksum_before
, checksum_after
, 16))
14509 fold_check_failed (expr
, ret
);
/* Print to stderr the MD5 checksum of EXPR, as two hex digits per byte
   followed by a newline.  Useful for comparing against the digests
   computed under ENABLE_FOLD_CHECKING.  */

void
print_fold_checksum (const_tree expr)
{
  struct md5_ctx ctx;
  unsigned char checksum[16], cnt;
  hash_table <pointer_hash <const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum);
  /* An MD5 digest is always 16 bytes.  */
  for (cnt = 0; cnt < 16; ++cnt)
    fprintf (stderr, "%02x", checksum[cnt]);
  putc ('\n', stderr);
}
/* Report an internal error when the fold-checking digests of a tree
   taken before and after folding disagree, i.e. fold modified its
   input tree in place.  EXPR and RET are accepted only for the
   debugger's benefit.  */

static void
fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}
14536 fold_checksum_tree (const_tree expr
, struct md5_ctx
*ctx
,
14537 hash_table
<pointer_hash
<const tree_node
> > *ht
)
14539 const tree_node
**slot
;
14540 enum tree_code code
;
14541 union tree_node buf
;
14547 slot
= ht
->find_slot (expr
, INSERT
);
14551 code
= TREE_CODE (expr
);
14552 if (TREE_CODE_CLASS (code
) == tcc_declaration
14553 && DECL_ASSEMBLER_NAME_SET_P (expr
))
14555 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14556 memcpy ((char *) &buf
, expr
, tree_size (expr
));
14557 SET_DECL_ASSEMBLER_NAME ((tree
)&buf
, NULL
);
14558 expr
= (tree
) &buf
;
14560 else if (TREE_CODE_CLASS (code
) == tcc_type
14561 && (TYPE_POINTER_TO (expr
)
14562 || TYPE_REFERENCE_TO (expr
)
14563 || TYPE_CACHED_VALUES_P (expr
)
14564 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr
)
14565 || TYPE_NEXT_VARIANT (expr
)))
14567 /* Allow these fields to be modified. */
14569 memcpy ((char *) &buf
, expr
, tree_size (expr
));
14570 expr
= tmp
= (tree
) &buf
;
14571 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp
) = 0;
14572 TYPE_POINTER_TO (tmp
) = NULL
;
14573 TYPE_REFERENCE_TO (tmp
) = NULL
;
14574 TYPE_NEXT_VARIANT (tmp
) = NULL
;
14575 if (TYPE_CACHED_VALUES_P (tmp
))
14577 TYPE_CACHED_VALUES_P (tmp
) = 0;
14578 TYPE_CACHED_VALUES (tmp
) = NULL
;
14581 md5_process_bytes (expr
, tree_size (expr
), ctx
);
14582 if (CODE_CONTAINS_STRUCT (code
, TS_TYPED
))
14583 fold_checksum_tree (TREE_TYPE (expr
), ctx
, ht
);
14584 if (TREE_CODE_CLASS (code
) != tcc_type
14585 && TREE_CODE_CLASS (code
) != tcc_declaration
14586 && code
!= TREE_LIST
14587 && code
!= SSA_NAME
14588 && CODE_CONTAINS_STRUCT (code
, TS_COMMON
))
14589 fold_checksum_tree (TREE_CHAIN (expr
), ctx
, ht
);
14590 switch (TREE_CODE_CLASS (code
))
14596 md5_process_bytes (TREE_STRING_POINTER (expr
),
14597 TREE_STRING_LENGTH (expr
), ctx
);
14600 fold_checksum_tree (TREE_REALPART (expr
), ctx
, ht
);
14601 fold_checksum_tree (TREE_IMAGPART (expr
), ctx
, ht
);
14604 for (i
= 0; i
< (int) VECTOR_CST_NELTS (expr
); ++i
)
14605 fold_checksum_tree (VECTOR_CST_ELT (expr
, i
), ctx
, ht
);
14611 case tcc_exceptional
:
14615 fold_checksum_tree (TREE_PURPOSE (expr
), ctx
, ht
);
14616 fold_checksum_tree (TREE_VALUE (expr
), ctx
, ht
);
14617 expr
= TREE_CHAIN (expr
);
14618 goto recursive_label
;
14621 for (i
= 0; i
< TREE_VEC_LENGTH (expr
); ++i
)
14622 fold_checksum_tree (TREE_VEC_ELT (expr
, i
), ctx
, ht
);
14628 case tcc_expression
:
14629 case tcc_reference
:
14630 case tcc_comparison
:
14633 case tcc_statement
:
14635 len
= TREE_OPERAND_LENGTH (expr
);
14636 for (i
= 0; i
< len
; ++i
)
14637 fold_checksum_tree (TREE_OPERAND (expr
, i
), ctx
, ht
);
14639 case tcc_declaration
:
14640 fold_checksum_tree (DECL_NAME (expr
), ctx
, ht
);
14641 fold_checksum_tree (DECL_CONTEXT (expr
), ctx
, ht
);
14642 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_COMMON
))
14644 fold_checksum_tree (DECL_SIZE (expr
), ctx
, ht
);
14645 fold_checksum_tree (DECL_SIZE_UNIT (expr
), ctx
, ht
);
14646 fold_checksum_tree (DECL_INITIAL (expr
), ctx
, ht
);
14647 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr
), ctx
, ht
);
14648 fold_checksum_tree (DECL_ATTRIBUTES (expr
), ctx
, ht
);
14651 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_NON_COMMON
))
14653 if (TREE_CODE (expr
) == FUNCTION_DECL
)
14655 fold_checksum_tree (DECL_VINDEX (expr
), ctx
, ht
);
14656 fold_checksum_tree (DECL_ARGUMENTS (expr
), ctx
, ht
);
14658 fold_checksum_tree (DECL_RESULT_FLD (expr
), ctx
, ht
);
14662 if (TREE_CODE (expr
) == ENUMERAL_TYPE
)
14663 fold_checksum_tree (TYPE_VALUES (expr
), ctx
, ht
);
14664 fold_checksum_tree (TYPE_SIZE (expr
), ctx
, ht
);
14665 fold_checksum_tree (TYPE_SIZE_UNIT (expr
), ctx
, ht
);
14666 fold_checksum_tree (TYPE_ATTRIBUTES (expr
), ctx
, ht
);
14667 fold_checksum_tree (TYPE_NAME (expr
), ctx
, ht
);
14668 if (INTEGRAL_TYPE_P (expr
)
14669 || SCALAR_FLOAT_TYPE_P (expr
))
14671 fold_checksum_tree (TYPE_MIN_VALUE (expr
), ctx
, ht
);
14672 fold_checksum_tree (TYPE_MAX_VALUE (expr
), ctx
, ht
);
14674 fold_checksum_tree (TYPE_MAIN_VARIANT (expr
), ctx
, ht
);
14675 if (TREE_CODE (expr
) == RECORD_TYPE
14676 || TREE_CODE (expr
) == UNION_TYPE
14677 || TREE_CODE (expr
) == QUAL_UNION_TYPE
)
14678 fold_checksum_tree (TYPE_BINFO (expr
), ctx
, ht
);
14679 fold_checksum_tree (TYPE_CONTEXT (expr
), ctx
, ht
);
/* Helper function for outputting the checksum of a tree T.  When
   debugging with gdb, you can "define mynext" to be "next" followed
   by "call debug_fold_checksum (op0)", then just trace down till the
   outputted checksum changes.  */

DEBUG_FUNCTION void
debug_fold_checksum (const_tree t)
{
  int i;
  unsigned char checksum[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (t, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum);

  /* Unlike print_fold_checksum, print the digest bytes in decimal,
     space-separated, which is easier to eyeball while single-stepping.  */
  for (i = 0; i < 16; i++)
    fprintf (stderr, "%d ", checksum[i]);

  fprintf (stderr, "\n");
}
/* Fold a unary tree expression with code CODE of type TYPE with an
   operand OP0.  LOC is the location of the resulting expression.
   Return a folded expression if successful.  Otherwise, return a tree
   expression with code CODE of type TYPE with an operand OP0.  */

tree
fold_build1_stat_loc (location_t loc,
		      enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before[16], checksum_after[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <const tree_node> > ht (32);

  /* Digest OP0 before folding; fold must not modify its operand in
     place, and the digest comparison below enforces that.  */
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before);
  ht.empty ();
#endif

  tem = fold_unary_loc (loc, code, type, op0);
  if (!tem)
    /* No simplification applied: build the plain unary expression.  */
    tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (op0, tem);
#endif
  return tem;
}
/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting
   expression.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with operands
   OP0 and OP1.  */

tree
fold_build2_stat_loc (location_t loc,
		      enum tree_code code, tree type, tree op0, tree op1
		      MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_after_op0[16],
                checksum_after_op1[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <const tree_node> > ht (32);

  /* Digest each operand before folding; fold must not modify its
     operands in place.  The hash table is emptied between digests so
     each operand is walked independently.  */
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  ht.empty ();
#endif

  tem = fold_binary_loc (loc, code, type, op0, op1);
  if (!tem)
    /* No simplification applied: build the plain binary expression.  */
    tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  ht.empty ();

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op1);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
#endif
  return tem;
}
14802 /* Fold a ternary tree expression with code CODE of type TYPE with
14803 operands OP0, OP1, and OP2. Return a folded expression if
14804 successful. Otherwise, return a tree expression with code CODE of
14805 type TYPE with operands OP0, OP1, and OP2. */
14808 fold_build3_stat_loc (location_t loc
, enum tree_code code
, tree type
,
14809 tree op0
, tree op1
, tree op2 MEM_STAT_DECL
)
14812 #ifdef ENABLE_FOLD_CHECKING
14813 unsigned char checksum_before_op0
[16],
14814 checksum_before_op1
[16],
14815 checksum_before_op2
[16],
14816 checksum_after_op0
[16],
14817 checksum_after_op1
[16],
14818 checksum_after_op2
[16];
14819 struct md5_ctx ctx
;
14820 hash_table
<pointer_hash
<const tree_node
> > ht (32);
14822 md5_init_ctx (&ctx
);
14823 fold_checksum_tree (op0
, &ctx
, &ht
);
14824 md5_finish_ctx (&ctx
, checksum_before_op0
);
14827 md5_init_ctx (&ctx
);
14828 fold_checksum_tree (op1
, &ctx
, &ht
);
14829 md5_finish_ctx (&ctx
, checksum_before_op1
);
14832 md5_init_ctx (&ctx
);
14833 fold_checksum_tree (op2
, &ctx
, &ht
);
14834 md5_finish_ctx (&ctx
, checksum_before_op2
);
14838 gcc_assert (TREE_CODE_CLASS (code
) != tcc_vl_exp
);
14839 tem
= fold_ternary_loc (loc
, code
, type
, op0
, op1
, op2
);
14841 tem
= build3_stat_loc (loc
, code
, type
, op0
, op1
, op2 PASS_MEM_STAT
);
14843 #ifdef ENABLE_FOLD_CHECKING
14844 md5_init_ctx (&ctx
);
14845 fold_checksum_tree (op0
, &ctx
, &ht
);
14846 md5_finish_ctx (&ctx
, checksum_after_op0
);
14849 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
14850 fold_check_failed (op0
, tem
);
14852 md5_init_ctx (&ctx
);
14853 fold_checksum_tree (op1
, &ctx
, &ht
);
14854 md5_finish_ctx (&ctx
, checksum_after_op1
);
14857 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
14858 fold_check_failed (op1
, tem
);
14860 md5_init_ctx (&ctx
);
14861 fold_checksum_tree (op2
, &ctx
, &ht
);
14862 md5_finish_ctx (&ctx
, checksum_after_op2
);
14864 if (memcmp (checksum_before_op2
, checksum_after_op2
, 16))
14865 fold_check_failed (op2
, tem
);
14870 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14871 arguments in ARGARRAY, and a null static chain.
14872 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14873 of type TYPE from the given operands as constructed by build_call_array. */
14876 fold_build_call_array_loc (location_t loc
, tree type
, tree fn
,
14877 int nargs
, tree
*argarray
)
14880 #ifdef ENABLE_FOLD_CHECKING
14881 unsigned char checksum_before_fn
[16],
14882 checksum_before_arglist
[16],
14883 checksum_after_fn
[16],
14884 checksum_after_arglist
[16];
14885 struct md5_ctx ctx
;
14886 hash_table
<pointer_hash
<const tree_node
> > ht (32);
14889 md5_init_ctx (&ctx
);
14890 fold_checksum_tree (fn
, &ctx
, &ht
);
14891 md5_finish_ctx (&ctx
, checksum_before_fn
);
14894 md5_init_ctx (&ctx
);
14895 for (i
= 0; i
< nargs
; i
++)
14896 fold_checksum_tree (argarray
[i
], &ctx
, &ht
);
14897 md5_finish_ctx (&ctx
, checksum_before_arglist
);
14901 tem
= fold_builtin_call_array (loc
, type
, fn
, nargs
, argarray
);
14903 #ifdef ENABLE_FOLD_CHECKING
14904 md5_init_ctx (&ctx
);
14905 fold_checksum_tree (fn
, &ctx
, &ht
);
14906 md5_finish_ctx (&ctx
, checksum_after_fn
);
14909 if (memcmp (checksum_before_fn
, checksum_after_fn
, 16))
14910 fold_check_failed (fn
, tem
);
14912 md5_init_ctx (&ctx
);
14913 for (i
= 0; i
< nargs
; i
++)
14914 fold_checksum_tree (argarray
[i
], &ctx
, &ht
);
14915 md5_finish_ctx (&ctx
, checksum_after_arglist
);
14917 if (memcmp (checksum_before_arglist
, checksum_after_arglist
, 16))
14918 fold_check_failed (NULL_TREE
, tem
);
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;
14947 fold_build1_initializer_loc (location_t loc
, enum tree_code code
,
14948 tree type
, tree op
)
14953 result
= fold_build1_loc (loc
, code
, type
, op
);
14960 fold_build2_initializer_loc (location_t loc
, enum tree_code code
,
14961 tree type
, tree op0
, tree op1
)
14966 result
= fold_build2_loc (loc
, code
, type
, op0
, op1
);
14973 fold_build_call_array_initializer_loc (location_t loc
, tree type
, tree fn
,
14974 int nargs
, tree
*argarray
)
14979 result
= fold_build_call_array_loc (loc
, type
, fn
, nargs
, argarray
);
14985 #undef START_FOLD_INIT
14986 #undef END_FOLD_INIT
14988 /* Determine if first argument is a multiple of second argument. Return 0 if
14989 it is not, or we cannot easily determined it to be.
14991 An example of the sort of thing we care about (at this point; this routine
14992 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14993 fold cases do now) is discovering that
14995 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15001 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15003 This code also handles discovering that
15005 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15007 is a multiple of 8 so we don't have to worry about dealing with a
15008 possible remainder.
15010 Note that we *look* inside a SAVE_EXPR only to determine how it was
15011 calculated; it is not safe for fold to do much of anything else with the
15012 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15013 at run time. For example, the latter example above *cannot* be implemented
15014 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15015 evaluation time of the original SAVE_EXPR is not necessarily the same at
15016 the time the new expression is evaluated. The only optimization of this
15017 sort that would be valid is changing
15019 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15023 SAVE_EXPR (I) * SAVE_EXPR (J)
15025 (where the same SAVE_EXPR (J) is used in the original and the
15026 transformed version). */
15029 multiple_of_p (tree type
, const_tree top
, const_tree bottom
)
15031 if (operand_equal_p (top
, bottom
, 0))
15034 if (TREE_CODE (type
) != INTEGER_TYPE
)
15037 switch (TREE_CODE (top
))
15040 /* Bitwise and provides a power of two multiple. If the mask is
15041 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
15042 if (!integer_pow2p (bottom
))
15047 return (multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
)
15048 || multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
));
15052 return (multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
)
15053 && multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
));
15056 if (TREE_CODE (TREE_OPERAND (top
, 1)) == INTEGER_CST
)
15060 op1
= TREE_OPERAND (top
, 1);
15061 /* const_binop may not detect overflow correctly,
15062 so check for it explicitly here. */
15063 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node
)), op1
)
15064 && 0 != (t1
= fold_convert (type
,
15065 const_binop (LSHIFT_EXPR
,
15068 && !TREE_OVERFLOW (t1
))
15069 return multiple_of_p (type
, t1
, bottom
);
15074 /* Can't handle conversions from non-integral or wider integral type. */
15075 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top
, 0))) != INTEGER_TYPE
)
15076 || (TYPE_PRECISION (type
)
15077 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top
, 0)))))
15080 /* .. fall through ... */
15083 return multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
);
15086 return (multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
)
15087 && multiple_of_p (type
, TREE_OPERAND (top
, 2), bottom
));
15090 if (TREE_CODE (bottom
) != INTEGER_CST
15091 || integer_zerop (bottom
)
15092 || (TYPE_UNSIGNED (type
)
15093 && (tree_int_cst_sgn (top
) < 0
15094 || tree_int_cst_sgn (bottom
) < 0)))
15096 return wi::multiple_of_p (wi::to_widest (top
), wi::to_widest (bottom
),
15104 /* Return true if CODE or TYPE is known to be non-negative. */
15107 tree_simple_nonnegative_warnv_p (enum tree_code code
, tree type
)
15109 if ((TYPE_PRECISION (type
) != 1 || TYPE_UNSIGNED (type
))
15110 && truth_value_p (code
))
15111 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15112 have a signed:1 type (where the value is -1 and 0). */
15117 /* Return true if (CODE OP0) is known to be non-negative. If the return
15118 value is based on the assumption that signed overflow is undefined,
15119 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15120 *STRICT_OVERFLOW_P. */
15123 tree_unary_nonnegative_warnv_p (enum tree_code code
, tree type
, tree op0
,
15124 bool *strict_overflow_p
)
15126 if (TYPE_UNSIGNED (type
))
15132 /* We can't return 1 if flag_wrapv is set because
15133 ABS_EXPR<INT_MIN> = INT_MIN. */
15134 if (!INTEGRAL_TYPE_P (type
))
15136 if (TYPE_OVERFLOW_UNDEFINED (type
))
15138 *strict_overflow_p
= true;
15143 case NON_LVALUE_EXPR
:
15145 case FIX_TRUNC_EXPR
:
15146 return tree_expr_nonnegative_warnv_p (op0
,
15147 strict_overflow_p
);
15151 tree inner_type
= TREE_TYPE (op0
);
15152 tree outer_type
= type
;
15154 if (TREE_CODE (outer_type
) == REAL_TYPE
)
15156 if (TREE_CODE (inner_type
) == REAL_TYPE
)
15157 return tree_expr_nonnegative_warnv_p (op0
,
15158 strict_overflow_p
);
15159 if (INTEGRAL_TYPE_P (inner_type
))
15161 if (TYPE_UNSIGNED (inner_type
))
15163 return tree_expr_nonnegative_warnv_p (op0
,
15164 strict_overflow_p
);
15167 else if (INTEGRAL_TYPE_P (outer_type
))
15169 if (TREE_CODE (inner_type
) == REAL_TYPE
)
15170 return tree_expr_nonnegative_warnv_p (op0
,
15171 strict_overflow_p
);
15172 if (INTEGRAL_TYPE_P (inner_type
))
15173 return TYPE_PRECISION (inner_type
) < TYPE_PRECISION (outer_type
)
15174 && TYPE_UNSIGNED (inner_type
);
15180 return tree_simple_nonnegative_warnv_p (code
, type
);
15183 /* We don't know sign of `t', so be conservative and return false. */
15187 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15188 value is based on the assumption that signed overflow is undefined,
15189 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15190 *STRICT_OVERFLOW_P. */
15193 tree_binary_nonnegative_warnv_p (enum tree_code code
, tree type
, tree op0
,
15194 tree op1
, bool *strict_overflow_p
)
15196 if (TYPE_UNSIGNED (type
))
15201 case POINTER_PLUS_EXPR
:
15203 if (FLOAT_TYPE_P (type
))
15204 return (tree_expr_nonnegative_warnv_p (op0
,
15206 && tree_expr_nonnegative_warnv_p (op1
,
15207 strict_overflow_p
));
15209 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15210 both unsigned and at least 2 bits shorter than the result. */
15211 if (TREE_CODE (type
) == INTEGER_TYPE
15212 && TREE_CODE (op0
) == NOP_EXPR
15213 && TREE_CODE (op1
) == NOP_EXPR
)
15215 tree inner1
= TREE_TYPE (TREE_OPERAND (op0
, 0));
15216 tree inner2
= TREE_TYPE (TREE_OPERAND (op1
, 0));
15217 if (TREE_CODE (inner1
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner1
)
15218 && TREE_CODE (inner2
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner2
))
15220 unsigned int prec
= MAX (TYPE_PRECISION (inner1
),
15221 TYPE_PRECISION (inner2
)) + 1;
15222 return prec
< TYPE_PRECISION (type
);
15228 if (FLOAT_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
15230 /* x * x is always non-negative for floating point x
15231 or without overflow. */
15232 if (operand_equal_p (op0
, op1
, 0)
15233 || (tree_expr_nonnegative_warnv_p (op0
, strict_overflow_p
)
15234 && tree_expr_nonnegative_warnv_p (op1
, strict_overflow_p
)))
15236 if (TYPE_OVERFLOW_UNDEFINED (type
))
15237 *strict_overflow_p
= true;
15242 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15243 both unsigned and their total bits is shorter than the result. */
15244 if (TREE_CODE (type
) == INTEGER_TYPE
15245 && (TREE_CODE (op0
) == NOP_EXPR
|| TREE_CODE (op0
) == INTEGER_CST
)
15246 && (TREE_CODE (op1
) == NOP_EXPR
|| TREE_CODE (op1
) == INTEGER_CST
))
15248 tree inner0
= (TREE_CODE (op0
) == NOP_EXPR
)
15249 ? TREE_TYPE (TREE_OPERAND (op0
, 0))
15251 tree inner1
= (TREE_CODE (op1
) == NOP_EXPR
)
15252 ? TREE_TYPE (TREE_OPERAND (op1
, 0))
15255 bool unsigned0
= TYPE_UNSIGNED (inner0
);
15256 bool unsigned1
= TYPE_UNSIGNED (inner1
);
15258 if (TREE_CODE (op0
) == INTEGER_CST
)
15259 unsigned0
= unsigned0
|| tree_int_cst_sgn (op0
) >= 0;
15261 if (TREE_CODE (op1
) == INTEGER_CST
)
15262 unsigned1
= unsigned1
|| tree_int_cst_sgn (op1
) >= 0;
15264 if (TREE_CODE (inner0
) == INTEGER_TYPE
&& unsigned0
15265 && TREE_CODE (inner1
) == INTEGER_TYPE
&& unsigned1
)
15267 unsigned int precision0
= (TREE_CODE (op0
) == INTEGER_CST
)
15268 ? tree_int_cst_min_precision (op0
, UNSIGNED
)
15269 : TYPE_PRECISION (inner0
);
15271 unsigned int precision1
= (TREE_CODE (op1
) == INTEGER_CST
)
15272 ? tree_int_cst_min_precision (op1
, UNSIGNED
)
15273 : TYPE_PRECISION (inner1
);
15275 return precision0
+ precision1
< TYPE_PRECISION (type
);
15282 return (tree_expr_nonnegative_warnv_p (op0
,
15284 || tree_expr_nonnegative_warnv_p (op1
,
15285 strict_overflow_p
));
15291 case TRUNC_DIV_EXPR
:
15292 case CEIL_DIV_EXPR
:
15293 case FLOOR_DIV_EXPR
:
15294 case ROUND_DIV_EXPR
:
15295 return (tree_expr_nonnegative_warnv_p (op0
,
15297 && tree_expr_nonnegative_warnv_p (op1
,
15298 strict_overflow_p
));
15300 case TRUNC_MOD_EXPR
:
15301 case CEIL_MOD_EXPR
:
15302 case FLOOR_MOD_EXPR
:
15303 case ROUND_MOD_EXPR
:
15304 return tree_expr_nonnegative_warnv_p (op0
,
15305 strict_overflow_p
);
15307 return tree_simple_nonnegative_warnv_p (code
, type
);
15310 /* We don't know sign of `t', so be conservative and return false. */
15314 /* Return true if T is known to be non-negative. If the return
15315 value is based on the assumption that signed overflow is undefined,
15316 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15317 *STRICT_OVERFLOW_P. */
15320 tree_single_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
)
15322 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
15325 switch (TREE_CODE (t
))
15328 return tree_int_cst_sgn (t
) >= 0;
15331 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t
));
15334 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t
));
15337 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 1),
15339 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 2),
15340 strict_overflow_p
));
15342 return tree_simple_nonnegative_warnv_p (TREE_CODE (t
),
15345 /* We don't know sign of `t', so be conservative and return false. */
15349 /* Return true if T is known to be non-negative. If the return
15350 value is based on the assumption that signed overflow is undefined,
15351 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15352 *STRICT_OVERFLOW_P. */
15355 tree_call_nonnegative_warnv_p (tree type
, tree fndecl
,
15356 tree arg0
, tree arg1
, bool *strict_overflow_p
)
15358 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
15359 switch (DECL_FUNCTION_CODE (fndecl
))
15361 CASE_FLT_FN (BUILT_IN_ACOS
):
15362 CASE_FLT_FN (BUILT_IN_ACOSH
):
15363 CASE_FLT_FN (BUILT_IN_CABS
):
15364 CASE_FLT_FN (BUILT_IN_COSH
):
15365 CASE_FLT_FN (BUILT_IN_ERFC
):
15366 CASE_FLT_FN (BUILT_IN_EXP
):
15367 CASE_FLT_FN (BUILT_IN_EXP10
):
15368 CASE_FLT_FN (BUILT_IN_EXP2
):
15369 CASE_FLT_FN (BUILT_IN_FABS
):
15370 CASE_FLT_FN (BUILT_IN_FDIM
):
15371 CASE_FLT_FN (BUILT_IN_HYPOT
):
15372 CASE_FLT_FN (BUILT_IN_POW10
):
15373 CASE_INT_FN (BUILT_IN_FFS
):
15374 CASE_INT_FN (BUILT_IN_PARITY
):
15375 CASE_INT_FN (BUILT_IN_POPCOUNT
):
15376 CASE_INT_FN (BUILT_IN_CLZ
):
15377 CASE_INT_FN (BUILT_IN_CLRSB
):
15378 case BUILT_IN_BSWAP32
:
15379 case BUILT_IN_BSWAP64
:
15383 CASE_FLT_FN (BUILT_IN_SQRT
):
15384 /* sqrt(-0.0) is -0.0. */
15385 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type
)))
15387 return tree_expr_nonnegative_warnv_p (arg0
,
15388 strict_overflow_p
);
15390 CASE_FLT_FN (BUILT_IN_ASINH
):
15391 CASE_FLT_FN (BUILT_IN_ATAN
):
15392 CASE_FLT_FN (BUILT_IN_ATANH
):
15393 CASE_FLT_FN (BUILT_IN_CBRT
):
15394 CASE_FLT_FN (BUILT_IN_CEIL
):
15395 CASE_FLT_FN (BUILT_IN_ERF
):
15396 CASE_FLT_FN (BUILT_IN_EXPM1
):
15397 CASE_FLT_FN (BUILT_IN_FLOOR
):
15398 CASE_FLT_FN (BUILT_IN_FMOD
):
15399 CASE_FLT_FN (BUILT_IN_FREXP
):
15400 CASE_FLT_FN (BUILT_IN_ICEIL
):
15401 CASE_FLT_FN (BUILT_IN_IFLOOR
):
15402 CASE_FLT_FN (BUILT_IN_IRINT
):
15403 CASE_FLT_FN (BUILT_IN_IROUND
):
15404 CASE_FLT_FN (BUILT_IN_LCEIL
):
15405 CASE_FLT_FN (BUILT_IN_LDEXP
):
15406 CASE_FLT_FN (BUILT_IN_LFLOOR
):
15407 CASE_FLT_FN (BUILT_IN_LLCEIL
):
15408 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
15409 CASE_FLT_FN (BUILT_IN_LLRINT
):
15410 CASE_FLT_FN (BUILT_IN_LLROUND
):
15411 CASE_FLT_FN (BUILT_IN_LRINT
):
15412 CASE_FLT_FN (BUILT_IN_LROUND
):
15413 CASE_FLT_FN (BUILT_IN_MODF
):
15414 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
15415 CASE_FLT_FN (BUILT_IN_RINT
):
15416 CASE_FLT_FN (BUILT_IN_ROUND
):
15417 CASE_FLT_FN (BUILT_IN_SCALB
):
15418 CASE_FLT_FN (BUILT_IN_SCALBLN
):
15419 CASE_FLT_FN (BUILT_IN_SCALBN
):
15420 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
15421 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
15422 CASE_FLT_FN (BUILT_IN_SINH
):
15423 CASE_FLT_FN (BUILT_IN_TANH
):
15424 CASE_FLT_FN (BUILT_IN_TRUNC
):
15425 /* True if the 1st argument is nonnegative. */
15426 return tree_expr_nonnegative_warnv_p (arg0
,
15427 strict_overflow_p
);
15429 CASE_FLT_FN (BUILT_IN_FMAX
):
15430 /* True if the 1st OR 2nd arguments are nonnegative. */
15431 return (tree_expr_nonnegative_warnv_p (arg0
,
15433 || (tree_expr_nonnegative_warnv_p (arg1
,
15434 strict_overflow_p
)));
15436 CASE_FLT_FN (BUILT_IN_FMIN
):
15437 /* True if the 1st AND 2nd arguments are nonnegative. */
15438 return (tree_expr_nonnegative_warnv_p (arg0
,
15440 && (tree_expr_nonnegative_warnv_p (arg1
,
15441 strict_overflow_p
)));
15443 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
15444 /* True if the 2nd argument is nonnegative. */
15445 return tree_expr_nonnegative_warnv_p (arg1
,
15446 strict_overflow_p
);
15448 CASE_FLT_FN (BUILT_IN_POWI
):
15449 /* True if the 1st argument is nonnegative or the second
15450 argument is an even integer. */
15451 if (TREE_CODE (arg1
) == INTEGER_CST
15452 && (TREE_INT_CST_LOW (arg1
) & 1) == 0)
15454 return tree_expr_nonnegative_warnv_p (arg0
,
15455 strict_overflow_p
);
15457 CASE_FLT_FN (BUILT_IN_POW
):
15458 /* True if the 1st argument is nonnegative or the second
15459 argument is an even integer valued real. */
15460 if (TREE_CODE (arg1
) == REAL_CST
)
15465 c
= TREE_REAL_CST (arg1
);
15466 n
= real_to_integer (&c
);
15469 REAL_VALUE_TYPE cint
;
15470 real_from_integer (&cint
, VOIDmode
, n
, SIGNED
);
15471 if (real_identical (&c
, &cint
))
15475 return tree_expr_nonnegative_warnv_p (arg0
,
15476 strict_overflow_p
);
15481 return tree_simple_nonnegative_warnv_p (CALL_EXPR
,
15485 /* Return true if T is known to be non-negative. If the return
15486 value is based on the assumption that signed overflow is undefined,
15487 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15488 *STRICT_OVERFLOW_P. */
15491 tree_invalid_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
)
15493 enum tree_code code
= TREE_CODE (t
);
15494 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
15501 tree temp
= TARGET_EXPR_SLOT (t
);
15502 t
= TARGET_EXPR_INITIAL (t
);
15504 /* If the initializer is non-void, then it's a normal expression
15505 that will be assigned to the slot. */
15506 if (!VOID_TYPE_P (t
))
15507 return tree_expr_nonnegative_warnv_p (t
, strict_overflow_p
);
15509 /* Otherwise, the initializer sets the slot in some way. One common
15510 way is an assignment statement at the end of the initializer. */
15513 if (TREE_CODE (t
) == BIND_EXPR
)
15514 t
= expr_last (BIND_EXPR_BODY (t
));
15515 else if (TREE_CODE (t
) == TRY_FINALLY_EXPR
15516 || TREE_CODE (t
) == TRY_CATCH_EXPR
)
15517 t
= expr_last (TREE_OPERAND (t
, 0));
15518 else if (TREE_CODE (t
) == STATEMENT_LIST
)
15523 if (TREE_CODE (t
) == MODIFY_EXPR
15524 && TREE_OPERAND (t
, 0) == temp
)
15525 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 1),
15526 strict_overflow_p
);
15533 tree arg0
= call_expr_nargs (t
) > 0 ? CALL_EXPR_ARG (t
, 0) : NULL_TREE
;
15534 tree arg1
= call_expr_nargs (t
) > 1 ? CALL_EXPR_ARG (t
, 1) : NULL_TREE
;
15536 return tree_call_nonnegative_warnv_p (TREE_TYPE (t
),
15537 get_callee_fndecl (t
),
15540 strict_overflow_p
);
15542 case COMPOUND_EXPR
:
15544 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 1),
15545 strict_overflow_p
);
15547 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t
, 1)),
15548 strict_overflow_p
);
15550 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 0),
15551 strict_overflow_p
);
15554 return tree_simple_nonnegative_warnv_p (TREE_CODE (t
),
15558 /* We don't know sign of `t', so be conservative and return false. */
15562 /* Return true if T is known to be non-negative. If the return
15563 value is based on the assumption that signed overflow is undefined,
15564 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15565 *STRICT_OVERFLOW_P. */
15568 tree_expr_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
)
15570 enum tree_code code
;
15571 if (t
== error_mark_node
)
15574 code
= TREE_CODE (t
);
15575 switch (TREE_CODE_CLASS (code
))
15578 case tcc_comparison
:
15579 return tree_binary_nonnegative_warnv_p (TREE_CODE (t
),
15581 TREE_OPERAND (t
, 0),
15582 TREE_OPERAND (t
, 1),
15583 strict_overflow_p
);
15586 return tree_unary_nonnegative_warnv_p (TREE_CODE (t
),
15588 TREE_OPERAND (t
, 0),
15589 strict_overflow_p
);
15592 case tcc_declaration
:
15593 case tcc_reference
:
15594 return tree_single_nonnegative_warnv_p (t
, strict_overflow_p
);
15602 case TRUTH_AND_EXPR
:
15603 case TRUTH_OR_EXPR
:
15604 case TRUTH_XOR_EXPR
:
15605 return tree_binary_nonnegative_warnv_p (TREE_CODE (t
),
15607 TREE_OPERAND (t
, 0),
15608 TREE_OPERAND (t
, 1),
15609 strict_overflow_p
);
15610 case TRUTH_NOT_EXPR
:
15611 return tree_unary_nonnegative_warnv_p (TREE_CODE (t
),
15613 TREE_OPERAND (t
, 0),
15614 strict_overflow_p
);
15621 case WITH_SIZE_EXPR
:
15623 return tree_single_nonnegative_warnv_p (t
, strict_overflow_p
);
15626 return tree_invalid_nonnegative_warnv_p (t
, strict_overflow_p
);
15630 /* Return true if `t' is known to be non-negative. Handle warnings
15631 about undefined signed overflow. */
15634 tree_expr_nonnegative_p (tree t
)
15636 bool ret
, strict_overflow_p
;
15638 strict_overflow_p
= false;
15639 ret
= tree_expr_nonnegative_warnv_p (t
, &strict_overflow_p
);
15640 if (strict_overflow_p
)
15641 fold_overflow_warning (("assuming signed overflow does not occur when "
15642 "determining that expression is always "
15644 WARN_STRICT_OVERFLOW_MISC
);
15649 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15650 For floating point we further ensure that T is not denormal.
15651 Similar logic is present in nonzero_address in rtlanal.h.
15653 If the return value is based on the assumption that signed overflow
15654 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15655 change *STRICT_OVERFLOW_P. */
15658 tree_unary_nonzero_warnv_p (enum tree_code code
, tree type
, tree op0
,
15659 bool *strict_overflow_p
)
15664 return tree_expr_nonzero_warnv_p (op0
,
15665 strict_overflow_p
);
15669 tree inner_type
= TREE_TYPE (op0
);
15670 tree outer_type
= type
;
15672 return (TYPE_PRECISION (outer_type
) >= TYPE_PRECISION (inner_type
)
15673 && tree_expr_nonzero_warnv_p (op0
,
15674 strict_overflow_p
));
15678 case NON_LVALUE_EXPR
:
15679 return tree_expr_nonzero_warnv_p (op0
,
15680 strict_overflow_p
);
15689 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15690 For floating point we further ensure that T is not denormal.
15691 Similar logic is present in nonzero_address in rtlanal.h.
15693 If the return value is based on the assumption that signed overflow
15694 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15695 change *STRICT_OVERFLOW_P. */
15698 tree_binary_nonzero_warnv_p (enum tree_code code
,
15701 tree op1
, bool *strict_overflow_p
)
15703 bool sub_strict_overflow_p
;
15706 case POINTER_PLUS_EXPR
:
15708 if (TYPE_OVERFLOW_UNDEFINED (type
))
15710 /* With the presence of negative values it is hard
15711 to say something. */
15712 sub_strict_overflow_p
= false;
15713 if (!tree_expr_nonnegative_warnv_p (op0
,
15714 &sub_strict_overflow_p
)
15715 || !tree_expr_nonnegative_warnv_p (op1
,
15716 &sub_strict_overflow_p
))
15718 /* One of operands must be positive and the other non-negative. */
15719 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15720 overflows, on a twos-complement machine the sum of two
15721 nonnegative numbers can never be zero. */
15722 return (tree_expr_nonzero_warnv_p (op0
,
15724 || tree_expr_nonzero_warnv_p (op1
,
15725 strict_overflow_p
));
15730 if (TYPE_OVERFLOW_UNDEFINED (type
))
15732 if (tree_expr_nonzero_warnv_p (op0
,
15734 && tree_expr_nonzero_warnv_p (op1
,
15735 strict_overflow_p
))
15737 *strict_overflow_p
= true;
15744 sub_strict_overflow_p
= false;
15745 if (tree_expr_nonzero_warnv_p (op0
,
15746 &sub_strict_overflow_p
)
15747 && tree_expr_nonzero_warnv_p (op1
,
15748 &sub_strict_overflow_p
))
15750 if (sub_strict_overflow_p
)
15751 *strict_overflow_p
= true;
15756 sub_strict_overflow_p
= false;
15757 if (tree_expr_nonzero_warnv_p (op0
,
15758 &sub_strict_overflow_p
))
15760 if (sub_strict_overflow_p
)
15761 *strict_overflow_p
= true;
15763 /* When both operands are nonzero, then MAX must be too. */
15764 if (tree_expr_nonzero_warnv_p (op1
,
15765 strict_overflow_p
))
15768 /* MAX where operand 0 is positive is positive. */
15769 return tree_expr_nonnegative_warnv_p (op0
,
15770 strict_overflow_p
);
15772 /* MAX where operand 1 is positive is positive. */
15773 else if (tree_expr_nonzero_warnv_p (op1
,
15774 &sub_strict_overflow_p
)
15775 && tree_expr_nonnegative_warnv_p (op1
,
15776 &sub_strict_overflow_p
))
15778 if (sub_strict_overflow_p
)
15779 *strict_overflow_p
= true;
15785 return (tree_expr_nonzero_warnv_p (op1
,
15787 || tree_expr_nonzero_warnv_p (op0
,
15788 strict_overflow_p
));
15797 /* Return true when T is an address and is known to be nonzero.
15798 For floating point we further ensure that T is not denormal.
15799 Similar logic is present in nonzero_address in rtlanal.h.
15801 If the return value is based on the assumption that signed overflow
15802 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15803 change *STRICT_OVERFLOW_P. */
15806 tree_single_nonzero_warnv_p (tree t
, bool *strict_overflow_p
)
15808 bool sub_strict_overflow_p
;
15809 switch (TREE_CODE (t
))
15812 return !integer_zerop (t
);
15816 tree base
= TREE_OPERAND (t
, 0);
15818 if (!DECL_P (base
))
15819 base
= get_base_address (base
);
15824 /* For objects in symbol table check if we know they are non-zero.
15825 Don't do anything for variables and functions before symtab is built;
15826 it is quite possible that they will be declared weak later. */
15827 if (DECL_P (base
) && decl_in_symtab_p (base
))
15829 struct symtab_node
*symbol
;
15831 symbol
= symtab_node::get_create (base
);
15833 return symbol
->nonzero_address ();
15838 /* Function local objects are never NULL. */
15840 && (DECL_CONTEXT (base
)
15841 && TREE_CODE (DECL_CONTEXT (base
)) == FUNCTION_DECL
15842 && auto_var_in_fn_p (base
, DECL_CONTEXT (base
))))
15845 /* Constants are never weak. */
15846 if (CONSTANT_CLASS_P (base
))
15853 sub_strict_overflow_p
= false;
15854 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 1),
15855 &sub_strict_overflow_p
)
15856 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 2),
15857 &sub_strict_overflow_p
))
15859 if (sub_strict_overflow_p
)
15860 *strict_overflow_p
= true;
15871 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15872 attempt to fold the expression to a constant without modifying TYPE,
15875 If the expression could be simplified to a constant, then return
15876 the constant. If the expression would not be simplified to a
15877 constant, then return NULL_TREE. */
15880 fold_binary_to_constant (enum tree_code code
, tree type
, tree op0
, tree op1
)
15882 tree tem
= fold_binary (code
, type
, op0
, op1
);
15883 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
15886 /* Given the components of a unary expression CODE, TYPE and OP0,
15887 attempt to fold the expression to a constant without modifying
15890 If the expression could be simplified to a constant, then return
15891 the constant. If the expression would not be simplified to a
15892 constant, then return NULL_TREE. */
15895 fold_unary_to_constant (enum tree_code code
, tree type
, tree op0
)
15897 tree tem
= fold_unary (code
, type
, op0
);
15898 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
15901 /* If EXP represents referencing an element in a constant string
15902 (either via pointer arithmetic or array indexing), return the
15903 tree representing the value accessed, otherwise return NULL. */
15906 fold_read_from_constant_string (tree exp
)
15908 if ((TREE_CODE (exp
) == INDIRECT_REF
15909 || TREE_CODE (exp
) == ARRAY_REF
)
15910 && TREE_CODE (TREE_TYPE (exp
)) == INTEGER_TYPE
)
15912 tree exp1
= TREE_OPERAND (exp
, 0);
15915 location_t loc
= EXPR_LOCATION (exp
);
15917 if (TREE_CODE (exp
) == INDIRECT_REF
)
15918 string
= string_constant (exp1
, &index
);
15921 tree low_bound
= array_ref_low_bound (exp
);
15922 index
= fold_convert_loc (loc
, sizetype
, TREE_OPERAND (exp
, 1));
15924 /* Optimize the special-case of a zero lower bound.
15926 We convert the low_bound to sizetype to avoid some problems
15927 with constant folding. (E.g. suppose the lower bound is 1,
15928 and its mode is QI. Without the conversion,l (ARRAY
15929 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15930 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15931 if (! integer_zerop (low_bound
))
15932 index
= size_diffop_loc (loc
, index
,
15933 fold_convert_loc (loc
, sizetype
, low_bound
));
15939 && TYPE_MODE (TREE_TYPE (exp
)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string
)))
15940 && TREE_CODE (string
) == STRING_CST
15941 && TREE_CODE (index
) == INTEGER_CST
15942 && compare_tree_int (index
, TREE_STRING_LENGTH (string
)) < 0
15943 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string
))))
15945 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string
)))) == 1))
15946 return build_int_cst_type (TREE_TYPE (exp
),
15947 (TREE_STRING_POINTER (string
)
15948 [TREE_INT_CST_LOW (index
)]));
15953 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15954 an integer constant, real, or fixed-point constant.
15956 TYPE is the type of the result. */
15959 fold_negate_const (tree arg0
, tree type
)
15961 tree t
= NULL_TREE
;
15963 switch (TREE_CODE (arg0
))
15968 wide_int val
= wi::neg (arg0
, &overflow
);
15969 t
= force_fit_type (type
, val
, 1,
15970 (overflow
| TREE_OVERFLOW (arg0
))
15971 && !TYPE_UNSIGNED (type
));
15976 t
= build_real (type
, real_value_negate (&TREE_REAL_CST (arg0
)));
15981 FIXED_VALUE_TYPE f
;
15982 bool overflow_p
= fixed_arithmetic (&f
, NEGATE_EXPR
,
15983 &(TREE_FIXED_CST (arg0
)), NULL
,
15984 TYPE_SATURATING (type
));
15985 t
= build_fixed (type
, f
);
15986 /* Propagate overflow flags. */
15987 if (overflow_p
| TREE_OVERFLOW (arg0
))
15988 TREE_OVERFLOW (t
) = 1;
15993 gcc_unreachable ();
15999 /* Return the tree for abs (ARG0) when ARG0 is known to be either
16000 an integer constant or real constant.
16002 TYPE is the type of the result. */
16005 fold_abs_const (tree arg0
, tree type
)
16007 tree t
= NULL_TREE
;
16009 switch (TREE_CODE (arg0
))
16013 /* If the value is unsigned or non-negative, then the absolute value
16014 is the same as the ordinary value. */
16015 if (!wi::neg_p (arg0
, TYPE_SIGN (type
)))
16018 /* If the value is negative, then the absolute value is
16023 wide_int val
= wi::neg (arg0
, &overflow
);
16024 t
= force_fit_type (type
, val
, -1,
16025 overflow
| TREE_OVERFLOW (arg0
));
16031 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0
)))
16032 t
= build_real (type
, real_value_negate (&TREE_REAL_CST (arg0
)));
16038 gcc_unreachable ();
16044 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16045 constant. TYPE is the type of the result. */
16048 fold_not_const (const_tree arg0
, tree type
)
16050 gcc_assert (TREE_CODE (arg0
) == INTEGER_CST
);
16052 return force_fit_type (type
, wi::bit_not (arg0
), 0, TREE_OVERFLOW (arg0
));
16055 /* Given CODE, a relational operator, the target type, TYPE and two
16056 constant operands OP0 and OP1, return the result of the
16057 relational operation. If the result is not a compile time
16058 constant, then return NULL_TREE. */
16061 fold_relational_const (enum tree_code code
, tree type
, tree op0
, tree op1
)
16063 int result
, invert
;
16065 /* From here on, the only cases we handle are when the result is
16066 known to be a constant. */
16068 if (TREE_CODE (op0
) == REAL_CST
&& TREE_CODE (op1
) == REAL_CST
)
16070 const REAL_VALUE_TYPE
*c0
= TREE_REAL_CST_PTR (op0
);
16071 const REAL_VALUE_TYPE
*c1
= TREE_REAL_CST_PTR (op1
);
16073 /* Handle the cases where either operand is a NaN. */
16074 if (real_isnan (c0
) || real_isnan (c1
))
16084 case UNORDERED_EXPR
:
16098 if (flag_trapping_math
)
16104 gcc_unreachable ();
16107 return constant_boolean_node (result
, type
);
16110 return constant_boolean_node (real_compare (code
, c0
, c1
), type
);
16113 if (TREE_CODE (op0
) == FIXED_CST
&& TREE_CODE (op1
) == FIXED_CST
)
16115 const FIXED_VALUE_TYPE
*c0
= TREE_FIXED_CST_PTR (op0
);
16116 const FIXED_VALUE_TYPE
*c1
= TREE_FIXED_CST_PTR (op1
);
16117 return constant_boolean_node (fixed_compare (code
, c0
, c1
), type
);
16120 /* Handle equality/inequality of complex constants. */
16121 if (TREE_CODE (op0
) == COMPLEX_CST
&& TREE_CODE (op1
) == COMPLEX_CST
)
16123 tree rcond
= fold_relational_const (code
, type
,
16124 TREE_REALPART (op0
),
16125 TREE_REALPART (op1
));
16126 tree icond
= fold_relational_const (code
, type
,
16127 TREE_IMAGPART (op0
),
16128 TREE_IMAGPART (op1
));
16129 if (code
== EQ_EXPR
)
16130 return fold_build2 (TRUTH_ANDIF_EXPR
, type
, rcond
, icond
);
16131 else if (code
== NE_EXPR
)
16132 return fold_build2 (TRUTH_ORIF_EXPR
, type
, rcond
, icond
);
16137 if (TREE_CODE (op0
) == VECTOR_CST
&& TREE_CODE (op1
) == VECTOR_CST
)
16139 unsigned count
= VECTOR_CST_NELTS (op0
);
16140 tree
*elts
= XALLOCAVEC (tree
, count
);
16141 gcc_assert (VECTOR_CST_NELTS (op1
) == count
16142 && TYPE_VECTOR_SUBPARTS (type
) == count
);
16144 for (unsigned i
= 0; i
< count
; i
++)
16146 tree elem_type
= TREE_TYPE (type
);
16147 tree elem0
= VECTOR_CST_ELT (op0
, i
);
16148 tree elem1
= VECTOR_CST_ELT (op1
, i
);
16150 tree tem
= fold_relational_const (code
, elem_type
,
16153 if (tem
== NULL_TREE
)
16156 elts
[i
] = build_int_cst (elem_type
, integer_zerop (tem
) ? 0 : -1);
16159 return build_vector (type
, elts
);
16162 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16164 To compute GT, swap the arguments and do LT.
16165 To compute GE, do LT and invert the result.
16166 To compute LE, swap the arguments, do LT and invert the result.
16167 To compute NE, do EQ and invert the result.
16169 Therefore, the code below must handle only EQ and LT. */
16171 if (code
== LE_EXPR
|| code
== GT_EXPR
)
16176 code
= swap_tree_comparison (code
);
16179 /* Note that it is safe to invert for real values here because we
16180 have already handled the one case that it matters. */
16183 if (code
== NE_EXPR
|| code
== GE_EXPR
)
16186 code
= invert_tree_comparison (code
, false);
16189 /* Compute a result for LT or EQ if args permit;
16190 Otherwise return T. */
16191 if (TREE_CODE (op0
) == INTEGER_CST
&& TREE_CODE (op1
) == INTEGER_CST
)
16193 if (code
== EQ_EXPR
)
16194 result
= tree_int_cst_equal (op0
, op1
);
16196 result
= tree_int_cst_lt (op0
, op1
);
16203 return constant_boolean_node (result
, type
);
16206 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16207 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16211 fold_build_cleanup_point_expr (tree type
, tree expr
)
16213 /* If the expression does not have side effects then we don't have to wrap
16214 it with a cleanup point expression. */
16215 if (!TREE_SIDE_EFFECTS (expr
))
16218 /* If the expression is a return, check to see if the expression inside the
16219 return has no side effects or the right hand side of the modify expression
16220 inside the return. If either don't have side effects set we don't need to
16221 wrap the expression in a cleanup point expression. Note we don't check the
16222 left hand side of the modify because it should always be a return decl. */
16223 if (TREE_CODE (expr
) == RETURN_EXPR
)
16225 tree op
= TREE_OPERAND (expr
, 0);
16226 if (!op
|| !TREE_SIDE_EFFECTS (op
))
16228 op
= TREE_OPERAND (op
, 1);
16229 if (!TREE_SIDE_EFFECTS (op
))
16233 return build1 (CLEANUP_POINT_EXPR
, type
, expr
);
16236 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16237 of an indirection through OP0, or NULL_TREE if no simplification is
16241 fold_indirect_ref_1 (location_t loc
, tree type
, tree op0
)
16247 subtype
= TREE_TYPE (sub
);
16248 if (!POINTER_TYPE_P (subtype
))
16251 if (TREE_CODE (sub
) == ADDR_EXPR
)
16253 tree op
= TREE_OPERAND (sub
, 0);
16254 tree optype
= TREE_TYPE (op
);
16255 /* *&CONST_DECL -> to the value of the const decl. */
16256 if (TREE_CODE (op
) == CONST_DECL
)
16257 return DECL_INITIAL (op
);
16258 /* *&p => p; make sure to handle *&"str"[cst] here. */
16259 if (type
== optype
)
16261 tree fop
= fold_read_from_constant_string (op
);
16267 /* *(foo *)&fooarray => fooarray[0] */
16268 else if (TREE_CODE (optype
) == ARRAY_TYPE
16269 && type
== TREE_TYPE (optype
)
16270 && (!in_gimple_form
16271 || TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
))
16273 tree type_domain
= TYPE_DOMAIN (optype
);
16274 tree min_val
= size_zero_node
;
16275 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
16276 min_val
= TYPE_MIN_VALUE (type_domain
);
16278 && TREE_CODE (min_val
) != INTEGER_CST
)
16280 return build4_loc (loc
, ARRAY_REF
, type
, op
, min_val
,
16281 NULL_TREE
, NULL_TREE
);
16283 /* *(foo *)&complexfoo => __real__ complexfoo */
16284 else if (TREE_CODE (optype
) == COMPLEX_TYPE
16285 && type
== TREE_TYPE (optype
))
16286 return fold_build1_loc (loc
, REALPART_EXPR
, type
, op
);
16287 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16288 else if (TREE_CODE (optype
) == VECTOR_TYPE
16289 && type
== TREE_TYPE (optype
))
16291 tree part_width
= TYPE_SIZE (type
);
16292 tree index
= bitsize_int (0);
16293 return fold_build3_loc (loc
, BIT_FIELD_REF
, type
, op
, part_width
, index
);
16297 if (TREE_CODE (sub
) == POINTER_PLUS_EXPR
16298 && TREE_CODE (TREE_OPERAND (sub
, 1)) == INTEGER_CST
)
16300 tree op00
= TREE_OPERAND (sub
, 0);
16301 tree op01
= TREE_OPERAND (sub
, 1);
16304 if (TREE_CODE (op00
) == ADDR_EXPR
)
16307 op00
= TREE_OPERAND (op00
, 0);
16308 op00type
= TREE_TYPE (op00
);
16310 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16311 if (TREE_CODE (op00type
) == VECTOR_TYPE
16312 && type
== TREE_TYPE (op00type
))
16314 HOST_WIDE_INT offset
= tree_to_shwi (op01
);
16315 tree part_width
= TYPE_SIZE (type
);
16316 unsigned HOST_WIDE_INT part_widthi
= tree_to_shwi (part_width
)/BITS_PER_UNIT
;
16317 unsigned HOST_WIDE_INT indexi
= offset
* BITS_PER_UNIT
;
16318 tree index
= bitsize_int (indexi
);
16320 if (offset
/ part_widthi
< TYPE_VECTOR_SUBPARTS (op00type
))
16321 return fold_build3_loc (loc
,
16322 BIT_FIELD_REF
, type
, op00
,
16323 part_width
, index
);
16326 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16327 else if (TREE_CODE (op00type
) == COMPLEX_TYPE
16328 && type
== TREE_TYPE (op00type
))
16330 tree size
= TYPE_SIZE_UNIT (type
);
16331 if (tree_int_cst_equal (size
, op01
))
16332 return fold_build1_loc (loc
, IMAGPART_EXPR
, type
, op00
);
16334 /* ((foo *)&fooarray)[1] => fooarray[1] */
16335 else if (TREE_CODE (op00type
) == ARRAY_TYPE
16336 && type
== TREE_TYPE (op00type
))
16338 tree type_domain
= TYPE_DOMAIN (op00type
);
16339 tree min_val
= size_zero_node
;
16340 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
16341 min_val
= TYPE_MIN_VALUE (type_domain
);
16342 op01
= size_binop_loc (loc
, EXACT_DIV_EXPR
, op01
,
16343 TYPE_SIZE_UNIT (type
));
16344 op01
= size_binop_loc (loc
, PLUS_EXPR
, op01
, min_val
);
16345 return build4_loc (loc
, ARRAY_REF
, type
, op00
, op01
,
16346 NULL_TREE
, NULL_TREE
);
16351 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16352 if (TREE_CODE (TREE_TYPE (subtype
)) == ARRAY_TYPE
16353 && type
== TREE_TYPE (TREE_TYPE (subtype
))
16354 && (!in_gimple_form
16355 || TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
))
16358 tree min_val
= size_zero_node
;
16359 sub
= build_fold_indirect_ref_loc (loc
, sub
);
16360 type_domain
= TYPE_DOMAIN (TREE_TYPE (sub
));
16361 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
16362 min_val
= TYPE_MIN_VALUE (type_domain
);
16364 && TREE_CODE (min_val
) != INTEGER_CST
)
16366 return build4_loc (loc
, ARRAY_REF
, type
, sub
, min_val
, NULL_TREE
,
16373 /* Builds an expression for an indirection through T, simplifying some
16377 build_fold_indirect_ref_loc (location_t loc
, tree t
)
16379 tree type
= TREE_TYPE (TREE_TYPE (t
));
16380 tree sub
= fold_indirect_ref_1 (loc
, type
, t
);
16385 return build1_loc (loc
, INDIRECT_REF
, type
, t
);
16388 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16391 fold_indirect_ref_loc (location_t loc
, tree t
)
16393 tree sub
= fold_indirect_ref_1 (loc
, TREE_TYPE (t
), TREE_OPERAND (t
, 0));
16401 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16402 whose result is ignored. The type of the returned tree need not be
16403 the same as the original expression. */
16406 fold_ignored_result (tree t
)
16408 if (!TREE_SIDE_EFFECTS (t
))
16409 return integer_zero_node
;
16412 switch (TREE_CODE_CLASS (TREE_CODE (t
)))
16415 t
= TREE_OPERAND (t
, 0);
16419 case tcc_comparison
:
16420 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
16421 t
= TREE_OPERAND (t
, 0);
16422 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 0)))
16423 t
= TREE_OPERAND (t
, 1);
16428 case tcc_expression
:
16429 switch (TREE_CODE (t
))
16431 case COMPOUND_EXPR
:
16432 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
16434 t
= TREE_OPERAND (t
, 0);
16438 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1))
16439 || TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 2)))
16441 t
= TREE_OPERAND (t
, 0);
16454 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16457 round_up_loc (location_t loc
, tree value
, unsigned int divisor
)
16459 tree div
= NULL_TREE
;
16464 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16465 have to do anything. Only do this when we are not given a const,
16466 because in that case, this check is more expensive than just
16468 if (TREE_CODE (value
) != INTEGER_CST
)
16470 div
= build_int_cst (TREE_TYPE (value
), divisor
);
16472 if (multiple_of_p (TREE_TYPE (value
), value
, div
))
16476 /* If divisor is a power of two, simplify this to bit manipulation. */
16477 if (divisor
== (divisor
& -divisor
))
16479 if (TREE_CODE (value
) == INTEGER_CST
)
16481 wide_int val
= value
;
16484 if ((val
& (divisor
- 1)) == 0)
16487 overflow_p
= TREE_OVERFLOW (value
);
16488 val
&= ~(divisor
- 1);
16493 return force_fit_type (TREE_TYPE (value
), val
, -1, overflow_p
);
16499 t
= build_int_cst (TREE_TYPE (value
), divisor
- 1);
16500 value
= size_binop_loc (loc
, PLUS_EXPR
, value
, t
);
16501 t
= build_int_cst (TREE_TYPE (value
), -divisor
);
16502 value
= size_binop_loc (loc
, BIT_AND_EXPR
, value
, t
);
16508 div
= build_int_cst (TREE_TYPE (value
), divisor
);
16509 value
= size_binop_loc (loc
, CEIL_DIV_EXPR
, value
, div
);
16510 value
= size_binop_loc (loc
, MULT_EXPR
, value
, div
);
16516 /* Likewise, but round down. */
16519 round_down_loc (location_t loc
, tree value
, int divisor
)
16521 tree div
= NULL_TREE
;
16523 gcc_assert (divisor
> 0);
16527 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16528 have to do anything. Only do this when we are not given a const,
16529 because in that case, this check is more expensive than just
16531 if (TREE_CODE (value
) != INTEGER_CST
)
16533 div
= build_int_cst (TREE_TYPE (value
), divisor
);
16535 if (multiple_of_p (TREE_TYPE (value
), value
, div
))
16539 /* If divisor is a power of two, simplify this to bit manipulation. */
16540 if (divisor
== (divisor
& -divisor
))
16544 t
= build_int_cst (TREE_TYPE (value
), -divisor
);
16545 value
= size_binop_loc (loc
, BIT_AND_EXPR
, value
, t
);
16550 div
= build_int_cst (TREE_TYPE (value
), divisor
);
16551 value
= size_binop_loc (loc
, FLOOR_DIV_EXPR
, value
, div
);
16552 value
= size_binop_loc (loc
, MULT_EXPR
, value
, div
);
16558 /* Returns the pointer to the base of the object addressed by EXP and
16559 extracts the information about the offset of the access, storing it
16560 to PBITPOS and POFFSET. */
16563 split_address_to_core_and_offset (tree exp
,
16564 HOST_WIDE_INT
*pbitpos
, tree
*poffset
)
16568 int unsignedp
, volatilep
;
16569 HOST_WIDE_INT bitsize
;
16570 location_t loc
= EXPR_LOCATION (exp
);
16572 if (TREE_CODE (exp
) == ADDR_EXPR
)
16574 core
= get_inner_reference (TREE_OPERAND (exp
, 0), &bitsize
, pbitpos
,
16575 poffset
, &mode
, &unsignedp
, &volatilep
,
16577 core
= build_fold_addr_expr_loc (loc
, core
);
16583 *poffset
= NULL_TREE
;
16589 /* Returns true if addresses of E1 and E2 differ by a constant, false
16590 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16593 ptr_difference_const (tree e1
, tree e2
, HOST_WIDE_INT
*diff
)
16596 HOST_WIDE_INT bitpos1
, bitpos2
;
16597 tree toffset1
, toffset2
, tdiff
, type
;
16599 core1
= split_address_to_core_and_offset (e1
, &bitpos1
, &toffset1
);
16600 core2
= split_address_to_core_and_offset (e2
, &bitpos2
, &toffset2
);
16602 if (bitpos1
% BITS_PER_UNIT
!= 0
16603 || bitpos2
% BITS_PER_UNIT
!= 0
16604 || !operand_equal_p (core1
, core2
, 0))
16607 if (toffset1
&& toffset2
)
16609 type
= TREE_TYPE (toffset1
);
16610 if (type
!= TREE_TYPE (toffset2
))
16611 toffset2
= fold_convert (type
, toffset2
);
16613 tdiff
= fold_build2 (MINUS_EXPR
, type
, toffset1
, toffset2
);
16614 if (!cst_and_fits_in_hwi (tdiff
))
16617 *diff
= int_cst_value (tdiff
);
16619 else if (toffset1
|| toffset2
)
16621 /* If only one of the offsets is non-constant, the difference cannot
16628 *diff
+= (bitpos1
- bitpos2
) / BITS_PER_UNIT
;
16632 /* Simplify the floating point expression EXP when the sign of the
16633 result is not significant. Return NULL_TREE if no simplification
16637 fold_strip_sign_ops (tree exp
)
16640 location_t loc
= EXPR_LOCATION (exp
);
16642 switch (TREE_CODE (exp
))
16646 arg0
= fold_strip_sign_ops (TREE_OPERAND (exp
, 0));
16647 return arg0
? arg0
: TREE_OPERAND (exp
, 0);
16651 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp
))))
16653 arg0
= fold_strip_sign_ops (TREE_OPERAND (exp
, 0));
16654 arg1
= fold_strip_sign_ops (TREE_OPERAND (exp
, 1));
16655 if (arg0
!= NULL_TREE
|| arg1
!= NULL_TREE
)
16656 return fold_build2_loc (loc
, TREE_CODE (exp
), TREE_TYPE (exp
),
16657 arg0
? arg0
: TREE_OPERAND (exp
, 0),
16658 arg1
? arg1
: TREE_OPERAND (exp
, 1));
16661 case COMPOUND_EXPR
:
16662 arg0
= TREE_OPERAND (exp
, 0);
16663 arg1
= fold_strip_sign_ops (TREE_OPERAND (exp
, 1));
16665 return fold_build2_loc (loc
, COMPOUND_EXPR
, TREE_TYPE (exp
), arg0
, arg1
);
16669 arg0
= fold_strip_sign_ops (TREE_OPERAND (exp
, 1));
16670 arg1
= fold_strip_sign_ops (TREE_OPERAND (exp
, 2));
16672 return fold_build3_loc (loc
,
16673 COND_EXPR
, TREE_TYPE (exp
), TREE_OPERAND (exp
, 0),
16674 arg0
? arg0
: TREE_OPERAND (exp
, 1),
16675 arg1
? arg1
: TREE_OPERAND (exp
, 2));
16680 const enum built_in_function fcode
= builtin_mathfn_code (exp
);
16683 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
16684 /* Strip copysign function call, return the 1st argument. */
16685 arg0
= CALL_EXPR_ARG (exp
, 0);
16686 arg1
= CALL_EXPR_ARG (exp
, 1);
16687 return omit_one_operand_loc (loc
, TREE_TYPE (exp
), arg0
, arg1
);
16690 /* Strip sign ops from the argument of "odd" math functions. */
16691 if (negate_mathfn_p (fcode
))
16693 arg0
= fold_strip_sign_ops (CALL_EXPR_ARG (exp
, 0));
16695 return build_call_expr_loc (loc
, get_callee_fndecl (exp
), 1, arg0
);