/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "coretypes.h"
#include "tree-ssa-operands.h"
#include "optabs-query.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "langhooks.h"
#include "generic-match.h"
#include "gimple-fold.h"
#include "tree-into-ssa.h"
#include "case-cfn-macros.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#ifndef LOAD_EXTEND_OP
#define LOAD_EXTEND_OP(M) UNKNOWN
#endif
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
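
/* Editorial example (a sketch, not part of the original source): in the
   encoding above, COMPCODE_LT occupies bit 0, COMPCODE_EQ bit 1 and
   COMPCODE_GT bit 2, so combining two comparisons under || is just bitwise
   OR of their compcodes: COMPCODE_LT | COMPCODE_EQ == 1 | 2 == 3
   == COMPCODE_LE, matching (a < b || a == b) <=> (a <= b).  Likewise
   COMPCODE_ORD == 7 is "less, equal or greater", i.e. neither operand
   is a NaN.  */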
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (location_t, tree, tree, enum tree_code,
			tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
					tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static bool vec_cst_ctor_to_array (tree, tree *);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
/* Similar to protected_set_expr_location, but never modify x in place,
   if location can and needs to be set, unshare it.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
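
/* Editorial example (not in the original source): for INTEGER_CST operands
   of values 12 and 4, div_if_zero_remainder returns an INTEGER_CST of
   value 3; for 13 and 4 the remainder is nonzero, so it returns
   NULL_TREE.  */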
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}
/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
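
/* Editorial sketch (not in the original source) of the intended calling
   pattern; the statement and the warning level used here are placeholders:

     fold_defer_overflow_warnings ();
     tree res = fold (expr);
     fold_undefer_overflow_warnings (res != NULL_TREE, stmt,
				     WARN_STRICT_OVERFLOW_MISC);

   i.e. a caller brackets its folding with defer/undefer and only lets the
   deferred -Wstrict-overflow warning out if it actually uses the result.  */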
/* Return true if the built-in mathematical function specified by FN
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    CASE_CFN_ASIN:
    CASE_CFN_ASINH:
    CASE_CFN_ATAN:
    CASE_CFN_ATANH:
    CASE_CFN_CASIN:
    CASE_CFN_CASINH:
    CASE_CFN_CATAN:
    CASE_CFN_CATANH:
    CASE_CFN_CBRT:
    CASE_CFN_CPROJ:
    CASE_CFN_CSIN:
    CASE_CFN_CSINH:
    CASE_CFN_CTAN:
    CASE_CFN_CTANH:
    CASE_CFN_ERF:
    CASE_CFN_LLROUND:
    CASE_CFN_LROUND:
    CASE_CFN_ROUND:
    CASE_CFN_SIN:
    CASE_CFN_SINH:
    CASE_CFN_TAN:
    CASE_CFN_TANH:
    CASE_CFN_TRUNC:
      return true;

    CASE_CFN_LLRINT:
    CASE_CFN_LRINT:
    CASE_CFN_NEARBYINT:
    CASE_CFN_RINT:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	int count = TYPE_VECTOR_SUBPARTS (type), i;

	for (i = 0; i < count; i++)
	  if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  || HONOR_SIGNED_ZEROS (element_mode (type))
	  || (INTEGRAL_TYPE_P (type)
	      && ! TYPE_OVERFLOW_WRAPS (type)))
	break;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	     && !HONOR_SIGNED_ZEROS (element_mode (type))
	     && (! INTEGRAL_TYPE_P (type)
		 || TYPE_OVERFLOW_WRAPS (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* INT_MIN/n * n doesn't overflow while negating one operand it does
	 if n is a power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		 && ! integer_pow2p (TREE_OPERAND (t, 0)))
		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
		    && ! integer_pow2p (TREE_OPERAND (t, 1)))))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      if (negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if (! INTEGRAL_TYPE_P (TREE_TYPE (t))
	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	      && ! integer_onep (TREE_OPERAND (t, 1))))
	return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	if (rpart && ipart)
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	int count = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts = XALLOCAVEC (tree, count);

	for (i = 0; i < count; i++)
	  {
	    elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      if (negate_expr_p (TREE_OPERAND (t, 0)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (TREE_OPERAND (t, 0)),
				TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if ((! INTEGRAL_TYPE_P (TREE_TYPE (t))
	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	       && ! integer_onep (TREE_OPERAND (t, 1))))
	  && negate_expr_p (TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0),
				negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
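
/* Editorial worked example (not in the original source) for the
   RSHIFT_EXPR case above: with 32-bit int, (int) x >> 31 is 0 or -1, so
   -((int) x >> 31) is 0 or 1; rewriting it as (unsigned) x >> 31, which is
   also 0 or 1, removes the negation without changing the value.  */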
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating illegal MINUS pointer expression.  LOC is the location of
   the converted variable part.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (location_t loc, tree in, tree type, enum tree_code code,
	    tree *conp, tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p && var)
	{
	  /* Convert to TYPE before negating.  */
	  var = fold_convert_loc (loc, type, var);
	  var = negate_expr (var);
	}
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      if (var)
	{
	  /* Convert to TYPE before negating.  */
	  var = fold_convert_loc (loc, type, var);
	  var = negate_expr (var);
	}
    }

  return var;
}
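
/* Editorial worked example (not in the original source): splitting
   IN = x - 4 with CODE = PLUS_EXPR and NEGATE_P = 0 stores the literal 4
   in *MINUS_LITP (it was subtracted), leaves *CONP and *LITP null, and
   returns the variable part x.  For IN = x + 4 the literal goes to *LITP
   instead.  */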
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}
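
/* Editorial worked example (not in the original source): re-associating
   T1 = -A with T2 = B + C under PLUS_EXPR takes the NEGATE_EXPR branch
   above (the guard fires because T2 has code PLUS_EXPR) and builds
   (B + C) - A with build2_loc rather than fold_build2_loc, so no further
   folding of the new node is triggered and recursion cannot loop.  */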
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
		   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wi::to_wide (parg2, TYPE_PRECISION (type));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RSHIFT_EXPR)
	    code = LSHIFT_EXPR;
	  else
	    code = RSHIFT_EXPR;
	}

      if (code == RSHIFT_EXPR)
	/* It's unclear from the C standard whether shifts can overflow.
	   The following code ignores overflow; perhaps a C standard
	   interpretation ruling is needed.  */
	res = wi::rshift (arg1, arg2, sign);
      else
	res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, arg2);
      else
	res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
		      (((sign == SIGNED || overflowable == -1)
			&& overflow)
		       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}
tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
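
/* Editorial example (not in the original source): for 32-bit int,
   int_const_binop (PLUS_EXPR, c1, c2) with c1 = 2 and c2 = 3 yields an
   INTEGER_CST of value 5; if the wide-int addition overflows the type,
   force_fit_type records that by setting TREE_OVERFLOW on the result.  */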
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      if (code == POINTER_PLUS_EXPR)
	return int_const_binop (PLUS_EXPR,
				arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a signaling NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISSIGNALING_NAN (d1)
	      || REAL_VALUE_ISSIGNALING_NAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && real_equal (&d2, &dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	{
	  /* Make resulting NaN value to be qNaN when flag_signaling_nans
	     is off.  */
	  d1.signalling = 0;
	  t = build_real (type, d1);
	  return t;
	}
      else if (REAL_VALUE_ISNAN (d2))
	{
	  /* Make resulting NaN value to be qNaN when flag_signaling_nans
	     is off.  */
	  d2.signalling = 0;
	  t = build_real (type, d2);
	  return t;
	}

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may dependent upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  if (TREE_CODE (arg2) != FIXED_CST)
	    return NULL_TREE;
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    if (TREE_CODE (arg2) != INTEGER_CST)
	      return NULL_TREE;
	    wide_int w2 = arg2;
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.elt (0);
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru.  */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to minimize
		 overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  elts[i] = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and return NULL_TREE */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);

	  elts[i] = const_binop (code, elem1, arg2);

	  /* It is possible that const_binop cannot handle the given
	     code and return NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }
  return NULL_TREE;
}
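
/* Editorial worked example (not in the original source) of the
   "straightforward" complex division above: for a = 1 + 2i and b = 3 + 4i,
   t = br*br + bi*bi = 25, ar*br + ai*bi = 11 and ai*br - ar*bi = 2, so
   a / b = 11/25 + (2/25)i = 0.44 + 0.08i.  */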
/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ???  Until we make the const_binop worker take the type of the
     result as argument put those cases that need it here.  */
  switch (code)
    {
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
	   && TREE_CODE (arg2) == REAL_CST)
	  || (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg2) == INTEGER_CST))
	return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
	if (TREE_CODE (arg1) != VECTOR_CST
	    || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg1, elts)
	    || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
	  return NULL_TREE;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
					  ? NOP_EXPR : FIX_TRUNC_EXPR,
					  TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
	unsigned int out, ofs, scale;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 4);
	if (!vec_cst_ctor_to_array (arg1, elts)
	    || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
	  return NULL_TREE;

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	for (out = 0; out < nelts; out++)
	  {
	    unsigned int in1 = (out << scale) + ofs;
	    unsigned int in2 = in1 + nelts * 2;
	    tree t1, t2;

	    t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
	    t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    elts[out] = const_binop (MULT_EXPR, t1, t2);
	    if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
		       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}
/* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
   Return zero if computing the constants is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  /* Don't perform the operation, other than NEGATE and ABS, if
     flag_signaling_nans is on and the operand is a signaling NaN.  */
  if (TREE_CODE (arg0) == REAL_CST
      && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
      && code != NEGATE_EXPR
      && code != ABS_EXPR)
    return NULL_TREE;

  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      /* If the source address is 0, and the source address space
	 cannot have a valid object at 0, fold to dest type null.  */
      if (integer_zerop (arg0)
	  && !(targetm.addr_space.zero_address_valid
	       (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
	return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
	/* Can't call fold_negate_const directly here as that doesn't
	   handle all cases and we might not be able to negate some
	   constants.  */
	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
	if (tem && CONSTANT_CLASS_P (tem))
	  return tem;
	break;
      }

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
					  TREE_TYPE (type));
	  return build_complex (type, TREE_REALPART (arg0), ipart);
	}
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree *elements;
	  tree elem;
	  unsigned count = VECTOR_CST_NELTS (arg0), i;

	  elements = XALLOCAVEC (tree, count);
	  for (i = 0; i < count; i++)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
		break;
	      elements[i] = elem;
	    }
	  if (i == count)
	    return build_vector (type, elements);
	}
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;
	enum tree_code subcode;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 2);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR))
	  elts += nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else
	  subcode = FLOAT_EXPR;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
	unsigned int nelts, i;
	tree *elts;
	enum tree_code subcode;

	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;
	nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	switch (code)
	  {
	  case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
	  case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
	  case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
	  default: gcc_unreachable ();
	  }

	for (i = 1; i < nelts; i++)
	  {
	    elts[0] = const_binop (subcode, elts[0], elts[i]);
	    if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
	      return NULL_TREE;
	  }

	return elts[0];
      }

    default:
      break;
    }

  return NULL_TREE;
}
1732 indicates which particular sizetype to create. */
1735 size_int_kind (HOST_WIDE_INT number
, enum size_type_kind kind
)
1737 return build_int_cst (sizetype_tab
[(int) kind
], number
);
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
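
/* Editorial example (not in the original source):
   size_binop (PLUS_EXPR, size_int (4), size_int (8)) takes the constant
   fast path above and yields a sizetype INTEGER_CST of value 12, with
   overflow tracked even though sizetype is unsigned.  */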
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}
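
/* Editorial example (not in the original source): for sizetype constants
   ARG0 = 2 and ARG1 = 5, size_diffop computes 5 - 2 = 3 in the unsigned
   type (which cannot overflow), converts to ssizetype and subtracts from
   zero, returning -3 rather than a huge wrapped unsigned difference.  */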
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
			 !POINTER_TYPE_P (TREE_TYPE (arg1)),
			 TREE_OVERFLOW (arg1));
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (real_less (&r, &l))
	{
	  overflow = true;
	  val = lt;
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (real_less (&u, &r))
	    {
	      overflow = true;
	      val = ut;
	    }
	}
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
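
/* Editorial example (not in the original source) of the saturating
   semantics described above: converting the REAL_CST 1.0e30 to 32-bit int
   yields INT_MAX (2147483647) with TREE_OVERFLOW set, and converting a NaN
   yields 0, likewise flagged as overflow.  */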
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  machine_mode mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
			  HOST_BITS_PER_DOUBLE_INT,
			  SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
				HOST_BITS_PER_DOUBLE_INT,
				SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     By checking if the fractional bits are not zero to add 1 to temp.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type (type, temp, -1,
		      (temp.is_negative ()
		       && (TYPE_UNSIGNED (type)
			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
		      | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  /* Don't perform the operation if flag_signaling_nans is on
     and the operand is a signaling NaN.  */
  if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
    return NULL_TREE;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	   && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
	   && REAL_VALUE_ISINF (value)
	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
			      TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;
  double_int di;

  gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);

  di.low = TREE_INT_CST_ELT (arg1, 0);
  if (TREE_INT_CST_NUNITS (arg1) == 1)
    di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
  else
    di.high = TREE_INT_CST_ELT (arg1, 1);

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
				       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
					&TREE_REAL_CST (arg1),
					TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_fixed_from_real (type, arg1);
    }
  else if (TREE_CODE (type) == VECTOR_TYPE)
    {
      if (TREE_CODE (arg1) == VECTOR_CST
	  && TYPE_VECTOR_SUBPARTS (type) == VECTOR_CST_NELTS (arg1))
	{
	  int len = TYPE_VECTOR_SUBPARTS (type);
	  tree elttype = TREE_TYPE (type);
	  tree *v = XALLOCAVEC (tree, len);
	  for (int i = 0; i < len; ++i)
	    {
	      tree elt = VECTOR_CST_ELT (arg1, i);
	      tree cvt = fold_convert_const (code, elttype, elt);
	      if (cvt == NULL_TREE)
		return NULL_TREE;
	      v[i] = cvt;
	    }
	  return build_vector (type, v);
	}
    }
  return NULL_TREE;
}
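
/* Editorial example (not in the original source): fold_convert_const with
   code FIX_TRUNC_EXPR, an integer TYPE and the REAL_CST 3.7 dispatches to
   fold_convert_const_int_from_real above and produces the INTEGER_CST 3.  */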
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}
/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      return (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	      || TREE_CODE (orig) == OFFSET_TYPE);

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
          && (TYPE_ADDR_SPACE (TREE_TYPE (type))
              != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
        return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, REALPART_EXPR,
                                                  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == FIXED_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1_loc (loc, NOP_EXPR, type, arg);

        case FIXED_POINT_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
          || TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            goto fold_convert_exit;
        }

      switch (TREE_CODE (orig))
        {
        case FIXED_POINT_TYPE:
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case BOOLEAN_TYPE:
        case REAL_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
        case FIXED_POINT_TYPE:
          return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                  fold_convert_loc (loc, TREE_TYPE (type),
                                                    arg),
                                  fold_convert_loc (loc, TREE_TYPE (type),
                                                    integer_zero_node));
        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 0));
                ipart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 1));
                return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                        rpart, ipart);
              }

            arg = save_expr (arg);
            rpart = fold_build1_loc (loc, REALPART_EXPR,
                                     TREE_TYPE (orig), arg);
            ipart = fold_build1_loc (loc, IMAGPART_EXPR,
                                     TREE_TYPE (orig), arg);
            rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
            ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
            return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
          }

        default:
          gcc_unreachable ();
        }

    case VECTOR_TYPE:
      if (integer_zerop (arg))
        return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
                  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      gcc_unreachable ();
    }
 fold_convert_exit:
  protected_set_expr_location_unshare (tem, loc);
  return tem;
}
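
/* For example, fold_convert_loc folds (double) 1 to the constant 1.0
   outright via fold_convert_const, turns (double) i for a variable into a
   FLOAT_EXPR, and converts a complex value to a scalar type by keeping
   only its real part.  */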
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (const_tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
  {
  case VAR_DECL: case PARM_DECL: case RESULT_DECL:
  case LABEL_DECL: case FUNCTION_DECL: case SSA_NAME:

  case COMPONENT_REF: case MEM_REF: case INDIRECT_REF:
  case ARRAY_REF: case ARRAY_RANGE_REF:
  case BIT_FIELD_REF: case OBJ_TYPE_REF:

  case REALPART_EXPR: case IMAGPART_EXPR:
  case PREINCREMENT_EXPR:
  case PREDECREMENT_EXPR:
  case SAVE_EXPR:
  case TRY_CATCH_EXPR:
  case WITH_CLEANUP_EXPR:
  case COMPOUND_EXPR: case MODIFY_EXPR: case TARGET_EXPR:
  case COND_EXPR: case BIND_EXPR:
    break;

  default:
    /* Assume the worst for front-end tree codes.  */
    if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
      break;
    return false;
  }

  return true;
}
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue_loc (location_t loc, tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;
  return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
}
/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue_loc (location_t loc, tree x)
{
  return protected_set_expr_location_unshare (x, loc);
}
/* Given a tree comparison code, return the code that is the logical inverse.
   It is generally not safe to do this for floating-point comparisons, except
   for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
   ERROR_MARK in this case.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
      && code != ORDERED_EXPR && code != UNORDERED_EXPR)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
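
/* For example, the inverse of LT_EXPR is GE_EXPR when NaNs cannot appear,
   but UNGE_EXPR when they can, since !(x < y) must hold when either
   operand is a NaN.  Under -ftrapping-math we refuse (ERROR_MARK) for
   such codes, because x < y traps on unordered operands while the
   inverted unordered comparison would not.  */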
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR: case NE_EXPR:
    case ORDERED_EXPR: case UNORDERED_EXPR:
    case LTGT_EXPR: case UNEQ_EXPR:
      return code;
    case GT_EXPR: return LT_EXPR;
    case GE_EXPR: return LE_EXPR;
    case LT_EXPR: return GT_EXPR;
    case LE_EXPR: return GE_EXPR;
    case UNGT_EXPR: return UNLT_EXPR;
    case UNGE_EXPR: return UNLE_EXPR;
    case UNLT_EXPR: return UNGT_EXPR;
    case UNLE_EXPR: return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR: return COMPCODE_LT;
    case EQ_EXPR: return COMPCODE_EQ;
    case LE_EXPR: return COMPCODE_LE;
    case GT_EXPR: return COMPCODE_GT;
    case NE_EXPR: return COMPCODE_NE;
    case GE_EXPR: return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR: return COMPCODE_UNLT;
    case UNEQ_EXPR: return COMPCODE_UNEQ;
    case UNLE_EXPR: return COMPCODE_UNLE;
    case UNGT_EXPR: return COMPCODE_UNGT;
    case LTGT_EXPR: return COMPCODE_LTGT;
    case UNGE_EXPR: return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT: return LT_EXPR;
    case COMPCODE_EQ: return EQ_EXPR;
    case COMPCODE_LE: return LE_EXPR;
    case COMPCODE_GT: return GT_EXPR;
    case COMPCODE_NE: return NE_EXPR;
    case COMPCODE_GE: return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT: return UNLT_EXPR;
    case COMPCODE_UNEQ: return UNEQ_EXPR;
    case COMPCODE_UNLE: return UNLE_EXPR;
    case COMPCODE_UNGT: return UNGT_EXPR;
    case COMPCODE_LTGT: return LTGT_EXPR;
    case COMPCODE_UNGE: return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
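
/* The encoding gives each of "less", "equal", "greater" and "unordered"
   its own bit, so for instance COMPCODE_LE is COMPCODE_LT | COMPCODE_EQ,
   COMPCODE_LTGT is COMPCODE_LT | COMPCODE_GT, and COMPCODE_NE also
   includes the unordered bit.  A logical AND or OR of two comparisons of
   the same operands therefore becomes a bitwise AND or OR of their
   compcodes, which combine_comparisons below relies on.  */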
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

tree
combine_comparisons (location_t loc,
                     enum tree_code code, enum tree_code lcode,
                     enum tree_code rcode, tree truth_type,
                     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (ll_arg);
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  int compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
         which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
        compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
        compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
         under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
                   && (lcompcode != COMPCODE_EQ)
                   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
                   && (rcompcode != COMPCODE_EQ)
                   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
                  && (compcode != COMPCODE_EQ)
                  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
         such that the RHS, if evaluated, will never trap.  For
         example, in ORD (x, y) && (x < y), we evaluate the RHS only
         if neither x nor y is NaN.  (This is a mixed blessing: for
         example, the expression above will never trap, hence
         optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
          || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
        rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
         trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
          && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
        return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
        return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    {
      enum tree_code tcode;

      tcode = compcode_to_comparison ((enum comparison_code) compcode);
      return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
    }
}
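
/* A worked example: for (x < y) && (x == y) the compcodes are ANDed and
   COMPCODE_LT & COMPCODE_EQ is COMPCODE_FALSE, so the result folds to
   false; for (x < y) || (x == y) they are ORed to COMPCODE_LE and the
   result is the single comparison x <= y (assuming NaNs need not be
   honored).  */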
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.

   If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
   not values of expressions.

   Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
   any operand with side effects.  This is unnecessarily conservative in the
   case we know that arg0 and arg1 are in disjoint code paths (such as in
   the ?: operator).  In addition OEP_MATCH_SIDE_EFFECTS is used when
   comparing addresses with the TREE_CONSTANT flag set, so we know that
   &var == &var even if var is volatile.  */
int
operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
{
  /* When checking, verify at the outermost operand_equal_p call that
     if operand_equal_p returns non-zero then ARG0 and ARG1 have the same
     hash value.  */
  if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
    {
      if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
        {
          if (arg0 != arg1)
            {
              inchash::hash hstate0 (0), hstate1 (0);
              inchash::add_expr (arg0, hstate0, flags | OEP_HASH_CHECK);
              inchash::add_expr (arg1, hstate1, flags | OEP_HASH_CHECK);
              hashval_t h0 = hstate0.end ();
              hashval_t h1 = hstate1.end ();
              gcc_assert (h0 == h1);
            }
          return 1;
        }
      else
        return 0;
    }

  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
      || TREE_TYPE (arg0) == error_mark_node
      || TREE_TYPE (arg1) == error_mark_node)
    return 0;

  /* Similar, if either does not have a type (like a released SSA name),
     they aren't equal.  */
  if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
    return 0;

  /* We cannot consider pointers to different address space equal.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && POINTER_TYPE_P (TREE_TYPE (arg1))
      && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
          != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
    return 0;

  /* Check equality of integer constants before bailing out due to
     precision differences.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* Address of INTEGER_CST is not defined; check that we did not forget
         to drop the OEP_ADDRESS_OF flags.  */
      gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
      return tree_int_cst_equal (arg0, arg1);
    }

  if (!(flags & OEP_ADDRESS_OF))
    {
      /* If both types don't have the same signedness, then we can't consider
         them equal.  We must check this before the STRIP_NOPS calls
         because they may change the signedness of the arguments.  As pointers
         strictly don't have a signedness, require either two pointers or
         two non-pointers as well.  */
      if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
          || POINTER_TYPE_P (TREE_TYPE (arg0))
             != POINTER_TYPE_P (TREE_TYPE (arg1)))
        return 0;

      /* If both types don't have the same precision, then it is not safe
         to strip NOPs.  */
      if (element_precision (TREE_TYPE (arg0))
          != element_precision (TREE_TYPE (arg1)))
        return 0;

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
    }
#if 0
  /* FIXME: Fortran FE currently produce ADDR_EXPR of NOP_EXPR.  Enable the
     sanity check once the issue is solved.  */
  else
    /* Addresses of conversions and SSA_NAMEs (and many other things)
       are not defined.  Check that we did not forget to drop the
       OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags.  */
    gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
                         && TREE_CODE (arg0) != SSA_NAME);
#endif

  /* In case both args are comparisons but with different comparison
     code, try to swap the comparison operands of one arg to produce
     a match and compare that variant.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      && COMPARISON_CLASS_P (arg0)
      && COMPARISON_CLASS_P (arg1))
    {
      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));

      if (TREE_CODE (arg0) == swap_code)
        return operand_equal_p (TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg1, 1), flags)
               && operand_equal_p (TREE_OPERAND (arg0, 1),
                                   TREE_OPERAND (arg1, 0), flags);
    }

  if (TREE_CODE (arg0) != TREE_CODE (arg1))
    {
      /* NOP_EXPR and CONVERT_EXPR are considered equal.  */
      if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
        ;
      else if (flags & OEP_ADDRESS_OF)
        {
          /* If we are interested in comparing addresses ignore
             MEM_REF wrappings of the base that can appear just for
             TBAA reasons.  */
          if (TREE_CODE (arg0) == MEM_REF
              && DECL_P (arg1)
              && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
              && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
              && integer_zerop (TREE_OPERAND (arg0, 1)))
            return 1;
          else if (TREE_CODE (arg1) == MEM_REF
                   && DECL_P (arg0)
                   && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
                   && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
                   && integer_zerop (TREE_OPERAND (arg1, 1)))
            return 1;
          return 0;
        }
      else
        return 0;
    }

  /* When not checking addresses, this is needed for conversions and for
     COMPONENT_REF.  Might as well play it safe and always test this.  */
  if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
          && !(flags & OEP_ADDRESS_OF)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.
     If we are taking an invariant address of two identical objects
     they are necessarily equal as well.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
          || (flags & OEP_MATCH_SIDE_EFFECTS)
          || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
        return tree_int_cst_equal (arg0, arg1);

      case FIXED_CST:
        return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
                                       TREE_FIXED_CST (arg1));

      case REAL_CST:
        if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
          return 1;

        if (!HONOR_SIGNED_ZEROS (arg0))
          {
            /* If we do not distinguish between signed and unsigned zero,
               consider them equal.  */
            if (real_zerop (arg0) && real_zerop (arg1))
              return 1;
          }
        return 0;

      case VECTOR_CST:
        {
          unsigned i;

          if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
            return 0;

          for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
            {
              if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
                                    VECTOR_CST_ELT (arg1, i), flags))
                return 0;
            }
          return 1;
        }

      case COMPLEX_CST:
        return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
                                 flags)
                && operand_equal_p (TREE_IMAGPART (arg0),
                                    TREE_IMAGPART (arg1), flags));

      case STRING_CST:
        return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
                && ! memcmp (TREE_STRING_POINTER (arg0),
                             TREE_STRING_POINTER (arg1),
                             TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
        gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
        return operand_equal_p (TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg1, 0),
                                flags | OEP_ADDRESS_OF
                                | OEP_MATCH_SIDE_EFFECTS);

      case CONSTRUCTOR:
        /* In GIMPLE empty constructors are allowed in initializers of
           aggregates.  */
        return (!vec_safe_length (CONSTRUCTOR_ELTS (arg0))
                && !vec_safe_length (CONSTRUCTOR_ELTS (arg1)));

      default:
        break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;

/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, they both
   must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
                                    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N) \
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
        {
        CASE_CONVERT:
        case FIX_TRUNC_EXPR:
          if (TYPE_UNSIGNED (TREE_TYPE (arg0))
              != TYPE_UNSIGNED (TREE_TYPE (arg1)))
            return 0;
          break;
        default:
          break;
        }

      return OP_SAME (0);

    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
        return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 1), flags)
              && operand_equal_p (TREE_OPERAND (arg0, 1),
                                  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
         dereferencing contain a side effect, these cannot be equal,
         but their addresses can be.  */
      if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
          && (TREE_SIDE_EFFECTS (arg0)
              || TREE_SIDE_EFFECTS (arg1)))
        return 0;

      switch (TREE_CODE (arg0))
        {
        case INDIRECT_REF:
          if (!(flags & OEP_ADDRESS_OF)
              && (TYPE_ALIGN (TREE_TYPE (arg0))
                  != TYPE_ALIGN (TREE_TYPE (arg1))))
            return 0;
          flags &= ~OEP_ADDRESS_OF;
          return OP_SAME (0);

        case IMAGPART_EXPR:
          /* Require the same offset.  */
          if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
                                TYPE_SIZE (TREE_TYPE (arg1)),
                                flags & ~OEP_ADDRESS_OF))
            return 0;

          /* Fallthru.  */
        case REALPART_EXPR:
        case VIEW_CONVERT_EXPR:
          return OP_SAME (0);

        case TARGET_MEM_REF:
        case MEM_REF:
          if (!(flags & OEP_ADDRESS_OF))
            {
              /* Require equal access sizes */
              if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
                  && (!TYPE_SIZE (TREE_TYPE (arg0))
                      || !TYPE_SIZE (TREE_TYPE (arg1))
                      || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
                                           TYPE_SIZE (TREE_TYPE (arg1)),
                                           flags)))
                return 0;
              /* Verify that access happens in similar types.  */
              if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
                return 0;
              /* Verify that accesses are TBAA compatible.  */
              if (!alias_ptr_types_compatible_p
                    (TREE_TYPE (TREE_OPERAND (arg0, 1)),
                     TREE_TYPE (TREE_OPERAND (arg1, 1)))
                  || (MR_DEPENDENCE_CLIQUE (arg0)
                      != MR_DEPENDENCE_CLIQUE (arg1))
                  || (MR_DEPENDENCE_BASE (arg0)
                      != MR_DEPENDENCE_BASE (arg1)))
                return 0;
              /* Verify that alignment is compatible.  */
              if (TYPE_ALIGN (TREE_TYPE (arg0))
                  != TYPE_ALIGN (TREE_TYPE (arg1)))
                return 0;
            }
          flags &= ~OEP_ADDRESS_OF;
          return (OP_SAME (0) && OP_SAME (1)
                  /* TARGET_MEM_REF require equal extra operands.  */
                  && (TREE_CODE (arg0) != TARGET_MEM_REF
                      || (OP_SAME_WITH_NULL (2)
                          && OP_SAME_WITH_NULL (3)
                          && OP_SAME_WITH_NULL (4))));

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          if (!OP_SAME (0))
            return 0;
          flags &= ~OEP_ADDRESS_OF;
          /* Compare the array index by value if it is constant first as we
             may have different types but same value here.  */
          return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
                                       TREE_OPERAND (arg1, 1))
                   || OP_SAME (1))
                  && OP_SAME_WITH_NULL (2)
                  && OP_SAME_WITH_NULL (3)
                  /* Compare low bound and element size as with OEP_ADDRESS_OF
                     we have to account for the offset of the ref.  */
                  && (TREE_TYPE (TREE_OPERAND (arg0, 0))
                      == TREE_TYPE (TREE_OPERAND (arg1, 0))
                      || (operand_equal_p (array_ref_low_bound
                                             (CONST_CAST_TREE (arg0)),
                                           array_ref_low_bound
                                             (CONST_CAST_TREE (arg1)), flags)
                          && operand_equal_p (array_ref_element_size
                                                (CONST_CAST_TREE (arg0)),
                                              array_ref_element_size
                                                (CONST_CAST_TREE (arg1)),
                                              flags))));

        case COMPONENT_REF:
          /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
             may be NULL when we're called to compare MEM_EXPRs.  */
          if (!OP_SAME_WITH_NULL (0)
              || !OP_SAME (1))
            return 0;
          flags &= ~OEP_ADDRESS_OF;
          return OP_SAME_WITH_NULL (2);

        case BIT_FIELD_REF:
          if (!OP_SAME (0))
            return 0;
          flags &= ~OEP_ADDRESS_OF;
          return OP_SAME (1) && OP_SAME (2);

        default:
          return 0;
        }

    case tcc_expression:
      switch (TREE_CODE (arg0))
        {
        case ADDR_EXPR:
          /* Be sure we pass right ADDRESS_OF flag.  */
          gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
          return operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 0),
                                  flags | OEP_ADDRESS_OF);

        case TRUTH_NOT_EXPR:
          return OP_SAME (0);

        case TRUTH_ANDIF_EXPR:
        case TRUTH_ORIF_EXPR:
          return OP_SAME (0) && OP_SAME (1);

        case FMA_EXPR:
        case WIDEN_MULT_PLUS_EXPR:
        case WIDEN_MULT_MINUS_EXPR:
          if (!OP_SAME (2))
            return 0;
          /* The multiplication operands are commutative.  */
          /* FALLTHRU */

        case TRUTH_AND_EXPR:
        case TRUTH_OR_EXPR:
        case TRUTH_XOR_EXPR:
          if (OP_SAME (0) && OP_SAME (1))
            return 1;

          /* Otherwise take into account this is a commutative operation.  */
          return (operand_equal_p (TREE_OPERAND (arg0, 0),
                                   TREE_OPERAND (arg1, 1), flags)
                  && operand_equal_p (TREE_OPERAND (arg0, 1),
                                      TREE_OPERAND (arg1, 0), flags));

        case COND_EXPR:
          if (! OP_SAME (1) || ! OP_SAME (2))
            return 0;
          flags &= ~OEP_ADDRESS_OF;
          return OP_SAME (0);

        case VEC_COND_EXPR:
        case DOT_PROD_EXPR:
          return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

        default:
          return 0;
        }

    case tcc_vl_exp:
      switch (TREE_CODE (arg0))
        {
        case CALL_EXPR:
          if ((CALL_EXPR_FN (arg0) == NULL_TREE)
              != (CALL_EXPR_FN (arg1) == NULL_TREE))
            /* If the CALL_EXPRs are not both internal or both normal
               function calls, then they are not equal.  */
            return 0;
          else if (CALL_EXPR_FN (arg0) == NULL_TREE)
            {
              /* If the CALL_EXPRs call different internal functions, then
                 they are not equal.  */
              if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
                return 0;
            }
          else
            {
              /* If the CALL_EXPRs call different functions, then they are
                 not equal.  */
              if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
                                     flags))
                return 0;
            }

          /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS.  */
          {
            unsigned int cef = call_expr_flags (arg0);
            if (flags & OEP_PURE_SAME)
              cef &= ECF_CONST | ECF_PURE;
            else
              cef &= ECF_CONST;
            if (!cef)
              return 0;
          }

          /* Now see if all the arguments are the same.  */
          {
            const_call_expr_arg_iterator iter0, iter1;
            const_tree a0, a1;
            for (a0 = first_const_call_expr_arg (arg0, &iter0),
                   a1 = first_const_call_expr_arg (arg1, &iter1);
                 a0 && a1;
                 a0 = next_const_call_expr_arg (&iter0),
                   a1 = next_const_call_expr_arg (&iter1))
              if (! operand_equal_p (a0, a1, flags))
                return 0;

            /* If we get here and both argument lists are exhausted
               then the CALL_EXPRs are equal.  */
            return ! (a0 || a1);
          }
        default:
          return 0;
        }

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
              && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
              && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
              && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    case tcc_exceptional:
      if (TREE_CODE (arg0) == CONSTRUCTOR)
        {
          /* In GIMPLE constructors are used only to build vectors from
             elements.  Individual elements in the constructor must be
             indexed in increasing order and form an initial sequence.

             We make no effort to compare constructors in generic.
             (see sem_variable::equals in ipa-icf which can do so for
              constants).  */
          if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
              || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
            return 0;

          /* Be sure that vectors constructed have the same representation.
             We only tested element precision and modes to match.
             Vectors may be BLKmode and thus also check that the number of
             parts match.  */
          if (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))
              != TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)))
            return 0;

          vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
          vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
          unsigned int len = vec_safe_length (v0);

          if (len != vec_safe_length (v1))
            return 0;

          for (unsigned int i = 0; i < len; i++)
            {
              constructor_elt *c0 = &(*v0)[i];
              constructor_elt *c1 = &(*v1)[i];

              if (!operand_equal_p (c0->value, c1->value, flags)
                  /* In GIMPLE the indexes can be either NULL or matching i.
                     Double check this so we won't get false
                     positives for GENERIC.  */
                  || (c0->index
                      && (TREE_CODE (c0->index) != INTEGER_CST
                          || !compare_tree_int (c0->index, i)))
                  || (c1->index
                      && (TREE_CODE (c1->index) != INTEGER_CST
                          || !compare_tree_int (c1->index, i))))
                return 0;
            }
          return 1;
        }
      return 0;

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}
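
/* For example, a + b and b + a compare equal here because PLUS_EXPR is
   commutative, and a < b equals b > a via swap_tree_comparison; but two
   calls f () and f () are only considered equal when the callee is
   ECF_CONST (or ECF_PURE with OEP_PURE_SAME), since otherwise the calls
   may return different values.  */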
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
         to match the longer operand.  */
      primarg1 = fold_convert (signed_or_unsigned_type_for
                               (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
        return 1;
    }

  return 0;
}
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
               || code == COMPOUND_EXPR))
    tclass = tcc_binary;

  else if (tclass == tcc_expression && code == SAVE_EXPR
           && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
         too complex to handle.  */
      if (*cval1 || *cval2)
        return 0;

      tclass = tcc_unary;
      *save_p = 1;
    }

  switch (tclass)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
              && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
        return (twoval_comparison_p (TREE_OPERAND (arg, 0),
                                     cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                        cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 2),
                                        cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
         the second operand, we know *CVAL1 can't be zero.  It must be that
         one side of the comparison is each of the values; test for the
         case where this isn't true by failing if the two operands
         are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
                           TREE_OPERAND (arg, 1), 0))
        return 0;

      if (*cval1 == 0)
        *cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
        ;
      else
        return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
        ;
      else
        return 0;

      return 1;

    default:
      return 0;
    }
}
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (location_t loc, tree arg, tree old0, tree new0,
            tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    tclass = tcc_binary;

  switch (tclass)
    {
    case tcc_unary:
      return fold_build1_loc (loc, code, type,
                              eval_subst (loc, TREE_OPERAND (arg, 0),
                                          old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2_loc (loc, code, type,
                              eval_subst (loc, TREE_OPERAND (arg, 0),
                                          old0, new0, old1, new1),
                              eval_subst (loc, TREE_OPERAND (arg, 1),
                                          old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
        {
        case SAVE_EXPR:
          return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
                             old1, new1);

        case COMPOUND_EXPR:
          return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
                             old1, new1);

        case COND_EXPR:
          return fold_build3_loc (loc, code, type,
                                  eval_subst (loc, TREE_OPERAND (arg, 0),
                                              old0, new0, old1, new1),
                                  eval_subst (loc, TREE_OPERAND (arg, 1),
                                              old0, new0, old1, new1),
                                  eval_subst (loc, TREE_OPERAND (arg, 2),
                                              old0, new0, old1, new1));
        default:
          break;
        }
      /* Fall through - ???  */

    case tcc_comparison:
      {
        tree arg0 = TREE_OPERAND (arg, 0);
        tree arg1 = TREE_OPERAND (arg, 1);

        /* We need to check both for exact equality and tree equality.  The
           former will be true if the operand has a side-effect.  In that
           case, we know the operand occurred exactly once.  */

        if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
          arg0 = new0;
        else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
          arg0 = new1;

        if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
          arg1 = new0;
        else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
          arg1 = new1;

        return fold_build2_loc (loc, code, type, arg0, arg1);
      }

    default:
      return arg;
    }
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement casted to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1_loc (loc, NOP_EXPR, void_type_node,
                       fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2_loc (loc, COMPOUND_EXPR, type,
                       fold_ignored_result (omitted), t);

  return non_lvalue_loc (loc, t);
}
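
/* For example, when fold rewrites f () * 0 as 0, the call cannot simply
   be dropped: omit_one_operand_loc produces COMPOUND_EXPR <f (), 0>, so
   the side effects of f () are still evaluated before the result.  */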
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands_loc (location_t loc, tree type, tree result,
                       tree omitted1, tree omitted2)
{
  tree t = fold_convert_loc (loc, type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
  if (TREE_SIDE_EFFECTS (omitted1))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
}
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

static tree
fold_truth_not_expr (location_t loc, tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  location_t loc1, loc2;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
          && flag_trapping_math
          && code != ORDERED_EXPR && code != UNORDERED_EXPR
          && code != NE_EXPR && code != EQ_EXPR)
        return NULL_TREE;

      code = invert_tree_comparison (code, HONOR_NANS (op_type));
      if (code == ERROR_MARK)
        return NULL_TREE;

      tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
                             TREE_OPERAND (arg, 1));
      if (TREE_NO_WARNING (arg))
        TREE_NO_WARNING (ret) = 1;
      return ret;
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_OR_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_AND_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
         unless the second operand is a TRUTH_NOT_EXPR in which case our
         result is the XOR of the first operand with the inside of the
         negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
        return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
                           TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
        return build2_loc (loc, TRUTH_XOR_EXPR, type,
                           invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
                           TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ORIF_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
        tree arg1 = TREE_OPERAND (arg, 1);
        tree arg2 = TREE_OPERAND (arg, 2);

        loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
        loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);

        /* A COND_EXPR may have a throw as one operand, which
           then has void type.  Just leave void operands
           as they are.  */
        return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
                           VOID_TYPE_P (TREE_TYPE (arg1))
                           ? arg1 : invert_truthvalue_loc (loc1, arg1),
                           VOID_TYPE_P (TREE_TYPE (arg2))
                           ? arg2 : invert_truthvalue_loc (loc2, arg2));
      }

    case COMPOUND_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, COMPOUND_EXPR, type,
                         TREE_OPERAND (arg, 0),
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));

    CASE_CONVERT:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
        return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

      /* ... fall through ...  */

    case FLOAT_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, TREE_CODE (arg), type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
        return NULL_TREE;
      return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));

    case SAVE_EXPR:
      return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, CLEANUP_POINT_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    default:
      return NULL_TREE;
    }
}
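
/* For example, negating a && b applies De Morgan's law and yields
   !a || !b, and negating the comparison a < b yields a >= b (a unge b when
   NaNs are honored); if no such rewrite is safe, we return NULL_TREE and
   the caller wraps a TRUTH_NOT_EXPR around the argument instead.  */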
/* Fold the truth-negation of ARG.  This never alters ARG itself.  We
   assume that ARG is an operation that returns a truth value (0 or 1
   for scalars, 0 or -1 for vectors).  Return the folded expression if
   folding is successful.  Otherwise, return NULL_TREE.  */

static tree
fold_invert_truthvalue (location_t loc, tree arg)
{
  tree type = TREE_TYPE (arg);
  return fold_unary_loc (loc, VECTOR_TYPE_P (type)
                              ? BIT_NOT_EXPR
                              : TRUTH_NOT_EXPR,
                         type, arg);
}
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1 for scalars, 0 or -1 for vectors).  */

tree
invert_truthvalue_loc (location_t loc, tree arg)
{
  if (TREE_CODE (arg) == ERROR_MARK)
    return arg;

  tree type = TREE_TYPE (arg);
  return fold_build1_loc (loc, VECTOR_TYPE_P (type)
                               ? BIT_NOT_EXPR
                               : TRUTH_NOT_EXPR,
                          type, arg);
}
/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
   with code CODE.  This optimization is unsafe.  */

static tree
distribute_real_division (location_t loc, enum tree_code code, tree type,
                          tree arg0, tree arg1)
{
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
                          TREE_OPERAND (arg1, 1), 0))
    return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
                            fold_build2_loc (loc, code, type,
                                             TREE_OPERAND (arg0, 0),
                                             TREE_OPERAND (arg1, 0)),
                            TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
                       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      if (!mul0)
        real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
        real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2_loc (loc, MULT_EXPR, type,
                              TREE_OPERAND (arg0, 0),
                              build_real (type, r0));
    }

  return NULL_TREE;
}
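
/* For example, x / 3.0 + y / 3.0 becomes (x + y) / 3.0 under the first
   pattern, and x / 2.0 - x / 4.0 becomes x * 0.25 under the second.  Both
   rewrites can change rounding and exception behavior, which is why the
   comment above flags this as unsafe; callers are expected to guard it
   with -funsafe-math-optimizations.  */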
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero
   and uses reverse storage order if REVERSEP is nonzero.  ORIG_INNER
   is the original memory reference used to preserve the alias set of
   the access.  */

static tree
make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
                    HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
                    int unsignedp, int reversep)
{
  tree result, bftype;

  if (get_alias_set (inner) != get_alias_set (orig_inner))
    inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
                         build_fold_addr_expr (inner),
                         build_int_cst
                          (reference_alias_ptr_type (orig_inner), 0));

  if (bitpos == 0 && !reversep)
    {
      tree size = TYPE_SIZE (TREE_TYPE (inner));
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
           || POINTER_TYPE_P (TREE_TYPE (inner)))
          && tree_fits_shwi_p (size)
          && tree_to_shwi (size) == bitsize)
        return fold_convert_loc (loc, type, inner);
    }

  bftype = type;
  if (TYPE_PRECISION (bftype) != bitsize
      || TYPE_UNSIGNED (bftype) == !unsignedp)
    bftype = build_nonstandard_integer_type (bitsize, 0);

  result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
                       size_int (bitsize), bitsize_int (bitpos));
  REF_REVERSE_STORAGE_ORDER (result) = reversep;

  if (bftype != type)
    result = fold_convert_loc (loc, type, result);

  return result;
}
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (location_t loc, enum tree_code code,
                            tree compare_type, tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lreversep, rreversep;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit
     size is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
                                &lunsignedp, &lreversep, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
    return 0;

  if (const_p)
    rreversep = lreversep;
  else
    {
      /* If this is not a constant, we can only do something if bit positions,
         sizes, signedness and storage order are the same.  */
      rinner
        = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
                               &runsignedp, &rreversep, &rvolatilep, false);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
          || lunsignedp != runsignedp || lreversep != rreversep || offset != 0
          || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
        return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
                         const_p ? TYPE_ALIGN (TREE_TYPE (linner))
                         : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
                                TYPE_ALIGN (TREE_TYPE (rinner))),
                         word_mode, false);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_cst_type (unsigned_type, -1);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
  mask = const_binop (RSHIFT_EXPR, mask,
                      size_int (nbitsize - lbitsize - lbitpos));

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return fold_build2_loc (loc, code, compare_type,
                            fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
                                             make_bit_field_ref (loc, linner,
                                                                 lhs,
                                                                 unsigned_type,
                                                                 nbitsize,
                                                                 nbitpos, 1,
                                                                 lreversep),
                                             mask),
                            fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
                                             make_bit_field_ref (loc, rinner,
                                                                 rhs,
                                                                 unsigned_type,
                                                                 nbitsize,
                                                                 nbitpos, 1,
                                                                 rreversep),
                                             mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length
     should be all zero.  For signed fields, the high-order bits should agree
     with the sign bit.  */

  if (lunsignedp)
    {
      if (wi::lrshift (rhs, lbitsize) != 0)
        {
          warning (0, "comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }
  else
    {
      wide_int tem = wi::arshift (rhs, lbitsize - 1);
      if (tem != 0 && tem != -1)
        {
          warning (0, "comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = build_int_cst (type, 0);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
                            nbitsize, nbitpos, 1, lreversep);

  rhs = const_binop (BIT_AND_EXPR,
                     const_binop (LSHIFT_EXPR,
                                  fold_convert_loc (loc, unsigned_type, rhs),
                                  size_int (lbitpos)),
                     mask);

  lhs = build2_loc (loc, code, compare_type,
                    build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
  return lhs;
}
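
/* As a rough illustration, given

     struct s { unsigned a : 4; unsigned b : 4; } x;

   the test x.b == 7 can be rewritten as (w & 0xf0) == 0x70, where w is a
   byte-sized load covering both fields: the mask replaces the shift that a
   plain bitfield extraction would need.  The exact masks and shifts depend
   on the target's endianness and alignment.  */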
/* Subroutine for fold_truth_andor_1: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PREVERSEP is set to the storage order of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
                        HOST_WIDE_INT *pbitpos, machine_mode *pmode,
                        int *punsignedp, int *preversep, int *pvolatilep,
                        tree *pmask, tree *pand_mask)
{
  tree exp = *exp_;
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (CONVERT_EXPR_P (exp)
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
        return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
                               punsignedp, preversep, pvolatilep, false);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  *exp_ = exp;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst_type (unsigned_type, -1);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
                            fold_convert_loc (loc, unsigned_type, and_mask),
                            mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bit positions and MASK is SIGNED.  */

static int
all_ones_mask_p (const_tree mask, unsigned int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);

  /* If this function returns true when the type of the mask is
     UNSIGNED, then there will be errors.  In particular see
     gcc.c-torture/execute/990326-1.c.  There does not appear to be
     any documentation paper trail as to why this is so.  But the pre
     wide-int worked with that restriction and it has been preserved
     here.  */
  if (size > precision || TYPE_SIGN (type) == UNSIGNED)
    return false;

  return wi::mask (size, false, precision) == mask;
}
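
/* For example, with a signed 32-bit mask type, a MASK of 0xff and a SIZE
   of 8 return true, since wi::mask (8, false, 32) is 0xff; any mask of an
   unsigned type returns false, per the restriction described above.  */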
/* Subroutine for fold: determine if VAL is the INTEGER_CST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, const_tree val)
{
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (wi::only_sign_bit_p (val, width))
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
/* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (const_tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  STRIP_NOPS (exp);

  return (CONSTANT_CLASS_P (exp)
          || TREE_CODE (exp) == SSA_NAME
          || (DECL_P (exp)
              && ! TREE_ADDRESSABLE (exp)
              && ! TREE_THIS_VOLATILE (exp)
              && ! DECL_NONLOCAL (exp)
              /* Don't regard global variables as simple.  They may be
                 allocated in ways unknown to the compiler (shared memory,
                 #pragma weak, etc).  */
              && ! TREE_PUBLIC (exp)
              && ! DECL_EXTERNAL (exp)
              /* Weakrefs are not safe to be read, since they can be NULL.
                 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
                 have DECL_WEAK flag set.  */
              && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
              /* Loading a static variable is unduly expensive, but global
                 registers aren't expensive.  */
              && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}
/* Subroutine for fold_truth_andor: determine if an operand is simple enough
   to be evaluated unconditionally.
   In addition to simple_operand_p, we assume that comparisons, conversions,
   and logic-not operations are simple, if their operands are simple, too.  */

static bool
simple_operand_p_2 (tree exp)
{
  enum tree_code code;

  if (TREE_SIDE_EFFECTS (exp)
      || tree_could_trap_p (exp))
    return false;

  while (CONVERT_EXPR_P (exp))
    exp = TREE_OPERAND (exp, 0);

  code = TREE_CODE (exp);

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return (simple_operand_p (TREE_OPERAND (exp, 0))
            && simple_operand_p (TREE_OPERAND (exp, 1)));

  if (code == TRUTH_NOT_EXPR)
    return simple_operand_p_2 (TREE_OPERAND (exp, 0));

  return simple_operand_p (exp);
}
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
        X == 2 || X == 3 || X == 4 || X == 5
   and
        X >= 2 && X <= 5
   are converted to
        (unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
             tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
                         arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);

  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}
/* Helper routine for make_range.  Perform one step for it, return
   new expression if the loop should continue or NULL_TREE if it should
   stop.  */

tree
make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
		 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
		 bool *strict_overflow_p)
{
  tree arg0_type = TREE_TYPE (arg0);
  tree n_low, n_high, low = *p_low, high = *p_high;
  int in_p = *p_in_p, n_in_p;

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      /* We can only do something if the range is testing for zero.  */
      if (low == NULL_TREE || high == NULL_TREE
	  || ! integer_zerop (low) || ! integer_zerop (high))
	return NULL_TREE;
      *p_in_p = ! in_p;
      return arg0;

    case EQ_EXPR: case NE_EXPR:
    case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
      /* We can only do something if the range is testing for zero
	 and if the second operand is an integer constant.  Note that
	 saying something is "in" the range we make is done by
	 complementing IN_P since it will set in the initial case of
	 being not equal to zero; "out" is leaving it alone.  */
      if (low == NULL_TREE || high == NULL_TREE
	  || ! integer_zerop (low) || ! integer_zerop (high)
	  || TREE_CODE (arg1) != INTEGER_CST)
	return NULL_TREE;

      switch (code)
	{
	case NE_EXPR:  /* - [c, c]  */
	  low = high = arg1;
	  break;
	case EQ_EXPR:  /* + [c, c]  */
	  in_p = ! in_p, low = high = arg1;
	  break;
	case GT_EXPR:  /* - [-, c] */
	  low = 0, high = arg1;
	  break;
	case GE_EXPR:  /* + [c, -] */
	  in_p = ! in_p, low = arg1, high = 0;
	  break;
	case LT_EXPR:  /* - [c, -] */
	  low = arg1, high = 0;
	  break;
	case LE_EXPR:  /* + [-, c] */
	  in_p = ! in_p, low = 0, high = arg1;
	  break;
	default:
	  gcc_unreachable ();
	}

      /* If this is an unsigned comparison, we also know that EXP is
	 greater than or equal to zero.  We base the range tests we make
	 on that fact, so we record it here so we can parse existing
	 range tests.  We test arg0_type since often the return type
	 of, e.g. EQ_EXPR, is boolean.  */
      if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
	{
	  if (! merge_ranges (&n_in_p, &n_low, &n_high,
			      in_p, low, high, 1,
			      build_int_cst (arg0_type, 0),
			      NULL_TREE))
	    return NULL_TREE;

	  in_p = n_in_p, low = n_low, high = n_high;

	  /* If the high bound is missing, but we have a nonzero low
	     bound, reverse the range so it goes from zero to the low bound
	     minus 1.  */
	  if (high == 0 && low && ! integer_zerop (low))
	    {
	      in_p = ! in_p;
	      high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
				  build_int_cst (TREE_TYPE (low), 1), 0);
	      low = build_int_cst (arg0_type, 0);
	    }
	}

      *p_low = low;
      *p_high = high;
      *p_in_p = in_p;
      return arg0;

    case NEGATE_EXPR:
      /* If flag_wrapv and ARG0_TYPE is signed, make sure
	 low and high are non-NULL, then normalize will DTRT.  */
      if (!TYPE_UNSIGNED (arg0_type)
	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
	{
	  if (low == NULL_TREE)
	    low = TYPE_MIN_VALUE (arg0_type);
	  if (high == NULL_TREE)
	    high = TYPE_MAX_VALUE (arg0_type);
	}

      /* (-x) IN [a,b] -> x in [-b, -a]  */
      n_low = range_binop (MINUS_EXPR, exp_type,
			   build_int_cst (exp_type, 0),
			   0, high, 1);
      n_high = range_binop (MINUS_EXPR, exp_type,
			    build_int_cst (exp_type, 0),
			    0, low, 0);
      if (n_high != 0 && TREE_OVERFLOW (n_high))
	return NULL_TREE;
      goto normalize;

    case BIT_NOT_EXPR:
      /* ~ X -> -X - 1  */
      return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
			 build_int_cst (exp_type, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
      if (TREE_CODE (arg1) != INTEGER_CST)
	return NULL_TREE;

      /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
	 move a constant to the other side.  */
      if (!TYPE_UNSIGNED (arg0_type)
	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
	return NULL_TREE;

      /* If EXP is signed, any overflow in the computation is undefined,
	 so we don't worry about it so long as our computations on
	 the bounds don't overflow.  For unsigned, overflow is defined
	 and this is exactly the right thing.  */
      n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			   arg0_type, low, 0, arg1, 0);
      n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			    arg0_type, high, 1, arg1, 0);
      if ((n_low != 0 && TREE_OVERFLOW (n_low))
	  || (n_high != 0 && TREE_OVERFLOW (n_high)))
	return NULL_TREE;

      if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
	*strict_overflow_p = true;

      normalize:
      /* Check for an unsigned range which has wrapped around the maximum
	 value thus making n_high < n_low, and normalize it.  */
      if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
	{
	  low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
			     build_int_cst (TREE_TYPE (n_high), 1), 0);
	  high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
			      build_int_cst (TREE_TYPE (n_low), 1), 0);

	  /* If the range is of the form +/- [ x+1, x ], we won't
	     be able to normalize it.  But then, it represents the
	     whole range or the empty set, so make it
	     +/- [ -, - ].  */
	  if (tree_int_cst_equal (n_low, low)
	      && tree_int_cst_equal (n_high, high))
	    low = high = 0;
	  else
	    in_p = ! in_p;
	}
      else
	low = n_low, high = n_high;

      *p_low = low;
      *p_high = high;
      *p_in_p = in_p;
      return arg0;

    CASE_CONVERT:
    case NON_LVALUE_EXPR:
      if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
	return NULL_TREE;

      if (! INTEGRAL_TYPE_P (arg0_type)
	  || (low != 0 && ! int_fits_type_p (low, arg0_type))
	  || (high != 0 && ! int_fits_type_p (high, arg0_type)))
	return NULL_TREE;

      n_low = low, n_high = high;

      if (n_low != 0)
	n_low = fold_convert_loc (loc, arg0_type, n_low);

      if (n_high != 0)
	n_high = fold_convert_loc (loc, arg0_type, n_high);

      /* If we're converting arg0 from an unsigned type, to exp,
	 a signed type, we will be doing the comparison as unsigned.
	 The tests above have already verified that LOW and HIGH
	 are both positive.

	 So we have to ensure that we will handle large unsigned
	 values the same way that the current signed bounds treat
	 negative values.  */

      if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
	{
	  tree high_positive;
	  tree equiv_type;

	  /* For fixed-point modes, we need to pass the saturating flag
	     as the 2nd parameter.  */
	  if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
	    equiv_type
	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
						TYPE_SATURATING (arg0_type));
	  else
	    equiv_type
	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);

	  /* A range without an upper bound is, naturally, unbounded.
	     Since convert would have cropped a very large value, use
	     the max value for the destination type.  */
	  high_positive
	    = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
	      : TYPE_MAX_VALUE (arg0_type);

	  if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
	    high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
					     fold_convert_loc (loc, arg0_type,
							       high_positive),
					     build_int_cst (arg0_type, 1));

	  /* If the low bound is specified, "and" the range with the
	     range for which the original unsigned value will be
	     positive.  */
	  if (low != 0)
	    {
	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
				  1, fold_convert_loc (loc, arg0_type,
						       integer_zero_node),
				  high_positive))
		return NULL_TREE;

	      in_p = (n_in_p == in_p);
	    }
	  else
	    {
	      /* Otherwise, "or" the range with the range of the input
		 that will be interpreted as negative.  */
	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
				  1, fold_convert_loc (loc, arg0_type,
						       integer_zero_node),
				  high_positive))
		return NULL_TREE;

	      in_p = (in_p != n_in_p);
	    }
	}

      *p_low = n_low;
      *p_high = n_high;
      *p_in_p = in_p;
      return arg0;

    default:
      return NULL_TREE;
    }
}
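
/* Illustrative sketch only (not part of GCC): the encoding used by the
   comparison cases above, for plain ints.  A range is (IN_P, LOW, HIGH)
   where a missing bound is flagged rather than stored.  The type and
   function below are hypothetical.  */

struct example_range
{
  int in_p;			/* 1: inside the range, 0: outside.  */
  int has_low, low;		/* Lower bound, if present.  */
  int has_high, high;		/* Upper bound, if present.  */
};

static struct example_range
example_range_for_gt (int c)
{
  /* "x > c" is encoded as the GT_EXPR case above: "- [-, c]", i.e. being
     outside the range from the lowest value of the type up to C.  */
  struct example_range r;
  r.in_p = 0;
  r.has_low = 0;
  r.low = 0;
  r.has_high = 1;
  r.high = c;
  return r;
}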
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same
   type as the returned expression.  If EXP is not a comparison, we
   will most likely not be returning a useful value and range.  Set
   *STRICT_OVERFLOW_P to true if the return value is only valid
   because signed overflow is undefined; otherwise, do not change
   *STRICT_OVERFLOW_P.  */

tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
	    bool *strict_overflow_p)
{
  enum tree_code code;
  tree arg0, arg1 = NULL_TREE;
  tree exp_type, nexp;
  int in_p;
  tree low, high;
  location_t loc = EXPR_LOCATION (exp);

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = build_int_cst (TREE_TYPE (exp), 0);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);
      arg0 = NULL_TREE;

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  if (TREE_OPERAND_LENGTH (exp) > 0)
	    arg0 = TREE_OPERAND (exp, 0);
	  if (TREE_CODE_CLASS (code) == tcc_binary
	      || TREE_CODE_CLASS (code) == tcc_comparison
	      || (TREE_CODE_CLASS (code) == tcc_expression
		  && TREE_OPERAND_LENGTH (exp) > 1))
	    arg1 = TREE_OPERAND (exp, 1);
	}
      if (arg0 == NULL_TREE)
	break;

      nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
			      &high, &in_p, strict_overflow_p);
      if (nexp == NULL_TREE)
	break;
      exp = nexp;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
						 exp, 0, low, 0))
		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
						    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.  */

tree
build_range_check (location_t loc, tree type, tree exp, int in_p,
		   tree low, tree high)
{
  tree etype = TREE_TYPE (exp), value;

  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (targetm.have_canonicalize_funcptr_for_compare ()
      && TREE_CODE (etype) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
    return NULL_TREE;

  if (! in_p)
    {
      value = build_range_check (loc, type, exp, 1, low, high);
      if (value != 0)
	return invert_truthvalue_loc (loc, value);

      return 0;
    }

  if (low == 0 && high == 0)
    return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);

  if (low == 0)
    return fold_build2_loc (loc, LE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, high));

  if (high == 0)
    return fold_build2_loc (loc, GE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  if (operand_equal_p (low, high, 0))
    return fold_build2_loc (loc, EQ_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
	{
	  etype = unsigned_type_for (etype);
	  high = fold_convert_loc (loc, etype, high);
	  exp = fold_convert_loc (loc, etype, exp);
	}
      return build_range_check (loc, type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      int prec = TYPE_PRECISION (etype);

      if (wi::mask (prec - 1, false, prec) == high)
	{
	  if (TYPE_UNSIGNED (etype))
	    {
	      tree signed_etype = signed_type_for (etype);
	      if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
		etype
		  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
	      else
		etype = signed_etype;
	      exp = fold_convert_loc (loc, etype, exp);
	    }
	  return fold_build2_loc (loc, GT_EXPR, type, exp,
				  build_int_cst (etype, 0));
	}
    }

  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetic for the type of the expression.
     First make sure that arithmetic in this type is valid, then make sure
     that it wraps around.  */
  if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
    etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
					    TYPE_UNSIGNED (etype));

  if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
	 for the type in question, as we rely on this here.  */
      utype = unsigned_type_for (etype);
      maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
			  build_int_cst (TREE_TYPE (maxv), 1), 1);
      minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
				      minv, 1, maxv, 1)))
	etype = utype;
      else
	return 0;
    }

  high = fold_convert_loc (loc, etype, high);
  low = fold_convert_loc (loc, etype, low);
  exp = fold_convert_loc (loc, etype, exp);

  value = const_binop (MINUS_EXPR, high, low);

  if (POINTER_TYPE_P (etype))
    {
      if (value != 0 && !TREE_OVERFLOW (value))
	{
	  low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
	  return build_range_check (loc, type,
				    fold_build_pointer_plus_loc (loc, exp, low),
				    1, build_int_cst (etype, 0), value);
	}
      return 0;
    }

  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (loc, type,
			      fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
			      1, build_int_cst (etype, 0), value);

  return 0;
}
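
/* Illustrative sketch only (not part of GCC): the two arithmetic tricks
   build_range_check relies on, written out by hand.  The function names
   are hypothetical.  */

static int
example_signed_char_trick (unsigned char c)
{
  /* "(c >= 1 && c <= 127)" becomes "(signed char) c > 0": the values
     128..255 reinterpret as negative and so fail the test.  */
  return (signed char) c > 0;
}

static int
example_wraparound_trick (unsigned int c)
{
  /* "(c >= low && c <= high)" becomes "(c - low) <= (high - low)" given
     wrap-around arithmetic: values below LOW wrap to values above
     HIGH - LOW.  Here low = 10 and high = 20.  */
  return c - 10u <= 10u;
}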
/* Return the predecessor of VAL in its type, handling the infinite case.  */

static tree
range_predecessor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
    return 0;
  else
    return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
			build_int_cst (TREE_TYPE (val), 1), 0);
}

/* Return the successor of VAL in its type, handling the infinite case.  */

static tree
range_successor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
    return 0;
  else
    return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
			build_int_cst (TREE_TYPE (val), 1), 0);
}
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

bool
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
	      tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (lowequal
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the second range to the end of the first.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)
	{
	  low = range_successor (high1);
	  high = high0;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* We are in the weird situation where high0 > high1 but
		 high1 has no successor.  Punt.  */
	      return 0;
	    }
	}
      else if (! subset || highequal)
	{
	  low = low0;
	  high = range_predecessor (low1);
	  in_p = 1;
	  if (high == 0)
	    {
	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
	      return 0;
	    }
	}
      else
	return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
	in_p = 0, low = high = 0;
      else
	{
	  low = range_successor (high0);
	  high = high1;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* high1 > high0 but high0 has no successor.  Punt.  */
	      return 0;
	    }
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of the
	 second.  */
      if (no_overlap)
	{
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_successor (high0),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    {
	      /* Canonicalize - [min, x] into - [-, x].  */
	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (low0)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (low0))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (low0,
					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
		      low0 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
			&& integer_zerop (low0))
		      low0 = 0;
		    break;
		  default:
		    break;
		  }

	      /* Canonicalize - [x, max] into - [x, -].  */
	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (high1)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (high1))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (high1,
					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
		      high1 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
						       high1, 1,
						       build_int_cst (TREE_TYPE (high1), 1),
						       1)))
		      high1 = 0;
		    break;
		  default:
		    break;
		  }

	      /* The ranges might be also adjacent between the maximum and
		 minimum values of the given type.  For
		 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
		 return + [x + 1, y - 1].  */
	      if (low0 == 0 && high1 == 0)
		{
		  low = range_successor (high0);
		  high = range_predecessor (low1);
		  if (low == 0 || high == 0)
		    return 0;

		  in_p = 1;
		}
	      else
		return 0;
	    }
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
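
/* Illustrative sketch only (not part of GCC): the "both ranges included"
   case of merge_ranges for finite int bounds, assuming range 0 has already
   been swapped to be the one that starts first.  Returns 1 and stores the
   merged bounds, or 0 when the intersection is empty (the combined test is
   simply false).  The function name is hypothetical.  */

static int
example_merge_inside_ranges (int low0, int high0, int low1, int high1,
			     int *plow, int *phigh)
{
  if (high0 < low1)
    return 0;			/* Disjoint: result is false.  */
  if (high1 <= high0)
    {
      /* Range 1 is a subset: it is the result.  */
      *plow = low1, *phigh = high1;
      return 1;
    }
  /* Otherwise: from the start of the second to the end of the first.  */
  *plow = low1, *phigh = high0;
  return 1;
}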
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is also used to optimize
   A op B ? C : A by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (location_t loc, tree type,
				tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if (!HONOR_SIGNED_ZEROS (element_mode (type))
      && (FLOAT_TYPE_P (TREE_TYPE (arg01))
	  ? real_zerop (arg01)
	  : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
	     /* In the case that A is of the form X-Y, '-A' (arg2) may
		have already been folded to Y-X, check for that.  */
	  || (TREE_CODE (arg1) == MINUS_EXPR
	      && TREE_CODE (arg2) == MINUS_EXPR
	      && operand_equal_p (TREE_OPERAND (arg1, 0),
				  TREE_OPERAND (arg2, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg1, 1),
				  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	tem = fold_convert_loc (loc, arg1_type, arg1);
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
							  negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  break;
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
	if (flag_trapping_math)
	  break;
      case LE_EXPR:
      case LT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  break;
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return negate_expr (fold_convert_loc (loc, type, tem));
      default:
	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (!HONOR_SIGNED_ZEROS (element_mode (type))
      && integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      else if (comp_code == EQ_EXPR)
	return build_zero_cst (type);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (!HONOR_SIGNED_ZEROS (element_mode (type))
      && operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
	 as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
	  || VECTOR_TYPE_P (type)
	  || (! lang_GNU_CXX ()
	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
	  || ! maybe_lvalue_p (arg1)
	  || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
	{
	  comp_type = type;
	  comp_op0 = arg1;
	  comp_op1 = arg2;
	}

      switch (comp_code)
	{
	case EQ_EXPR:
	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
	case NE_EXPR:
	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
	case LE_EXPR:
	case LT_EXPR:
	case UNLE_EXPR:
	case UNLT_EXPR:
	  /* In C++ a ?: expression can be an lvalue, so put the
	     operand which will be used if they are equal first
	     so that we can convert this back to the
	     corresponding COND_EXPR.  */
	  if (!HONOR_NANS (arg1))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
		    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2_loc (loc, MIN_EXPR, comp_type,
				       comp_op1, comp_op0);
	      return pedantic_non_lvalue_loc (loc,
					      fold_convert_loc (loc, type, tem));
	    }
	  break;
	case GE_EXPR:
	case GT_EXPR:
	case UNGE_EXPR:
	case UNGT_EXPR:
	  if (!HONOR_NANS (arg1))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
		    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2_loc (loc, MAX_EXPR, comp_type,
				       comp_op1, comp_op0);
	      return pedantic_non_lvalue_loc (loc,
					      fold_convert_loc (loc, type, tem));
	    }
	  break;
	case UNEQ_EXPR:
	  if (!HONOR_NANS (arg1))
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, arg2));
	  break;
	case LTGT_EXPR:
	  if (!HONOR_NANS (arg1))
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, arg1));
	  break;
	default:
	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	  break;
	}
    }

  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
	if (TREE_CODE (arg1) == INTEGER_CST)
	  break;
	/* We can replace A with C1 in this case.  */
	arg1 = fold_convert_loc (loc, type, arg01);
	return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);

      case LT_EXPR:
	/* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
	   MIN_EXPR, to preserve the signedness of the comparison.  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, tem));
	  }
	break;

      case LE_EXPR:
	/* If C1 is C2 - 1, this is min(A, C2), with the same care
	   as above.  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, tem));
	  }
	break;

      case GT_EXPR:
	/* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
	   MAX_EXPR, to preserve the signedness of the comparison.  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	  }
	break;

      case GE_EXPR:
	/* If C1 is C2 + 1, this is max(A, C2), with the same care as above.  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	  }
	break;

      default:
	break;
      }

  return NULL_TREE;
}
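
/* Illustrative sketch only (not part of GCC): two of the rewrites above,
   shown at the source level for ints, where there are no signed zeros or
   NaNs to worry about.  The function names are hypothetical.  */

static int
example_abs_rewrite (int a)
{
  /* A >= 0 ? A : -A is the same as abs (A).  */
  return a >= 0 ? a : -a;
}

static int
example_min_rewrite (int a, int b)
{
  /* A <= B ? A : B is the same as min (A, B).  */
  return a <= b ? a : b;
}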
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
		false) >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
	       || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  bool strict_overflow_p = false;
  tree tem, lhs, rhs;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when simplifying range test");

  if (!INTEGRAL_TYPE_P (type))
    return 0;

  lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
  rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (loc, type,
					 lhs != 0 ? lhs
					 : rhs != 0 ? rhs : integer_zero_node,
					 in_p, low, high))))
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
      return or_op ? invert_truthvalue_loc (loc, tem) : tem;
    }

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
	   && lhs != 0 && rhs != 0
	   && (code == TRUTH_ANDIF_EXPR
	       || code == TRUTH_ORIF_EXPR)
	   && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build2_loc (loc, code == TRUTH_ANDIF_EXPR
			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			   type, op0, op1);

      else if (!lang_hooks.decls.global_bindings_p ()
	       && !CONTAINS_PLACEHOLDER_P (lhs))
	{
	  tree common = save_expr (lhs);

	  if (0 != (lhs = build_range_check (loc, type, common,
					     or_op ? ! in0_p : in0_p,
					     low0, high0))
	      && (0 != (rhs = build_range_check (loc, type, common,
						 or_op ? ! in1_p : in1_p,
						 low1, high1))))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (warnmsg,
				       WARN_STRICT_OVERFLOW_COMPARISON);
	      return build2_loc (loc, code == TRUTH_ANDIF_EXPR
				 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
				 type, lhs, rhs);
	    }
	}
    }

  return 0;
}
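
/* Illustrative sketch only (not part of GCC): the classic kind of range
   test this function looks for.  The function name is hypothetical.  */

static int
example_is_digit (unsigned char ch)
{
  /* "ch >= '0' && ch <= '9'" merges into a single unsigned comparison.  */
  return (unsigned) (ch - '0') <= 9;
}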
/* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is signed-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (signed_type_for (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
			fold_convert (TREE_TYPE (c), mask));
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
}
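
/* Illustrative sketch only (not part of GCC): the sign-bit gymnastics in
   unextend, redone for a 32-bit int and 1 <= P < 32.  Assumes arithmetic
   right shifts of signed values, as unextend itself arranges by converting
   to a signed type.  The function name is hypothetical.  */

static int
example_unextend (int c, int p)
{
  /* Grab the field's sign bit (bit P-1) of the P-bit value C.  */
  unsigned int sign = ((unsigned int) c >> (p - 1)) & 1u;

  /* Park it in the top bit, then arithmetic-shift it back down so it
     covers all the "extra" bits P .. 31.  */
  int temp = (int) (sign << 31) >> (31 - p);

  /* XOR clears the extra bits exactly when C was sign-extended, which is
     what lets a later zero test work.  */
  return c ^ temp;
}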
/* For an expression that has the form
     (A && B) || ~A
   or
     (A || B) && ~A,
   we can drop one of the inner expressions and simplify to
     A || B
   or
     A && B

   LOC is the location of the resulting expression.  OP is the inner
   logical operation; the left-hand side in the examples above, while CMPOP
   is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
   removing a condition that guards another, as in
     (A != NULL && A->...) || A == NULL
   which we must not transform.  If RHS_ONLY is true, only eliminate the
   right-most operand of the inner logical operation.  */

static tree
merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
				 bool rhs_only)
{
  tree type = TREE_TYPE (cmpop);
  enum tree_code code = TREE_CODE (cmpop);
  enum tree_code truthop_code = TREE_CODE (op);
  tree lhs = TREE_OPERAND (op, 0);
  tree rhs = TREE_OPERAND (op, 1);
  tree orig_lhs = lhs, orig_rhs = rhs;
  enum tree_code rhs_code = TREE_CODE (rhs);
  enum tree_code lhs_code = TREE_CODE (lhs);
  enum tree_code inv_code;

  if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
    return NULL_TREE;

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return NULL_TREE;

  if (rhs_code == truthop_code)
    {
      tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
      if (newrhs != NULL_TREE)
	{
	  rhs = newrhs;
	  rhs_code = TREE_CODE (rhs);
	}
    }
  if (lhs_code == truthop_code && !rhs_only)
    {
      tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
      if (newlhs != NULL_TREE)
	{
	  lhs = newlhs;
	  lhs_code = TREE_CODE (lhs);
	}
    }

  inv_code = invert_tree_comparison (code, HONOR_NANS (type));
  if (inv_code == rhs_code
      && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return lhs;
  if (!rhs_only && inv_code == lhs_code
      && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return rhs;
  if (rhs != orig_rhs || lhs != orig_lhs)
    return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
			    lhs, rhs);
  return NULL_TREE;
}
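
/* Illustrative sketch only (not part of GCC): the shape of the
   simplification above at the source level.  With A standing for "a == b",
   the form (A && C) || !A drops the inner A.  The function name is
   hypothetical.  */

static int
example_drop_opposite_arm (int a, int b, int c)
{
  /* ((a == b && c) || a != b) simplifies to (c || a != b): when a == b the
     two expressions agree on c, and when a != b both are true.  */
  return c || a != b;
}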
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
		    tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
  machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  HOST_WIDE_INT first_bit, end_bit;
  int volatilep;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
		    build_int_cst (TREE_TYPE (lhs), 0));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
		    build_int_cst (TREE_TYPE (rhs), 0));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      if (operand_equal_p (ll_arg, rl_arg, 0)
	  && operand_equal_p (lr_arg, rr_arg, 0))
	{
	  result = combine_comparisons (loc, code, lcode, rcode,
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
      else if (operand_equal_p (ll_arg, rr_arg, 0)
	       && operand_equal_p (lr_arg, rl_arg, 0))
	{
	  result = combine_comparisons (loc, code, lcode,
					swap_tree_comparison (rcode),
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  */

  if (BRANCH_COST (optimize_function_for_speed_p (cfun),
		   false) >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
	  && lcode == NE_EXPR && integer_zerop (lr_arg)
	  && rcode == NE_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2_loc (loc, NE_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2_loc (loc, EQ_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
  volatilep = 0;
  ll_inner = decode_field_reference (loc, &ll_arg,
				     &ll_bitsize, &ll_bitpos, &ll_mode,
				     &ll_unsignedp, &ll_reversep, &volatilep,
				     &ll_mask, &ll_and_mask);
  lr_inner = decode_field_reference (loc, &lr_arg,
				     &lr_bitsize, &lr_bitpos, &lr_mode,
				     &lr_unsignedp, &lr_reversep, &volatilep,
				     &lr_mask, &lr_and_mask);
  rl_inner = decode_field_reference (loc, &rl_arg,
				     &rl_bitsize, &rl_bitpos, &rl_mode,
				     &rl_unsignedp, &rl_reversep, &volatilep,
				     &rl_mask, &rl_and_mask);
  rr_inner = decode_field_reference (loc, &rr_arg,
				     &rr_bitsize, &rr_bitpos, &rr_mode,
				     &rr_unsignedp, &rr_reversep, &volatilep,
				     &rr_mask, &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep
      || ll_reversep != rl_reversep
      || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    {
      l_const = lr_arg, r_const = rr_arg;
      lr_reversep = ll_reversep;
    }
  else if (lr_reversep != rr_reversep
	   || lr_inner == 0 || rr_inner == 0
	   || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
	{
	  /* Make the left operand unsigned, since we are only interested
	     in the value of one bit.  Otherwise we are doing the wrong
	     thing below.  */
	  ll_unsignedp = 1;
	  l_const = ll_mask;
	}
      else
	return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
	{
	  rl_unsignedp = 1;
	  r_const = rl_mask;
	}
      else
	return 0;
    }

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
			  volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
			 size_int (xll_bitpos));
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
			 size_int (xrl_bitpos));

  if (l_const)
    {
      l_const = fold_convert_loc (loc, lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,
							 lntype, ll_mask))))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }
  if (r_const)
    {
      r_const = fold_convert_loc (loc, lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,
							 lntype, rl_mask))))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
	  /* Make sure the two fields on the right
	     correspond to the left without being swapped.  */
	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
	return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
			      volatilep);
      if (rnmode == VOIDmode)
	return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
	{
	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
	}

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
							    rntype, lr_mask),
			     size_int (xlr_bitpos));
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
							    rntype, rr_mask),
			     size_int (xrr_bitpos));

      /* Make a mask that corresponds to both fields being compared.
	 Do this for both items being compared.  If the operands are the
	 same size and the bits being compared are in the same position
	 then we can do this by masking both and comparing the masked
	 results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
	{
	  lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
				    lntype, lnbitsize, lnbitpos,
				    ll_unsignedp || rl_unsignedp, ll_reversep);
	  if (! all_ones_mask_p (ll_mask, lnbitsize))
	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

	  rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
				    rntype, rnbitsize, rnbitpos,
				    lr_unsignedp || rr_unsignedp, lr_reversep);
	  if (! all_ones_mask_p (lr_mask, rnbitsize))
	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
	}

      /* There is still another way we can do something:  If both pairs of
	 fields being compared are adjacent, we may be able to make a wider
	 field containing them both.

	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
	 the mask must be shifted to account for the shift done by
	 make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
	   && lr_bitsize + lr_bitpos == rr_bitpos)
	  || (ll_bitpos == rl_bitpos + rl_bitsize
	      && lr_bitpos == rr_bitpos + rr_bitsize))
	{
	  tree type;

	  lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
				    ll_bitsize + rl_bitsize,
				    MIN (ll_bitpos, rl_bitpos),
				    ll_unsignedp, ll_reversep);
	  rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
				    lr_bitsize + rr_bitsize,
				    MIN (lr_bitpos, rr_bitpos),
				    lr_unsignedp, lr_reversep);

	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
				 size_int (MIN (xll_bitpos, xrl_bitpos)));
	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
				 size_int (MIN (xlr_bitpos, xrr_bitpos)));

	  /* Convert to the smaller type before masking out unwanted bits.  */
	  type = lntype;
	  if (lntype != rntype)
	    {
	      if (lnbitsize > rnbitsize)
		{
		  lhs = fold_convert_loc (loc, rntype, lhs);
		  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
		  type = rntype;
		}
	      else if (lnbitsize < rnbitsize)
		{
		  rhs = fold_convert_loc (loc, lntype, rhs);
		  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
		  type = lntype;
		}
	    }

	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
	}

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
			   const_binop (BIT_AND_EXPR, result, r_const)) != 1)
    {
      if (wanted_code == NE_EXPR)
	{
	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
	  return constant_boolean_node (true, truth_type);
	}
      else
	{
	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
	  return constant_boolean_node (false, truth_type);
	}
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (loc, ll_inner, ll_arg,
			       lntype, lnbitsize, lnbitpos,
			       ll_unsignedp || rl_unsignedp, ll_reversep);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);

  return build2_loc (loc, wanted_code, truth_type, result,
		     const_binop (BIT_IOR_EXPR, l_const, r_const));
}
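
/* Illustrative sketch only (not part of GCC): the effect of merging two
   bit-field comparisons by hand.  Assumes field A is the low nibble and
   field B the high nibble of the same byte; the function name is
   hypothetical.  */

static int
example_merged_compare (unsigned char word)
{
  /* "a == 2 && b == 4" becomes a single masked comparison of the byte
     spanning both fields against the merged constant.  */
  return (word & 0xff) == (2 | (4 << 4));
}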
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
			    tree op0, tree op1)
{
  tree arg0 = op0;
  enum tree_code op_code;
  tree comp_const;
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_OVERFLOW (minmax_const))
    return NULL_TREE;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (code)
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      {
	tree tem
	  = optimize_minmax_comparison (loc,
					invert_tree_comparison (code, false),
					type, op0, op1);
	if (tem)
	  return invert_truthvalue_loc (loc, tem);
	return NULL_TREE;
      }

    case GE_EXPR:
      return
	fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
			 optimize_minmax_comparison
			 (loc, EQ_EXPR, type, arg0, comp_const),
			 optimize_minmax_comparison
			 (loc, GT_EXPR, type, arg0, comp_const));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
	/* MAX (X, 0) == 0  ->  X <= 0  */
	return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR && consts_lt)
	/* MAX (X, 0) == 5  ->  X == 5   */
	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) == -1  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else if (consts_equal)
	/* MIN (X, 0) == 0  ->  X >= 0  */
	return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);

      else if (consts_lt)
	/* MIN (X, 0) == 5  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
	/* MIN (X, 0) == -1  ->  X == -1  */
	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
	/* MAX (X, 0) > 0  ->  X > 0
	   MAX (X, 0) > 5  ->  X > 5  */
	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) > -1  ->  true  */
	return omit_one_operand_loc (loc, type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
	/* MIN (X, 0) > 0  ->  false
	   MIN (X, 0) > 5  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
	/* MIN (X, 0) > -1  ->  X > -1  */
	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

    default:
      return NULL_TREE;
    }
}
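
/* Illustrative sketch only (not part of GCC): one of the rewrites listed
   in the comments above, written out for ints.  The function name is
   hypothetical.  */

static int
example_max_gt (int x)
{
  int m = x > 0 ? x : 0;	/* MAX (X, 0) */
  /* MAX (X, 0) > 5 folds to X > 5, since the 0 arm can never exceed 5.  */
  return m > 5;
}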
/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.

   We set *STRICT_OVERFLOW_P to true if the return value depends on
   signed overflow being undefined.  Otherwise we do not change
   *STRICT_OVERFLOW_P.  */

static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
		bool *strict_overflow_p)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  static int depth;
  tree ret;

  if (depth > 3)
    return NULL;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
  depth--;

  return ret;
}
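
/* Illustrative sketch only (not part of GCC): the rewrite described in the
   comment above extract_muldiv, at the source level.  It is valid for
   signed ints because signed overflow is undefined, so the compiler may
   assume the original sum did not wrap.  The function name is
   hypothetical.  */

static int
example_extract_muldiv (int x, int y)
{
  /* ((x * 8) + (y * 16)) / 4 can be folded to (x * 2) + (y * 4): the
     division is distributed over the sum because both addends are known
     multiples of 4.  */
  return x * 2 + y * 4;
}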
6114 extract_muldiv_1 (tree t
, tree c
, enum tree_code code
, tree wide_type
,
6115 bool *strict_overflow_p
)
6117 tree type
= TREE_TYPE (t
);
6118 enum tree_code tcode
= TREE_CODE (t
);
6119 tree ctype
= (wide_type
!= 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type
))
6120 > GET_MODE_SIZE (TYPE_MODE (type
)))
6121 ? wide_type
: type
);
6123 int same_p
= tcode
== code
;
6124 tree op0
= NULL_TREE
, op1
= NULL_TREE
;
6125 bool sub_strict_overflow_p
;
6127 /* Don't deal with constants of zero here; they confuse the code below. */
6128 if (integer_zerop (c
))
6131 if (TREE_CODE_CLASS (tcode
) == tcc_unary
)
6132 op0
= TREE_OPERAND (t
, 0);
6134 if (TREE_CODE_CLASS (tcode
) == tcc_binary
)
6135 op0
= TREE_OPERAND (t
, 0), op1
= TREE_OPERAND (t
, 1);
6137 /* Note that we need not handle conditional operations here since fold
6138 already handles those cases. So just do arithmetic here. */
6142 /* For a constant, we can always simplify if we are a multiply
6143 or (for divide and modulus) if it is a multiple of our constant. */
6144 if (code
== MULT_EXPR
6145 || wi::multiple_of_p (t
, c
, TYPE_SIGN (type
)))
6147 tree tem
= const_binop (code
, fold_convert (ctype
, t
),
6148 fold_convert (ctype
, c
));
6149 /* If the multiplication overflowed, we lost information on it.
6150 See PR68142 and PR69845. */
6151 if (TREE_OVERFLOW (tem
))
    CASE_CONVERT: case NON_LVALUE_EXPR:
      /* If op0 is an expression ... */
      if ((COMPARISON_CLASS_P (op0)
           || UNARY_CLASS_P (op0)
           || BINARY_CLASS_P (op0)
           || VL_EXP_CLASS_P (op0)
           || EXPRESSION_CLASS_P (op0))
          /* ... and has wrapping overflow, and its type is smaller
             than ctype, then we cannot pass through as widening.  */
          && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
                && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
               && (TYPE_PRECISION (ctype)
                   > TYPE_PRECISION (TREE_TYPE (op0))))
              /* ... or this is a truncation (t is narrower than op0),
                 then we cannot pass through this narrowing.  */
              || (TYPE_PRECISION (type)
                  < TYPE_PRECISION (TREE_TYPE (op0)))
              /* ... or signedness changes for division or modulus,
                 then we cannot pass through this conversion.  */
              || (code != MULT_EXPR
                  && (TYPE_UNSIGNED (ctype)
                      != TYPE_UNSIGNED (TREE_TYPE (op0))))
              /* ... or has undefined overflow while the converted to
                 type has not, we cannot do the operation in the inner type
                 as that would introduce undefined overflow.  */
              || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
                   && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
                  && !TYPE_OVERFLOW_UNDEFINED (type))))
        break;

      /* Pass the constant down and see if we can make a simplification.  If
         we can, replace this expression with the inner simplification for
         possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
          && TREE_CODE (t2) == INTEGER_CST
          && !TREE_OVERFLOW (t2)
          && (0 != (t1 = extract_muldiv (op0, t2, code,
                                         code == MULT_EXPR
                                         ? ctype : NULL_TREE,
                                         strict_overflow_p))))
        return fold_convert (ctype, t1);
      break;
    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
         must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
        {
          tree cstype = (*signed_type_for) (ctype);
          if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
              != 0)
            {
              t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
              return fold_convert (ctype, t1);
            }
          break;
        }

      /* If the constant is negative, we cannot simplify this.  */
      if (tree_int_cst_sgn (c) == -1)
        break;
      /* FALLTHROUGH */
    case NEGATE_EXPR:
      /* For division and modulus, type can't be unsigned, as e.g.
         (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
         For signed types, even with wrapping overflow, this is fine.  */
      if (code != MULT_EXPR && TYPE_UNSIGNED (type))
        break;

      if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
          != 0)
        return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
      break;
    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
         this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
        break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      sub_strict_overflow_p = false;
      if ((t1 = extract_muldiv (op0, c, code, wide_type,
                                &sub_strict_overflow_p)) != 0
          && (t2 = extract_muldiv (op1, c, code, wide_type,
                                   &sub_strict_overflow_p)) != 0)
        {
          if (tree_int_cst_sgn (c) < 0)
            tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
                              fold_convert (ctype, t2));
        }
      break;
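      /* Note the sign flip above: dividing by a negative constant
         reverses the ordering, so MIN (a, b) / -5 must become
         MAX (a / -5, b / -5) rather than MIN of the quotients.  */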
    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
         or floor division, by a power of two, so we can treat it that
         way unless the multiplier or divisor overflows.  Signed
         left-shift overflow is implementation-defined rather than
         undefined in C90, so do not convert signed left shift into
         multiplication.  */
      if (TREE_CODE (op1) == INTEGER_CST
          && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
          /* const_binop may not detect overflow correctly,
             so check for it explicitly here.  */
          && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
          && 0 != (t1 = fold_convert (ctype,
                                      const_binop (LSHIFT_EXPR,
                                                   size_one_node,
                                                   op1)))
          && !TREE_OVERFLOW (t1))
        return extract_muldiv (build2 (tcode == LSHIFT_EXPR
                                       ? MULT_EXPR : FLOOR_DIV_EXPR,
                                       ctype,
                                       fold_convert (ctype, op0),
                                       t1),
                               c, code, wide_type, strict_overflow_p);
      break;
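      /* Illustrative case: for unsigned X, (X << 3) / 4 is rebuilt here
         as (X * 8) / 4, which the MULT_EXPR handling can then reduce
         to X * 2.  */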
    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
         can return a new PLUS or MINUS.  If we can't, the only remaining
         cases where we can do anything are if the second operand is a
         constant.  */
      sub_strict_overflow_p = false;
      t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
      t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
      if (t1 != 0 && t2 != 0
          && (code == MULT_EXPR
              /* If not multiplication, we can only do this if both operands
                 are divisible by c.  */
              || (multiple_of_p (ctype, op0, c)
                  && multiple_of_p (ctype, op1, c))))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
                              fold_convert (ctype, t2));
        }

      /* If this was a subtraction, negate OP1 and set it to be an addition.
         This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
        {
          tcode = PLUS_EXPR, op1 = negate_expr (op1);
          /* If OP1 was not easily negatable, the constant may be OP0.  */
          if (TREE_CODE (op0) == INTEGER_CST)
            {
              std::swap (op0, op1);
              std::swap (t1, t2);
            }
        }

      if (TREE_CODE (op1) != INTEGER_CST)
        break;

      /* If either OP1 or C are negative, this optimization is not safe for
         some of the division and remainder types while for others we need
         to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
        {
          if (code == CEIL_DIV_EXPR)
            code = FLOOR_DIV_EXPR;
          else if (code == FLOOR_DIV_EXPR)
            code = CEIL_DIV_EXPR;
          else if (code != MULT_EXPR
                   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
            break;
        }

      /* If it's a multiply or a division/modulus operation of a multiple
         of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
          || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
        {
          op1 = const_binop (code, fold_convert (ctype, op1),
                             fold_convert (ctype, c));
          /* We allow the constant to overflow with wrapping semantics.  */
          if (op1 == 0
              || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
            break;
        }
      else
        break;

      /* If we have an unsigned type, we cannot widen the operation since it
         will change the result if the original computation overflowed.  */
      if (TYPE_UNSIGNED (ctype) && ctype != type)
        break;

      /* If we were able to eliminate our operation from the first side,
         apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
        return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
         apply the distributive law to commute the multiply and addition
         if the multiplication of the constants doesn't overflow
         and overflow is defined.  With undefined overflow
         op0 * c might overflow, while (op0 + orig_op1) * c doesn't.  */
      if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
        return fold_build2 (tcode, ctype,
                            fold_build2 (code, ctype,
                                         fold_convert (ctype, op0),
                                         fold_convert (ctype, c)),
                            op1);

      break;
    case MULT_EXPR:
      /* We have a special case here if we are doing something like
         (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
           || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
          /* If the multiplication can overflow we cannot optimize this.  */
          && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
          && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
          && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
        {
          *strict_overflow_p = true;
          return omit_one_operand (type, integer_zero_node, op0);
        }

      /* ... fall through ...  */
    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
         new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
         do something only if the second operand is a constant.  */
      if (same_p
          && (t1 = extract_muldiv (op0, c, code, wide_type,
                                   strict_overflow_p)) != 0)
        return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
                            fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
               && (t1 = extract_muldiv (op1, c, code, wide_type,
                                        strict_overflow_p)) != 0)
        return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
                            fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
        return 0;

      /* If these are the same operation types, we can associate them
         assuming no overflow.  */
      if (tcode == code)
        {
          bool overflow_p = false;
          bool overflow_mul_p;
          signop sign = TYPE_SIGN (ctype);
          unsigned prec = TYPE_PRECISION (ctype);
          wide_int mul = wi::mul (wi::to_wide (op1, prec),
                                  wi::to_wide (c, prec),
                                  sign, &overflow_mul_p);
          overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
          if (overflow_mul_p
              && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
            overflow_p = true;
          if (!overflow_p)
            return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
                                wide_int_to_tree (ctype, mul));
        }

      /* If these operations "cancel" each other, we have the main
         optimizations of this pass, which occur when either constant is a
         multiple of the other, in which case we replace this with either an
         operation or CODE or TCODE.

         If we have an unsigned type, we cannot do this since it will change
         the result if the original computation overflowed.  */
      if (TYPE_OVERFLOW_UNDEFINED (ctype)
          && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
              || (tcode == MULT_EXPR
                  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
                  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
                  && code != MULT_EXPR)))
        {
          if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
            {
              if (TYPE_OVERFLOW_UNDEFINED (ctype))
                *strict_overflow_p = true;
              return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
                                  fold_convert (ctype,
                                                const_binop (TRUNC_DIV_EXPR,
                                                             op1, c)));
            }
          else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
            {
              if (TYPE_OVERFLOW_UNDEFINED (ctype))
                *strict_overflow_p = true;
              return fold_build2 (code, ctype, fold_convert (ctype, op0),
                                  fold_convert (ctype,
                                                const_binop (TRUNC_DIV_EXPR,
                                                             c, op1)));
            }
        }
      break;

    default:
      break;
    }

  return 0;
}
/* Return a node which has the indicated constant VALUE (either 0 or
   1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
   and is of the indicated TYPE.  */

tree
constant_boolean_node (bool value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else if (TREE_CODE (type) == VECTOR_TYPE)
    return build_vector_from_val (type,
                                  build_int_cst (TREE_TYPE (type),
                                                 value ? -1 : 0));
  else
    return fold_convert (type, value ? integer_one_node : integer_zero_node);
}
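/* For example, constant_boolean_node (true, boolean_type_node) is simply
   boolean_true_node, while for a vector comparison type it builds the
   all-ones vector {-1,-1,...} described in the comment above.  */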
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (location_t loc,
                                     enum tree_code code,
                                     tree type, tree op0, tree op1,
                                     tree cond, tree arg, int cond_first_p)
{
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;
  enum tree_code cond_code = COND_EXPR;

  if (TREE_CODE (cond) == COND_EXPR
      || TREE_CODE (cond) == VEC_COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an expression, then it does not make
         sense to try to perform a logical or arithmetic operation
         involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
        lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
        rhs = false_value;
    }
  else if (!(TREE_CODE (type) != VECTOR_TYPE
             && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }
  else
    /* Detect the case of mixing vector and scalar types - bail out.  */
    return NULL_TREE;

  if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
    cond_code = VEC_COND_EXPR;

  /* This transformation is only worthwhile if we don't have to wrap ARG
     in a SAVE_EXPR and the operation can be simplified without recursing
     on at least one of the branches once its pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg)
      && (TREE_SIDE_EFFECTS (arg)
          || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
          || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
    return NULL_TREE;

  arg = fold_convert_loc (loc, arg_type, arg);
  if (lhs == 0)
    {
      true_value = fold_convert_loc (loc, cond_type, true_value);
      if (cond_first_p)
        lhs = fold_build2_loc (loc, code, type, true_value, arg);
      else
        lhs = fold_build2_loc (loc, code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert_loc (loc, cond_type, false_value);
      if (cond_first_p)
        rhs = fold_build2_loc (loc, code, type, false_value, arg);
      else
        rhs = fold_build2_loc (loc, code, type, arg, false_value);
    }

  /* Check that we have simplified at least one of the branches.  */
  if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
    return NULL_TREE;

  return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
}
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

bool
fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (element_mode (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (element_mode (type)))
    return true;

  /* In a vector or complex, we would need to check the sign of all zeros.  */
  if (TREE_CODE (addend) != REAL_CST)
    return false;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
}
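/* Concretely: when signed zeros are honored, X + 0.0 cannot be folded to
   X because (-0.0) + 0.0 yields +0.0, whereas X - 0.0 preserves -0.0 and
   may be folded, unless rounding towards -infinity is in effect.  */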
/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_div_compare (location_t loc,
                  enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  signop sign = TYPE_SIGN (TREE_TYPE (arg0));
  bool neg_overflow = false;
  bool overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1);  */
  wide_int val = wi::mul (arg01, arg1, sign, &overflow);
  prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
  neg_overflow = false;

  if (sign == UNSIGNED)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
                             build_int_cst (TREE_TYPE (arg01), 1));
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp).  */
      val = wi::add (prod, tmp, sign, &overflow);
      hi = force_fit_type (TREE_TYPE (arg00), val,
                           -1, overflow | TREE_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
                             build_int_cst (TREE_TYPE (arg01), 1));
      switch (tree_int_cst_sgn (arg1))
        {
        case -1:
          neg_overflow = true;
          lo = int_const_binop (MINUS_EXPR, prod, tmp);
          hi = prod;
          break;

        case 0:
          lo = fold_negate_const (tmp, TREE_TYPE (arg0));
          hi = tmp;
          break;

        case 1:
          hi = int_const_binop (PLUS_EXPR, prod, tmp);
          lo = prod;
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01,
                             build_int_cst (TREE_TYPE (arg01), 1));
      switch (tree_int_cst_sgn (arg1))
        {
        case -1:
          hi = int_const_binop (MINUS_EXPR, prod, tmp);
          lo = prod;
          break;

        case 0:
          hi = fold_negate_const (tmp, TREE_TYPE (arg0));
          lo = tmp;
          break;

        case 1:
          neg_overflow = true;
          lo = int_const_binop (PLUS_EXPR, prod, tmp);
          hi = prod;
          break;

        default:
          gcc_unreachable ();
        }
    }

  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
        return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
        return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
        return omit_one_operand_loc (loc, type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
        return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
        return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
        {
          tmp = neg_overflow ? integer_zero_node : integer_one_node;
          return omit_one_operand_loc (loc, type, tmp, arg00);
        }
      return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
        {
          tmp = neg_overflow ? integer_zero_node : integer_one_node;
          return omit_one_operand_loc (loc, type, tmp, arg00);
        }
      return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
        {
          tmp = neg_overflow ? integer_one_node : integer_zero_node;
          return omit_one_operand_loc (loc, type, tmp, arg00);
        }
      return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
        {
          tmp = neg_overflow ? integer_one_node : integer_zero_node;
          return omit_one_operand_loc (loc, type, tmp, arg00);
        }
      return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}
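/* Worked example: for signed x, x / 4 == 10 becomes the range check
   40 <= x && x <= 43, since exactly those values truncate to 10, and
   x / 4 < 10 becomes x < 40 via the LT_EXPR arm above.  */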
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of the test
   using a sign test.  Otherwise return NULL.  TYPE is the desired
   result type.  */

static tree
fold_single_bit_test_into_sign_test (location_t loc,
                                     enum tree_code code, tree arg0, tree arg1,
                                     tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      /* If we have (A & C) != 0 where C is the sign bit of A, convert
         this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      if (arg00 != NULL_TREE
          /* This is only a win if casting to a signed type is cheap,
             i.e. when arg00's type is not a partial mode.  */
          && TYPE_PRECISION (TREE_TYPE (arg00))
             == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
        {
          tree stype = signed_type_for (TREE_TYPE (arg00));
          return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
                                  result_type,
                                  fold_convert_loc (loc, stype, arg00),
                                  build_int_cst (stype, 0));
        }
    }

  return NULL_TREE;
}
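/* For example, with a 32-bit int x, (x & 0x80000000) != 0 tests exactly
   the sign bit and is rewritten above as x < 0; the == 0 form becomes
   x >= 0.  */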
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */

tree
fold_single_bit_test (location_t loc, enum tree_code code,
                      tree arg0, tree arg1, tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem, one;

      /* First, see if we can fold the single bit test into a sign-bit
         test.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
                                                 result_type);
      if (tem)
        return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
         convert that into ((A >> C2) & 1).  Where C2 = log2(C).
         Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && bitnum < TYPE_PRECISION (type)
          && wi::ltu_p (TREE_OPERAND (inner, 1),
                        TYPE_PRECISION (type) - bitnum))
        {
          bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
                      && !flag_syntax_only) ? 0 : 1;

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert_loc (loc, intermediate_type, inner);

      if (bitnum != 0)
        inner = build2 (RSHIFT_EXPR, intermediate_type,
                        inner, size_int (bitnum));

      one = build_int_cst (intermediate_type, 1);

      if (code == EQ_EXPR)
        inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type,
                                 inner, one);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);

      /* Make sure to return the proper type.  */
      inner = fold_convert_loc (loc, result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
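/* For example, (x & 8) != 0 becomes ((x >> 3) & 1), and (x & 8) == 0
   becomes (((x >> 3) ^ 1) & 1), with the XOR inserted before the final
   AND as in the EQ_EXPR handling above.  */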
/* Check whether we are allowed to reorder operands arg0 and arg1,
   such that the evaluation of arg1 occurs before arg0.  */

static bool
reorder_operands_p (const_tree arg0, const_tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
         && ! TREE_SIDE_EFFECTS (arg1);
}
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */

bool
tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
{
  if (CONSTANT_CLASS_P (arg1))
    return 0;
  if (CONSTANT_CLASS_P (arg0))
    return 1;

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  /* It is preferable to swap two SSA_NAME to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return 1;

  /* Put SSA_NAMEs last.  */
  if (TREE_CODE (arg1) == SSA_NAME)
    return 0;
  if (TREE_CODE (arg0) == SSA_NAME)
    return 1;

  /* Put variables last.  */
  if (DECL_P (arg1))
    return 0;
  if (DECL_P (arg0))
    return 1;

  return 0;
}
/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */

static tree
fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
{
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;

  if (TREE_CODE (bound) == LT_EXPR)
    a = TREE_OPERAND (bound, 0);
  else if (TREE_CODE (bound) == GT_EXPR)
    a = TREE_OPERAND (bound, 1);
  else
    return NULL_TREE;

  typea = TREE_TYPE (a);
  if (!INTEGRAL_TYPE_P (typea)
      && !POINTER_TYPE_P (typea))
    return NULL_TREE;

  if (TREE_CODE (ineq) == LT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 1);
      y = TREE_OPERAND (ineq, 0);
    }
  else if (TREE_CODE (ineq) == GT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 0);
      y = TREE_OPERAND (ineq, 1);
    }
  else
    return NULL_TREE;

  if (TREE_TYPE (a1) != typea)
    return NULL_TREE;

  if (POINTER_TYPE_P (typea))
    {
      /* Convert the pointer types into integer before taking the difference.  */
      tree ta = fold_convert_loc (loc, ssizetype, a);
      tree ta1 = fold_convert_loc (loc, ssizetype, a1);
      diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
    }
  else
    diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);

  if (!diff || !integer_onep (diff))
    return NULL_TREE;

  return fold_build2_loc (loc, GE_EXPR, type, a, y);
}
/* Fold a sum or difference of at least one multiplication.
   Returns the folded tree or NULL if no simplification could be made.  */

static tree
fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
                          tree arg0, tree arg1)
{
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else if (TREE_CODE (arg0) == INTEGER_CST)
    {
      arg00 = build_one_cst (type);
      arg01 = arg0;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
        return NULL_TREE;
      arg00 = arg0;
      arg01 = build_one_cst (type);
    }
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else if (TREE_CODE (arg1) == INTEGER_CST)
    {
      arg10 = build_one_cst (type);
      /* As we canonicalize A - 2 to A + -2 get rid of that sign for
         the purpose of this canonicalization.  */
      if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
          && negate_expr_p (arg1)
          && code == PLUS_EXPR)
        {
          arg11 = negate_expr (arg1);
          code = MINUS_EXPR;
        }
      else
        arg11 = arg1;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
        return NULL_TREE;
      arg10 = arg1;
      arg11 = build_one_cst (type);
    }
  same = NULL_TREE;

  if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
  else if (tree_fits_shwi_p (arg01)
           && tree_fits_shwi_p (arg11))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = tree_to_shwi (arg01);
      int11 = tree_to_shwi (arg11);

      /* Move min of absolute values to int11.  */
      if (absu_hwi (int01) < absu_hwi (int11))
        {
          tmp = int01, int01 = int11, int11 = tmp;
          alt0 = arg00, arg00 = arg10, arg10 = alt0;
          maybe_same = alt0;
          swap = true;
        }
      else
        maybe_same = arg11;

      if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
          /* The remainder should not be a constant, otherwise we
             end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
             increased the number of multiplications necessary.  */
          && TREE_CODE (arg10) != INTEGER_CST)
        {
          alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
                                  build_int_cst (TREE_TYPE (arg00),
                                                 int01 / int11));
          alt1 = arg10;
          same = maybe_same;
          if (swap)
            maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
        }
    }

  if (same)
    return fold_build2_loc (loc, MULT_EXPR, type,
                            fold_build2_loc (loc, code, type,
                                             fold_convert_loc (loc, type, alt0),
                                             fold_convert_loc (loc, type, alt1)),
                            fold_convert_loc (loc, type, same));

  return NULL_TREE;
}
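/* Two examples of the above: a * c + b * c folds to (a + b) * c via the
   identical-multiplicand checks, and i * 12 + j * 4 folds to
   (i * 3 + j) * 4 via the common power-of-two factor path.  */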
/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  if ((off == -1 && total_bytes > len)
      || off >= total_bytes)
    return 0;
  if (off == -1)
    off = 0;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
         number of bytes.  */
      value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);

      if (total_bytes > UNITS_PER_WORD)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      if (offset >= off
          && offset - off < len)
        ptr[offset - off] = value;
    }
  return MIN (len, total_bytes - off);
}
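/* Example: encoding the 32-bit INTEGER_CST 0x01020304 stores the bytes
   04 03 02 01 into PTR on a little-endian target, and 01 02 03 04 on a
   big-endian target.  */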
/* Subroutine of native_encode_expr.  Encode the FIXED_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
{
  tree type = TREE_TYPE (expr);
  machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  FIXED_VALUE_TYPE value;
  tree i_value, i_type;

  if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return 0;

  i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);

  if (NULL_TREE == i_type
      || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
    return 0;

  value = TREE_FIXED_CST (expr);
  i_value = double_int_to_tree (i_type, value.data);

  return native_encode_int (i_value, ptr, len, off);
}
/* Subroutine of native_encode_expr.  Encode the REAL_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words, bitpos;
  unsigned char value;

  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  long tmp[6];

  if ((off == -1 && total_bytes > len)
      || off >= total_bytes)
    return 0;
  if (off == -1)
    off = 0;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));

  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));

      if (UNITS_PER_WORD < 4)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
      if (offset >= off
          && offset - off < len)
        ptr[offset - off] = value;
    }
  return MIN (len, total_bytes - off);
}
/* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
{
  int rsize, isize;
  tree part;

  part = TREE_REALPART (expr);
  rsize = native_encode_expr (part, ptr, len, off);
  if (off == -1
      && rsize == 0)
    return 0;
  part = TREE_IMAGPART (expr);
  if (off != -1)
    off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
  isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
  if (off == -1
      && isize != rsize)
    return 0;
  return rsize + isize;
}
/* Subroutine of native_encode_expr.  Encode the VECTOR_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
{
  unsigned i, count;
  int size, offset;
  tree itype, elem;

  offset = 0;
  count = VECTOR_CST_NELTS (expr);
  itype = TREE_TYPE (TREE_TYPE (expr));
  size = GET_MODE_SIZE (TYPE_MODE (itype));
  for (i = 0; i < count; i++)
    {
      if (off >= size)
        {
          off -= size;
          continue;
        }
      elem = VECTOR_CST_ELT (expr, i);
      int res = native_encode_expr (elem, ptr+offset, len-offset, off);
      if ((off == -1 && res != size)
          || res == 0)
        return 0;
      offset += res;
      if (offset >= len)
        return offset;
      if (off != -1)
        off = 0;
    }
  return offset;
}
/* Subroutine of native_encode_expr.  Encode the STRING_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
{
  tree type = TREE_TYPE (expr);
  HOST_WIDE_INT total_bytes;

  if (TREE_CODE (type) != ARRAY_TYPE
      || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
      || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
      || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
    return 0;
  total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
  if ((off == -1 && total_bytes > len)
      || off >= total_bytes)
    return 0;
  if (off == -1)
    off = 0;
  if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
    {
      int written = 0;
      if (off < TREE_STRING_LENGTH (expr))
        {
          written = MIN (len, TREE_STRING_LENGTH (expr) - off);
          memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
        }
      memset (ptr + written, 0,
              MIN (total_bytes - written, len - written));
    }
  else
    memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
  return MIN (total_bytes - off, len);
}
/* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
   REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
   buffer PTR of length LEN bytes.  If OFF is not -1 then start
   the encoding at byte offset OFF and encode at most LEN bytes.
   Return the number of bytes placed in the buffer, or zero upon failure.  */

int
native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
{
  /* We don't support starting at negative offset and -1 is special.  */
  if (off < -1)
    return 0;

  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return native_encode_int (expr, ptr, len, off);

    case REAL_CST:
      return native_encode_real (expr, ptr, len, off);

    case FIXED_CST:
      return native_encode_fixed (expr, ptr, len, off);

    case COMPLEX_CST:
      return native_encode_complex (expr, ptr, len, off);

    case VECTOR_CST:
      return native_encode_vector (expr, ptr, len, off);

    case STRING_CST:
      return native_encode_string (expr, ptr, len, off);

    default:
      return 0;
    }
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_int (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));

  if (total_bytes > len
      || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return NULL_TREE;

  wide_int result = wi::from_buffer (ptr, total_bytes);

  return wide_int_to_tree (type, result);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a FIXED_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_fixed (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  double_int result;
  FIXED_VALUE_TYPE fixed_value;

  if (total_bytes > len
      || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return NULL_TREE;

  result = double_int::from_buffer (ptr, total_bytes);
  fixed_value = fixed_from_double_int (result, TYPE_MODE (type));

  return build_fixed (type, fixed_value);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a REAL_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_real (tree type, const unsigned char *ptr, int len)
{
  machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  REAL_VALUE_TYPE r;
  long tmp[6];

  total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  if (total_bytes > len || total_bytes > 24)
    return NULL_TREE;
  int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  memset (tmp, 0, sizeof (tmp));
  for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      /* Both OFFSET and BYTE index within a long;
         bitpos indexes the whole float.  */
      int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
      if (UNITS_PER_WORD < 4)
        {
          int word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        {
          offset = byte;
          if (BYTES_BIG_ENDIAN)
            {
              /* Reverse bytes within each long, or within the entire float
                 if it's smaller than a long (for HFmode).  */
              offset = MIN (3, total_bytes - 1) - offset;
              gcc_assert (offset >= 0);
            }
        }
      value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];

      tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
    }

  real_from_target (&r, tmp, mode);
  return build_real (type, r);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_complex (tree type, const unsigned char *ptr, int len)
{
  tree etype, rpart, ipart;
  int size;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  if (size * 2 > len)
    return NULL_TREE;
  rpart = native_interpret_expr (etype, ptr, size);
  if (!rpart)
    return NULL_TREE;
  ipart = native_interpret_expr (etype, ptr+size, size);
  if (!ipart)
    return NULL_TREE;
  return build_complex (type, rpart, ipart);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_vector (tree type, const unsigned char *ptr, int len)
{
  tree etype, elem;
  int i, size, count;
  tree *elements;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  count = TYPE_VECTOR_SUBPARTS (type);
  if (size * count > len)
    return NULL_TREE;

  elements = XALLOCAVEC (tree, count);
  for (i = count - 1; i >= 0; i--)
    {
      elem = native_interpret_expr (etype, ptr+(i*size), size);
      if (!elem)
        return NULL_TREE;
      elements[i] = elem;
    }
  return build_vector (type, elements);
}
/* Subroutine of fold_view_convert_expr.  Interpret the contents of
   the buffer PTR of length LEN as a constant of type TYPE.  For
   INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
   we return a REAL_CST, etc...  If the buffer cannot be interpreted,
   return NULL_TREE.  */

tree
native_interpret_expr (tree type, const unsigned char *ptr, int len)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      return native_interpret_int (type, ptr, len);

    case REAL_TYPE:
      return native_interpret_real (type, ptr, len);

    case FIXED_POINT_TYPE:
      return native_interpret_fixed (type, ptr, len);

    case COMPLEX_TYPE:
      return native_interpret_complex (type, ptr, len);

    case VECTOR_TYPE:
      return native_interpret_vector (type, ptr, len);

    default:
      return NULL_TREE;
    }
}
/* Returns true if we can interpret the contents of a native encoding
   as TYPE.  */

static bool
can_native_interpret_type_p (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case FIXED_POINT_TYPE:
    case REAL_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
      return true;
    default:
      return false;
    }
}
/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
   TYPE at compile-time.  If we're unable to perform the conversion
   return NULL_TREE.  */

static tree
fold_view_convert_expr (tree type, tree expr)
{
  /* We support up to 512-bit values (for V8DFmode).  */
  unsigned char buffer[64];
  int len;

  /* Check that the host and target are sane.  */
  if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    return NULL_TREE;

  len = native_encode_expr (expr, buffer, sizeof (buffer));
  if (len == 0)
    return NULL_TREE;

  return native_interpret_expr (type, buffer, len);
}
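/* For instance, a VIEW_CONVERT_EXPR of the 32-bit integer constant
   0x3f800000 to float folds here to the REAL_CST 1.0f: the integer is
   encoded into BUFFER and the bytes reinterpreted as a float.  */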
/* Build an expression for the address of T.  Folds away INDIRECT_REF
   to avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  if (TREE_CODE (t) == INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);

      if (TREE_TYPE (t) != ptrtype)
        t = build1_loc (loc, NOP_EXPR, ptrtype, t);
    }
  else if (TREE_CODE (t) == MEM_REF
           && integer_zerop (TREE_OPERAND (t, 1)))
    return TREE_OPERAND (t, 0);
  else if (TREE_CODE (t) == MEM_REF
           && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
    return fold_binary (POINTER_PLUS_EXPR, ptrtype,
                        TREE_OPERAND (t, 0),
                        convert_to_ptrofftype (TREE_OPERAND (t, 1)));
  else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
    {
      t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));

      if (TREE_TYPE (t) != ptrtype)
        t = fold_convert_loc (loc, ptrtype, t);
    }
  else
    t = build1_loc (loc, ADDR_EXPR, ptrtype, t);

  return t;
}
/* Build an expression for the address of T.  */

tree
build_fold_addr_expr_loc (location_t loc, tree t)
{
  tree ptrtype = build_pointer_type (TREE_TYPE (t));

  return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
}
/* Fold a unary expression of code CODE and type TYPE with operand
   OP0.  Return the folded expression if folding is successful.
   Otherwise, return NULL_TREE.  */

tree
fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
{
  tree tem;
  tree arg0;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 1);

  arg0 = op0;
  if (arg0)
    {
      if (CONVERT_EXPR_CODE_P (code)
          || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
        {
          /* Don't use STRIP_NOPS, because signedness of argument type
             matters.  */
          STRIP_SIGN_NOPS (arg0);
        }
      else
        {
          /* Strip any conversions that don't change the mode.  This
             is safe for every expression, except for a comparison
             expression because its signedness is derived from its
             operands.

             Note that this is done as an internal manipulation within
             the constant folder, in order to find the simplest
             representation of the arguments so that their form can be
             studied.  In any cases, the appropriate type conversions
             should be put back in the tree that will get out of the
             constant folder.  */
          STRIP_NOPS (arg0);
        }

      if (CONSTANT_CLASS_P (arg0))
        {
          tree tem = const_unop (code, type, arg0);
          if (tem)
            {
              if (TREE_TYPE (tem) != type)
                tem = fold_convert_loc (loc, type, tem);
              return tem;
            }
        }
    }

  tem = generic_simplify (loc, code, type, op0);
  if (tem)
    return tem;

  if (TREE_CODE_CLASS (code) == tcc_unary)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
        return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
                       fold_build1_loc (loc, code, type,
                                        fold_convert_loc (loc, TREE_TYPE (op0),
                                                          TREE_OPERAND (arg0, 1))));
      else if (TREE_CODE (arg0) == COND_EXPR)
        {
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg02 = TREE_OPERAND (arg0, 2);
          if (! VOID_TYPE_P (TREE_TYPE (arg01)))
            arg01 = fold_build1_loc (loc, code, type,
                                     fold_convert_loc (loc,
                                                       TREE_TYPE (op0), arg01));
          if (! VOID_TYPE_P (TREE_TYPE (arg02)))
            arg02 = fold_build1_loc (loc, code, type,
                                     fold_convert_loc (loc,
                                                       TREE_TYPE (op0), arg02));
          tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
                                 arg01, arg02);

          /* If this was a conversion, and all we did was to move into
             inside the COND_EXPR, bring it back out.  But leave it if
             it is a conversion from integer to integer and the
             result precision is no wider than a word since such a
             conversion is cheap and may be optimized away by combine,
             while it couldn't if it were outside the COND_EXPR.  Then return
             so we don't get into an infinite recursion loop taking the
             conversion out and then back in.  */

          if ((CONVERT_EXPR_CODE_P (code)
               || code == NON_LVALUE_EXPR)
              && TREE_CODE (tem) == COND_EXPR
              && TREE_CODE (TREE_OPERAND (tem, 1)) == code
              && TREE_CODE (TREE_OPERAND (tem, 2)) == code
              && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
              && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
              && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
                  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
              && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
                     && (INTEGRAL_TYPE_P
                         (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
                     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
                  || flag_syntax_only))
            tem = build1_loc (loc, code, type,
                              build3 (COND_EXPR,
                                      TREE_TYPE (TREE_OPERAND
                                                 (TREE_OPERAND (tem, 1), 0)),
                                      TREE_OPERAND (tem, 0),
                                      TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
                                      TREE_OPERAND (TREE_OPERAND (tem, 2),
                                                    0)));
          return tem;
        }
    }

  switch (code)
    {
    case NON_LVALUE_EXPR:
      if (!maybe_lvalue_p (op0))
        return fold_convert_loc (loc, type, op0);
      return NULL_TREE;

    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      if (COMPARISON_CLASS_P (op0))
        {
          /* If we have (type) (a CMP b) and type is an integral type, return
             new expression involving the new type.  Canonicalize
             (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
             non-integral type.
             Do not fold the result as that would not simplify further, also
             folding again results in recursions.  */
          if (TREE_CODE (type) == BOOLEAN_TYPE)
            return build2_loc (loc, TREE_CODE (op0), type,
                               TREE_OPERAND (op0, 0),
                               TREE_OPERAND (op0, 1));
          else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
                   && TREE_CODE (type) != VECTOR_TYPE)
            return build3_loc (loc, COND_EXPR, type, op0,
                               constant_boolean_node (true, type),
                               constant_boolean_node (false, type));
        }

      /* Handle (T *)&A.B.C for A being of type T and B and C
         living at offset zero.  This occurs frequently in
         C++ upcasting and then accessing the base.  */
      if (TREE_CODE (op0) == ADDR_EXPR
          && POINTER_TYPE_P (type)
          && handled_component_p (TREE_OPERAND (op0, 0)))
        {
          HOST_WIDE_INT bitsize, bitpos;
          tree offset;
          machine_mode mode;
          int unsignedp, reversep, volatilep;
          tree base
            = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
                                   &offset, &mode, &unsignedp, &reversep,
                                   &volatilep, false);
          /* If the reference was to a (constant) zero offset, we can use
             the address of the base if it has the same base type
             as the result type and the pointer type is unqualified.  */
          if (! offset && bitpos == 0
              && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
                  == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
              && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
            return fold_convert_loc (loc, type,
                                     build_fold_addr_expr_loc (loc, base));
        }

      if (TREE_CODE (op0) == MODIFY_EXPR
          && TREE_CONSTANT (TREE_OPERAND (op0, 1))
          /* Detect assigning a bitfield.  */
          && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
               && DECL_BIT_FIELD
                  (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
        {
          /* Don't leave an assignment inside a conversion
             unless assigning a bitfield.  */
          tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
          /* First do the assignment, then return converted constant.  */
          tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
          TREE_NO_WARNING (tem) = 1;
          TREE_USED (tem) = 1;
          return tem;
        }

      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
         constants (if x has signed type, the sign bit cannot be set
         in c).  This folds extension into the BIT_AND_EXPR.
         ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
         very likely don't have maximal range for their precision and this
         transformation effectively doesn't preserve non-maximal ranges.  */
      if (TREE_CODE (type) == INTEGER_TYPE
          && TREE_CODE (op0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
        {
          tree and_expr = op0;
          tree and0 = TREE_OPERAND (and_expr, 0);
          tree and1 = TREE_OPERAND (and_expr, 1);
          int change = 0;

          if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
              || (TYPE_PRECISION (type)
                  <= TYPE_PRECISION (TREE_TYPE (and_expr))))
            change = 1;
          else if (TYPE_PRECISION (TREE_TYPE (and1))
                   <= HOST_BITS_PER_WIDE_INT
                   && tree_fits_uhwi_p (and1))
            {
              unsigned HOST_WIDE_INT cst;

              cst = tree_to_uhwi (and1);
              cst &= HOST_WIDE_INT_M1U
                     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
              change = (cst == 0);
              if (change
                  && !flag_syntax_only
                  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
                      == ZERO_EXTEND))
                {
                  tree uns = unsigned_type_for (TREE_TYPE (and0));
                  and0 = fold_convert_loc (loc, uns, and0);
                  and1 = fold_convert_loc (loc, uns, and1);
                }
            }
          if (change)
            {
              tem = force_fit_type (type, wi::to_widest (and1), 0,
                                    TREE_OVERFLOW (and1));
              return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                      fold_convert_loc (loc, type, and0), tem);
            }
        }

      /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
         cast (T1)X will fold away.  We assume that this happens when X itself
         is a cast.  */
      if (POINTER_TYPE_P (type)
          && TREE_CODE (arg0) == POINTER_PLUS_EXPR
          && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);

          return fold_build_pointer_plus_loc
            (loc, fold_convert_loc (loc, type, arg00), arg01);
        }

      /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
         of the same precision, and X is an integer type not narrower than
         types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (op0) == BIT_NOT_EXPR
          && INTEGRAL_TYPE_P (TREE_TYPE (op0))
          && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
          && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
        {
          tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
          if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
              && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
            return fold_build1_loc (loc, BIT_NOT_EXPR, type,
                                    fold_convert_loc (loc, type, tem));
        }

      /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
         type of X and Y (integer types only).  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (op0) == MULT_EXPR
          && INTEGRAL_TYPE_P (TREE_TYPE (op0))
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
        {
          /* Be careful not to introduce new overflows.  */
          tree mult_type;
          if (TYPE_OVERFLOW_WRAPS (type))
            mult_type = type;
          else
            mult_type = unsigned_type_for (type);

          if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
            {
              tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
                                     fold_convert_loc (loc, mult_type,
                                                       TREE_OPERAND (op0, 0)),
                                     fold_convert_loc (loc, mult_type,
                                                       TREE_OPERAND (op0, 1)));
              return fold_convert_loc (loc, type, tem);
            }
        }

      return NULL_TREE;

    case VIEW_CONVERT_EXPR:
      if (TREE_CODE (op0) == MEM_REF)
        {
          tem = fold_build2_loc (loc, MEM_REF, type,
                                 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
          REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
          return tem;
        }
      return NULL_TREE;

    case NEGATE_EXPR:
      tem = fold_negate_expr (loc, arg0);
      if (tem)
        return fold_convert_loc (loc, type, tem);
      return NULL_TREE;

    case ABS_EXPR:
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      if (TREE_CODE (arg0) == NOP_EXPR
          && TREE_CODE (type) == REAL_TYPE)
        {
          tree targ0 = strip_float_extensions (arg0);
          if (targ0 != arg0)
            return fold_convert_loc (loc, type,
                                     fold_build1_loc (loc, ABS_EXPR,
                                                      TREE_TYPE (targ0),
                                                      targ0));
        }
      return NULL_TREE;

    case BIT_NOT_EXPR:
      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
                                    fold_convert_loc (loc, type,
                                                      TREE_OPERAND (arg0, 0)))))
        return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
               && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
                                         fold_convert_loc (loc, type,
                                                           TREE_OPERAND (arg0, 1)))))
        return fold_build2_loc (loc, BIT_XOR_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)), tem);
      return NULL_TREE;

    case TRUTH_NOT_EXPR:
      /* Note that the operand of this must be an int
         and its values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language,
         but we don't handle values other than 1 correctly yet.)  */
      tem = fold_truth_not_expr (loc, arg0);
      if (!tem)
        return NULL_TREE;
      return fold_convert_loc (loc, type, tem);

    case INDIRECT_REF:
      /* Fold *&X to X if X is an lvalue.  */
      if (TREE_CODE (op0) == ADDR_EXPR)
        {
          tree op00 = TREE_OPERAND (op0, 0);
          if ((TREE_CODE (op00) == VAR_DECL
               || TREE_CODE (op00) == PARM_DECL
               || TREE_CODE (op00) == RESULT_DECL)
              && !TREE_READONLY (op00))
            return op00;
        }
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* If the operation was a conversion do _not_ mark a resulting constant
   with TREE_OVERFLOW if the original constant was not.  These conversions
   have implementation defined behavior and retaining the TREE_OVERFLOW
   flag here would confuse later passes such as VRP.  */

tree
fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
                                tree type, tree op0)
{
  tree res = fold_unary_loc (loc, code, type, op0);
  if (res
      && TREE_CODE (res) == INTEGER_CST
      && TREE_CODE (op0) == INTEGER_CST
      && CONVERT_EXPR_CODE_P (code))
    TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);

  return res;
}
8059 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8060 operands OP0 and OP1. LOC is the location of the resulting expression.
8061 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8062 Return the folded expression if folding is successful. Otherwise,
8063 return NULL_TREE. */
static tree
fold_truth_andor (location_t loc, enum tree_code code, tree type,
                  tree arg0, tree arg1, tree op0, tree op1)
{
  tree tem;

  /* We only do these simplifications if we are optimizing.  */
  if (!optimize)
    return NULL_TREE;

  /* Check for things like (A || B) && (A || C).  We can convert this
     to A || (B && C).  Note that either operator can be any of the four
     truth and/or operations and the transformation will still be
     valid.  Also note that we only care about order for the
     ANDIF and ORIF operators.  If B contains side effects, this
     might change the truth-value of A.  */
  if (TREE_CODE (arg0) == TREE_CODE (arg1)
      && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
          || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
          || TREE_CODE (arg0) == TRUTH_AND_EXPR
          || TREE_CODE (arg0) == TRUTH_OR_EXPR)
      && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
    {
      tree a00 = TREE_OPERAND (arg0, 0);
      tree a01 = TREE_OPERAND (arg0, 1);
      tree a10 = TREE_OPERAND (arg1, 0);
      tree a11 = TREE_OPERAND (arg1, 1);
      int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
                          || TREE_CODE (arg0) == TRUTH_AND_EXPR)
                         && (code == TRUTH_AND_EXPR
                             || code == TRUTH_OR_EXPR));

      if (operand_equal_p (a00, a10, 0))
        return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
                                fold_build2_loc (loc, code, type, a01, a11));
      else if (commutative && operand_equal_p (a00, a11, 0))
        return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
                                fold_build2_loc (loc, code, type, a01, a10));
      else if (commutative && operand_equal_p (a01, a10, 0))
        return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
                                fold_build2_loc (loc, code, type, a00, a11));

      /* This case is tricky because we must either have commutative
         operators or else A10 must not have side-effects.  */

      else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
               && operand_equal_p (a01, a11, 0))
        return fold_build2_loc (loc, TREE_CODE (arg0), type,
                                fold_build2_loc (loc, code, type, a00, a10),
                                a01);
    }

  /* See if we can build a range comparison.  */
  if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
    return tem;

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
      if (tem)
        return fold_build2_loc (loc, code, type, tem, arg1);
    }

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
      if (tem)
        return fold_build2_loc (loc, code, type, arg0, tem);
    }

  /* Check for the possibility of merging component references.  If our
     lhs is another similar operation, try to merge its rhs with our
     rhs.  Then try to merge our lhs and rhs.  */
  if (TREE_CODE (arg0) == code
      && 0 != (tem = fold_truth_andor_1 (loc, code, type,
                                         TREE_OPERAND (arg0, 1), arg1)))
    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

  if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
    return tem;

  if (LOGICAL_OP_NON_SHORT_CIRCUIT
      && (code == TRUTH_AND_EXPR
          || code == TRUTH_ANDIF_EXPR
          || code == TRUTH_OR_EXPR
          || code == TRUTH_ORIF_EXPR))
    {
      enum tree_code ncode, icode;

      ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
              ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
      icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;

      /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
         or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
         We don't want to pack more than two leafs to a non-IF AND/OR
         expression.
         If tree-code of left-hand operand isn't an AND/OR-IF code and not
         equal to IF-CODE, then we don't want to add right-hand operand.
         If the inner right-hand side of left-hand operand has
         side-effects, or isn't simple, then we can't add to it,
         as otherwise we might destroy if-sequence.  */
      if (TREE_CODE (arg0) == icode
          && simple_operand_p_2 (arg1)
          /* Needed for sequence points to handle trappings, and
             side-effects.  */
          && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
        {
          tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
                                 arg1);
          return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
                                  tem);
        }
      /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
         or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
      else if (TREE_CODE (arg1) == icode
               && simple_operand_p_2 (arg0)
               /* Needed for sequence points to handle trappings, and
                  side-effects.  */
               && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
        {
          tem = fold_build2_loc (loc, ncode, type,
                                 arg0, TREE_OPERAND (arg1, 0));
          return fold_build2_loc (loc, icode, type, tem,
                                  TREE_OPERAND (arg1, 1));
        }
      /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
         into (A OR B).
         For sequence point consistency, we need to check for trapping,
         and side-effects.  */
      else if (code == icode && simple_operand_p_2 (arg0)
               && simple_operand_p_2 (arg1))
        return fold_build2_loc (loc, ncode, type, arg0, arg1);
    }

  return NULL_TREE;
}
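
/* Illustrative sketch added for exposition (hypothetical helper, not part
   of GCC): the distribution rule handled above, written in plain C.  For
   side-effect-free operands the two forms always agree, which is why the
   transform is valid for any mix of the four truth AND/OR codes.  */

static int
fold_truth_andor_demo (int a, int b, int c)
{
  int before = (a || b) && (a || c);
  int after = a || (b && c);
  return before == after;	/* Always 1.  */
}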
/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
   valid if signed overflow is undefined.  */

static tree
maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
                                 tree arg0, tree arg1,
                                 bool *strict_overflow_p)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
  int sgn0;

  /* Match A +- CST code arg1.  We can change this only if overflow
     is undefined.  */
  if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
         && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
        /* In principle pointers also have undefined overflow behavior,
           but that causes problems elsewhere.  */
        && !POINTER_TYPE_P (TREE_TYPE (arg0))
        && (code0 == MINUS_EXPR
            || code0 == PLUS_EXPR)
        && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
    return NULL_TREE;

  /* Identify the constant in arg0 and its sign.  */
  cst0 = TREE_OPERAND (arg0, 1);
  sgn0 = tree_int_cst_sgn (cst0);

  /* Overflowed constants and zero will cause problems.  */
  if (integer_zerop (cst0)
      || TREE_OVERFLOW (cst0))
    return NULL_TREE;

  /* See if we can reduce the magnitude of the constant in
     arg0 by changing the comparison code.  */
  /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
  if (code == LT_EXPR
      && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
    code = LE_EXPR;
  /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
  else if (code == GT_EXPR
           && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
    code = GE_EXPR;
  /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
  else if (code == LE_EXPR
           && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
    code = LT_EXPR;
  /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
  else if (code == GE_EXPR
           && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
    code = GT_EXPR;
  else
    return NULL_TREE;
  *strict_overflow_p = true;

  /* Now build the constant reduced in magnitude.  But not if that
     would produce one outside of its type's range.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
      && ((sgn0 == 1
           && TYPE_MIN_VALUE (TREE_TYPE (cst0))
           && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
          || (sgn0 == -1
              && TYPE_MAX_VALUE (TREE_TYPE (cst0))
              && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
    return NULL_TREE;

  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
                       cst0, build_int_cst (TREE_TYPE (cst0), 1));
  t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
  t = fold_convert (TREE_TYPE (arg1), t);

  return fold_build2_loc (loc, code, type, t, arg1);
}
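
/* Illustrative sketch added for exposition (hypothetical helper, not part
   of GCC): with signed overflow assumed undefined, reducing the constant's
   magnitude preserves the comparison, e.g. A - CST < B matches
   A - (CST-1) <= B.  */

static int
canonicalize_cmp_demo (long a, long b, long cst)
{
  /* Assumes cst > 0 and that neither subtraction overflows.  */
  return ((a - cst) < b) == ((a - (cst - 1)) <= b);	/* Always 1.  */
}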
/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
   overflow further.  Try to decrease the magnitude of constants involved
   by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
   and put sole constants at the second argument position.
   Returns the canonicalized tree if changed, otherwise NULL_TREE.  */

static tree
maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
                               tree arg0, tree arg1)
{
  tree t;
  bool strict_overflow_p;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
                                  "when reducing constant in comparison");

  /* Try canonicalization by simplifying arg0.  */
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
                                       &strict_overflow_p);
  if (t)
    {
      if (strict_overflow_p)
        fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
      return t;
    }

  /* Try canonicalization by simplifying arg1 using the swapped
     comparison.  */
  code = swap_tree_comparison (code);
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
                                       &strict_overflow_p);
  if (t && strict_overflow_p)
    fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
  return t;
}
/* Return whether BASE + OFFSET + BITPOS may wrap around the address
   space.  This is used to avoid issuing overflow warnings for
   expressions like &p->x which can not wrap.  */

static bool
pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
{
  if (!POINTER_TYPE_P (TREE_TYPE (base)))
    return true;

  if (bitpos < 0)
    return true;

  wide_int wi_offset;
  int precision = TYPE_PRECISION (TREE_TYPE (base));
  if (offset == NULL_TREE)
    wi_offset = wi::zero (precision);
  else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
    return true;
  else
    wi_offset = offset;

  bool overflow;
  wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
  wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
  if (overflow)
    return true;

  if (!wi::fits_uhwi_p (total))
    return true;

  HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
  if (size <= 0)
    return true;

  /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
     array.  */
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      HOST_WIDE_INT base_size;

      base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
      if (base_size > 0 && size < base_size)
        size = base_size;
    }

  return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
}
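
/* Illustrative sketch added for exposition (hypothetical helper, not part
   of GCC): the unsigned wraparound test used for the OFFSET + BITPOS/8
   addition above, on plain unsigned longs.  */

static int
offset_add_overflows_demo (unsigned long offset, unsigned long units,
                           unsigned long *total)
{
  *total = offset + units;
  return *total < offset;	/* Unsigned addition wrapped around.  */
}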
/* Return a positive integer when the symbol DECL is known to have
   a nonzero address, zero when it's known not to (e.g., it's a weak
   symbol), and a negative integer when the symbol is not yet in the
   symbol table and so whether or not its address is zero is unknown.  */

static int
maybe_nonzero_address (tree decl)
{
  if (DECL_P (decl) && decl_in_symtab_p (decl))
    if (struct symtab_node *symbol = symtab_node::get_create (decl))
      return symbol->nonzero_address ();

  return -1;
}
/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary should call fold_binary.  Fold a comparison with
   tree code CODE and type TYPE with operands OP0 and OP1.  Return
   the folded comparison or NULL_TREE.  */

static tree
fold_comparison (location_t loc, enum tree_code code, tree type,
                 tree op0, tree op1)
{
  const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
  tree arg0, arg1, tem;

  arg0 = op0;
  arg1 = op1;

  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  /* For comparisons of pointers we can decompose it to a compile time
     comparison of the base objects and the offsets into the object.
     This requires at least one operand being an ADDR_EXPR or a
     POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == ADDR_EXPR
          || TREE_CODE (arg1) == ADDR_EXPR
          || TREE_CODE (arg0) == POINTER_PLUS_EXPR
          || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
    {
      tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
      HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
      machine_mode mode;
      int volatilep, reversep, unsignedp;
      bool indirect_base0 = false, indirect_base1 = false;

      /* Get base and offset for the access.  Strip ADDR_EXPR for
         get_inner_reference, but put it back by stripping INDIRECT_REF
         off the base object if possible.  indirect_baseN will be true
         if baseN is not an address but refers to the object itself.  */
      base0 = arg0;
      if (TREE_CODE (arg0) == ADDR_EXPR)
        {
          base0
            = get_inner_reference (TREE_OPERAND (arg0, 0),
                                   &bitsize, &bitpos0, &offset0, &mode,
                                   &unsignedp, &reversep, &volatilep, false);
          if (TREE_CODE (base0) == INDIRECT_REF)
            base0 = TREE_OPERAND (base0, 0);
          else
            indirect_base0 = true;
        }
      else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
        {
          base0 = TREE_OPERAND (arg0, 0);
          STRIP_SIGN_NOPS (base0);
          if (TREE_CODE (base0) == ADDR_EXPR)
            {
              base0
                = get_inner_reference (TREE_OPERAND (base0, 0),
                                       &bitsize, &bitpos0, &offset0, &mode,
                                       &unsignedp, &reversep, &volatilep,
                                       false);
              if (TREE_CODE (base0) == INDIRECT_REF)
                base0 = TREE_OPERAND (base0, 0);
              else
                indirect_base0 = true;
            }
          if (offset0 == NULL_TREE || integer_zerop (offset0))
            offset0 = TREE_OPERAND (arg0, 1);
          else
            offset0 = size_binop (PLUS_EXPR, offset0,
                                  TREE_OPERAND (arg0, 1));
          if (TREE_CODE (offset0) == INTEGER_CST)
            {
              offset_int tem = wi::sext (wi::to_offset (offset0),
                                         TYPE_PRECISION (sizetype));
              tem <<= LOG2_BITS_PER_UNIT;
              tem += bitpos0;
              if (wi::fits_shwi_p (tem))
                {
                  bitpos0 = tem.to_shwi ();
                  offset0 = NULL_TREE;
                }
            }
        }

      base1 = arg1;
      if (TREE_CODE (arg1) == ADDR_EXPR)
        {
          base1
            = get_inner_reference (TREE_OPERAND (arg1, 0),
                                   &bitsize, &bitpos1, &offset1, &mode,
                                   &unsignedp, &reversep, &volatilep, false);
          if (TREE_CODE (base1) == INDIRECT_REF)
            base1 = TREE_OPERAND (base1, 0);
          else
            indirect_base1 = true;
        }
      else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
        {
          base1 = TREE_OPERAND (arg1, 0);
          STRIP_SIGN_NOPS (base1);
          if (TREE_CODE (base1) == ADDR_EXPR)
            {
              base1
                = get_inner_reference (TREE_OPERAND (base1, 0),
                                       &bitsize, &bitpos1, &offset1, &mode,
                                       &unsignedp, &reversep, &volatilep,
                                       false);
              if (TREE_CODE (base1) == INDIRECT_REF)
                base1 = TREE_OPERAND (base1, 0);
              else
                indirect_base1 = true;
            }
          if (offset1 == NULL_TREE || integer_zerop (offset1))
            offset1 = TREE_OPERAND (arg1, 1);
          else
            offset1 = size_binop (PLUS_EXPR, offset1,
                                  TREE_OPERAND (arg1, 1));
          if (TREE_CODE (offset1) == INTEGER_CST)
            {
              offset_int tem = wi::sext (wi::to_offset (offset1),
                                         TYPE_PRECISION (sizetype));
              tem <<= LOG2_BITS_PER_UNIT;
              tem += bitpos1;
              if (wi::fits_shwi_p (tem))
                {
                  bitpos1 = tem.to_shwi ();
                  offset1 = NULL_TREE;
                }
            }
        }

      /* If we have equivalent bases we might be able to simplify.  */
      if (indirect_base0 == indirect_base1
          && operand_equal_p (base0, base1,
                              indirect_base0 ? OEP_ADDRESS_OF : 0))
        {
          /* We can fold this expression to a constant if the non-constant
             offset parts are equal.  */
          if ((offset0 == offset1
               || (offset0 && offset1
                   && operand_equal_p (offset0, offset1, 0)))
              && (equality_code
                  || (indirect_base0 && DECL_P (base0))
                  || POINTER_TYPE_OVERFLOW_UNDEFINED))
            {
              if (!equality_code
                  && bitpos0 != bitpos1
                  && (pointer_may_wrap_p (base0, offset0, bitpos0)
                      || pointer_may_wrap_p (base1, offset1, bitpos1)))
                fold_overflow_warning (("assuming pointer wraparound does not "
                                        "occur when comparing P +- C1 with "
                                        "P +- C2"),
                                       WARN_STRICT_OVERFLOW_CONDITIONAL);

              switch (code)
                {
                case EQ_EXPR:
                  return constant_boolean_node (bitpos0 == bitpos1, type);
                case NE_EXPR:
                  return constant_boolean_node (bitpos0 != bitpos1, type);
                case LT_EXPR:
                  return constant_boolean_node (bitpos0 < bitpos1, type);
                case LE_EXPR:
                  return constant_boolean_node (bitpos0 <= bitpos1, type);
                case GE_EXPR:
                  return constant_boolean_node (bitpos0 >= bitpos1, type);
                case GT_EXPR:
                  return constant_boolean_node (bitpos0 > bitpos1, type);
                default:;
                }
            }
          /* We can simplify the comparison to a comparison of the variable
             offset parts if the constant offset parts are equal.
             Be careful to use signed sizetype here because otherwise we
             mess with array offsets in the wrong way.  This is possible
             because pointer arithmetic is restricted to remain within an
             object and overflow on pointer differences is undefined as of
             6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
          else if (bitpos0 == bitpos1
                   && (equality_code
                       || (indirect_base0 && DECL_P (base0))
                       || POINTER_TYPE_OVERFLOW_UNDEFINED))
            {
              /* By converting to signed sizetype we cover middle-end pointer
                 arithmetic which operates on unsigned pointer types of size
                 type size and ARRAY_REF offsets which are properly sign or
                 zero extended from their type in case it is narrower than
                 sizetype.  */
              if (offset0 == NULL_TREE)
                offset0 = build_int_cst (ssizetype, 0);
              else
                offset0 = fold_convert_loc (loc, ssizetype, offset0);
              if (offset1 == NULL_TREE)
                offset1 = build_int_cst (ssizetype, 0);
              else
                offset1 = fold_convert_loc (loc, ssizetype, offset1);

              if (!equality_code
                  && (pointer_may_wrap_p (base0, offset0, bitpos0)
                      || pointer_may_wrap_p (base1, offset1, bitpos1)))
                fold_overflow_warning (("assuming pointer wraparound does not "
                                        "occur when comparing P +- C1 with "
                                        "P +- C2"),
                                       WARN_STRICT_OVERFLOW_COMPARISON);

              return fold_build2_loc (loc, code, type, offset0, offset1);
            }
          /* For equal offsets we can simplify to a comparison of the
             base addresses.  */
          else if (bitpos0 == bitpos1
                   && (indirect_base0
                       ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
                   && (indirect_base1
                       ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
                   && ((offset0 == offset1)
                       || (offset0 && offset1
                           && operand_equal_p (offset0, offset1, 0))))
            {
              if (indirect_base0)
                base0 = build_fold_addr_expr_loc (loc, base0);
              if (indirect_base1)
                base1 = build_fold_addr_expr_loc (loc, base1);
              return fold_build2_loc (loc, code, type, base0, base1);
            }
        }
      /* Comparison between an ordinary (non-weak) symbol and a null
         pointer can be eliminated since such symbols must have a non
         null address.  In C, relational expressions between pointers
         to objects and null pointers are undefined.  The results
         below follow the C++ rules with the additional property that
         every object pointer compares greater than a null pointer.
      */
      else if (DECL_P (base0)
               && maybe_nonzero_address (base0) > 0
               /* Avoid folding references to struct members at offset 0 to
                  prevent tests like '&ptr->firstmember == 0' from getting
                  eliminated.  When ptr is null, although the -> expression
                  is strictly speaking invalid, GCC retains it as a matter
                  of QoI.  See PR c/44555.  */
               && (offset0 == NULL_TREE && bitpos0 != 0)
               /* The caller guarantees that when one of the arguments is
                  constant (i.e., null in this case) it is second.  */
               && integer_zerop (arg1))
        {
          switch (code)
            {
            case EQ_EXPR:
            case LE_EXPR:
            case LT_EXPR:
              return constant_boolean_node (false, type);
            case GE_EXPR:
            case GT_EXPR:
            case NE_EXPR:
              return constant_boolean_node (true, type);
            default:
              gcc_unreachable ();
            }
        }
    }

  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one and has the same sign.  */
  if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = TREE_OPERAND (arg1, 1);
      tree variable1 = TREE_OPERAND (arg0, 0);
      tree variable2 = TREE_OPERAND (arg1, 0);
      tree cst;
      const char * const warnmsg = G_("assuming signed overflow does not "
                                      "occur when combining constants around "
                                      "a comparison");

      /* Put the constant on the side where it doesn't overflow and is
         of lower absolute value and of same sign than before.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
                             ? MINUS_EXPR : PLUS_EXPR,
                             const2, const1);
      if (!TREE_OVERFLOW (cst)
          && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
          && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
        {
          fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
          return fold_build2_loc (loc, code, type, variable1,
                                  fold_build2_loc (loc, TREE_CODE (arg1),
                                                   TREE_TYPE (arg1),
                                                   variable2, cst));
        }

      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
                             ? MINUS_EXPR : PLUS_EXPR,
                             const1, const2);
      if (!TREE_OVERFLOW (cst)
          && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
          && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
        {
          fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
          return fold_build2_loc (loc, code, type,
                                  fold_build2_loc (loc, TREE_CODE (arg0),
                                                   TREE_TYPE (arg0),
                                                   variable1, cst),
                                  variable2);
        }
    }

  tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
  if (tem)
    return tem;

  /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
     constant, we can simplify it.  */
  if (TREE_CODE (arg1) == INTEGER_CST
      && (TREE_CODE (arg0) == MIN_EXPR
          || TREE_CODE (arg0) == MAX_EXPR)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
    {
      tem = optimize_minmax_comparison (loc, code, type, op0, op1);
      if (tem)
        return tem;
    }

  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;
      int save_p = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
          /* Don't handle degenerate cases here; they should already
             have been handled anyway.  */
          && cval1 != 0 && cval2 != 0
          && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
          && TREE_TYPE (cval1) == TREE_TYPE (cval2)
          && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
          && TYPE_MAX_VALUE (TREE_TYPE (cval1))
          && TYPE_MAX_VALUE (TREE_TYPE (cval2))
          && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
                                TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
        {
          tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
          tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

          /* We can't just pass T to eval_subst in case cval1 or cval2
             was the same as ARG1.  */

          tree high_result
            = fold_build2_loc (loc, code, type,
                               eval_subst (loc, arg0, cval1, maxval,
                                           cval2, minval),
                               arg1);
          tree equal_result
            = fold_build2_loc (loc, code, type,
                               eval_subst (loc, arg0, cval1, maxval,
                                           cval2, maxval),
                               arg1);
          tree low_result
            = fold_build2_loc (loc, code, type,
                               eval_subst (loc, arg0, cval1, minval,
                                           cval2, maxval),
                               arg1);

          /* All three of these results should be 0 or 1.  Confirm they are.
             Then use those values to select the proper code to use.  */

          if (TREE_CODE (high_result) == INTEGER_CST
              && TREE_CODE (equal_result) == INTEGER_CST
              && TREE_CODE (low_result) == INTEGER_CST)
            {
              /* Make a 3-bit mask with the high-order bit being the
                 value for `>', the next for '=', and the low for '<'.  */
              switch ((integer_onep (high_result) * 4)
                      + (integer_onep (equal_result) * 2)
                      + integer_onep (low_result))
                {
                case 0:
                  /* Always false.  */
                  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
                case 1:
                  code = LT_EXPR;
                  break;
                case 2:
                  code = EQ_EXPR;
                  break;
                case 3:
                  code = LE_EXPR;
                  break;
                case 4:
                  code = GT_EXPR;
                  break;
                case 5:
                  code = NE_EXPR;
                  break;
                case 6:
                  code = GE_EXPR;
                  break;
                case 7:
                  /* Always true.  */
                  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
                }

              if (save_p)
                {
                  tem = save_expr (build2 (code, type, cval1, cval2));
                  SET_EXPR_LOCATION (tem, loc);
                  return tem;
                }
              return fold_build2_loc (loc, code, type, cval1, cval2);
            }
        }
    }

  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
     into a single range test.  */
  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
      && TREE_CODE (arg1) == INTEGER_CST
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !integer_zerop (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (arg1))
    {
      tem = fold_div_compare (loc, code, type, arg0, arg1);
      if (tem != NULL_TREE)
        return tem;
    }

  return NULL_TREE;
}
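
/* Illustrative sketch added for exposition (hypothetical helper, not part
   of GCC): once both pointers decompose to the same base, the comparison
   reduces to comparing the constant byte positions, as in the folding
   above.  */

static int
ptr_cmp_demo (char *base, unsigned long pos0, unsigned long pos1)
{
  /* Valid when both addresses point into the same object.  */
  return (base + pos0 < base + pos1) == (pos0 < pos1);	/* Always 1.  */
}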
/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */

static tree
fold_mult_zconjz (location_t loc, tree type, tree expr)
{
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      expr = save_expr (expr);
      rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
      ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
    }

  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2_loc (loc, PLUS_EXPR, itype,
                         fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
                         fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
  return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
                          build_zero_cst (itype));
}
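
/* Illustrative sketch added for exposition (hypothetical helper, not part
   of GCC): the identity used above, (r + i*I) * (r - i*I) = r*r + i*i
   with zero imaginary part, written out over a pair of doubles.  */

static int
zconjz_demo (double re, double im)
{
  double prod_re = re * re - im * (-im);	/* = re*re + im*im */
  double prod_im = re * (-im) + im * re;	/* = 0 */
  /* 1 whenever the products stay finite.  */
  return prod_re == re * re + im * im && prod_im == 0.0;
}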
/* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
   CONSTRUCTOR ARG into array ELTS and return true if successful.  */

static bool
vec_cst_ctor_to_array (tree arg, tree *elts)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;

  if (TREE_CODE (arg) == VECTOR_CST)
    {
      for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
        elts[i] = VECTOR_CST_ELT (arg, i);
    }
  else if (TREE_CODE (arg) == CONSTRUCTOR)
    {
      constructor_elt *elt;

      FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
        if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
          return false;
        else
          elts[i] = elt->value;
    }
  else
    return false;

  for (; i < nelts; i++)
    elts[i]
      = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);

  return true;
}
/* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
   selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
   NULL_TREE otherwise.  */

static tree
fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
  tree *elts;
  bool need_ctor = false;

  gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
              && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
  if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
      || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
    return NULL_TREE;

  elts = XALLOCAVEC (tree, nelts * 3);
  if (!vec_cst_ctor_to_array (arg0, elts)
      || !vec_cst_ctor_to_array (arg1, elts + nelts))
    return NULL_TREE;

  for (i = 0; i < nelts; i++)
    {
      if (!CONSTANT_CLASS_P (elts[sel[i]]))
        need_ctor = true;
      elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
    }

  if (need_ctor)
    {
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, nelts);
      for (i = 0; i < nelts; i++)
        CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
      return build_constructor (type, v);
    }

  return build_vector (type, &elts[2 * nelts]);
}
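
/* Illustrative sketch added for exposition (hypothetical helper, not part
   of GCC): the selection rule above on plain arrays.  SEL indexes the
   concatenation of A and B, so entries below NELTS pick from A and the
   rest pick from B.  */

static void
vec_perm_demo (const int *a, const int *b, const unsigned char *sel,
               unsigned int nelts, int *out)
{
  for (unsigned int i = 0; i < nelts; i++)
    out[i] = sel[i] < nelts ? a[sel[i]] : b[sel[i] - nelts];
}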
/* Try to fold a pointer difference of type TYPE between two address
   expressions of array references AREF0 and AREF1 using location LOC.
   Return a simplified expression for the difference or NULL_TREE.  */

static tree
fold_addr_of_array_ref_difference (location_t loc, tree type,
                                   tree aref0, tree aref1)
{
  tree base0 = TREE_OPERAND (aref0, 0);
  tree base1 = TREE_OPERAND (aref1, 0);
  tree base_offset = build_int_cst (type, 0);

  /* If the bases are array references as well, recurse.  If the bases
     are pointer indirections compute the difference of the pointers.
     If the bases are equal, we are set.  */
  if ((TREE_CODE (base0) == ARRAY_REF
       && TREE_CODE (base1) == ARRAY_REF
       && (base_offset
           = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
      || (INDIRECT_REF_P (base0)
          && INDIRECT_REF_P (base1)
          && (base_offset
              = fold_binary_loc (loc, MINUS_EXPR, type,
                                 fold_convert (type, TREE_OPERAND (base0, 0)),
                                 fold_convert (type,
                                               TREE_OPERAND (base1, 0)))))
      || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
    {
      tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
      tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
      tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
      tree diff = build2 (MINUS_EXPR, type, op0, op1);
      return fold_build2_loc (loc, PLUS_EXPR, type,
                              base_offset,
                              fold_build2_loc (loc, MULT_EXPR, type,
                                               diff, esz));
    }
  return NULL_TREE;
}
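
/* Illustrative sketch added for exposition (hypothetical helper, not part
   of GCC): the identity behind the fold above, in C pointer arithmetic.
   &a[i] - &a[j] is the index difference; the byte difference scales by
   the element size.  */

static int
aref_diff_demo (int *a, long i, long j)
{
  return (&a[i] - &a[j]) == (i - j);	/* Always 1 within one array.  */
}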
/* If the real or vector real constant CST of type TYPE has an exact
   inverse, return it, else return NULL.  */

static tree
exact_inverse (tree type, tree cst)
{
  REAL_VALUE_TYPE r;
  tree unit_type, *elts;
  machine_mode mode;
  unsigned vec_nelts, i;

  switch (TREE_CODE (cst))
    {
    case REAL_CST:
      r = TREE_REAL_CST (cst);

      if (exact_real_inverse (TYPE_MODE (type), &r))
        return build_real (type, r);

      return NULL_TREE;

    case VECTOR_CST:
      vec_nelts = VECTOR_CST_NELTS (cst);
      elts = XALLOCAVEC (tree, vec_nelts);
      unit_type = TREE_TYPE (type);
      mode = TYPE_MODE (unit_type);

      for (i = 0; i < vec_nelts; i++)
        {
          r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
          if (!exact_real_inverse (mode, &r))
            return NULL_TREE;
          elts[i] = build_real (unit_type, r);
        }

      return build_vector (type, elts);

    default:
      return NULL_TREE;
    }
}
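
/* Illustrative sketch added for exposition (hypothetical helper, not part
   of GCC): an inverse is exact for powers of two, so a division by such
   a constant can be rewritten as a multiplication without changing the
   result.  */

static int
exact_inverse_demo (void)
{
  double x = 0.25;		/* Power of two: 1/x is exact.  */
  return x * (1.0 / x) == 1.0;	/* 1 here.  */
}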
/* Mask out the tz least significant bits of X of type TYPE where
   tz is the number of trailing zeroes in Y.  */

static wide_int
mask_with_tz (tree type, const wide_int &x, const wide_int &y)
{
  int tz = wi::ctz (y);
  if (tz > 0)
    return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
  return x;
}
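
/* Illustrative sketch added for exposition (hypothetical helper, not part
   of GCC): the same masking on a plain unsigned value.  Y & -Y isolates
   Y's lowest set bit, and negating that gives a mask clearing as many
   low bits of X as Y has trailing zeroes.  */

static unsigned
mask_with_tz_demo (unsigned x, unsigned y)
{
  if (y == 0)
    return x;			/* No trailing-zero count defined.  */
  unsigned low = y & -y;	/* 1 << ctz (y).  */
  return x & -low;		/* Clear the ctz (y) low bits of x.  */
}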
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
                                         strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
                                          TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1),
                                          strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
                                         strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
                                          TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1),
                                          strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                        strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
                                        strict_overflow_p);

    case CALL_EXPR:
      {
        tree fndecl = get_callee_fndecl (t);
        if (!fndecl) return false;
        if (flag_delete_null_pointer_checks && !flag_check_new
            && DECL_IS_OPERATOR_NEW (fndecl)
            && !TREE_NOTHROW (fndecl))
          return true;
        if (flag_delete_null_pointer_checks
            && lookup_attribute ("returns_nonnull",
                                 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
          return true;
        return alloca_call_p (t);
      }

    default:
      break;
    }

  return false;
}
/* Return true when T is an address and is known to be nonzero.
   Handle warnings about undefined signed overflow.  */

bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
                            "determining that expression is always "
                            "non-zero"),
                           WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
/* Return true if T is known not to be equal to an integer W.  */

bool
expr_not_equal_to (tree t, const wide_int &w)
{
  wide_int min, max, nz;
  value_range_type rtype;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return wi::ne_p (t, w);

    case SSA_NAME:
      if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
        return false;
      rtype = get_range_info (t, &min, &max);
      if (rtype == VR_RANGE)
        {
          if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
            return true;
          if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
            return true;
        }
      else if (rtype == VR_ANTI_RANGE
               && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
               && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
        return true;
      /* If T has some known zero bits and W has any of those bits set,
         then T is known not to be equal to W.  */
      if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
                              TYPE_PRECISION (TREE_TYPE (t))), 0))
        return true;
      return false;

    default:
      return false;
    }
}
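
/* Illustrative sketch added for exposition (hypothetical helper, not part
   of GCC): the known-zero-bits test above on plain unsigned values.
   NONZERO is a mask of the bits T may have set, so (t & ~nonzero) == 0 by
   assumption; any bit of W outside that mask proves T != W.  */

static int
expr_ne_demo (unsigned nonzero, unsigned w)
{
  return (w & ~nonzero) != 0;	/* 1 means provably different.  */
}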
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  LOC is the location of the resulting expression.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

tree
fold_binary_loc (location_t loc,
                 enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree arg0, arg1, tem;
  tree t1 = NULL_TREE;
  bool strict_overflow_p;

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 2
              && op0 != NULL_TREE
              && op1 != NULL_TREE);

  arg0 = op0;
  arg1 = op1;

  /* Strip any conversions that don't change the mode.  This is
     safe for every expression, except for a comparison expression
     because its signedness is derived from its operands.  So, in
     the latter case, only strip conversions that don't change the
     signedness.  MIN_EXPR/MAX_EXPR also need signedness of arguments
     preserved.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation
     of the arguments so that their form can be studied.  In any
     case, the appropriate type conversions should be put back in
     the tree that will get out of the constant folder.  */

  if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
    {
      STRIP_SIGN_NOPS (arg0);
      STRIP_SIGN_NOPS (arg1);
    }
  else
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
    }

  /* Note that TREE_CONSTANT isn't enough: static var addresses are
     constant but we can't do arithmetic on them.  */
  if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
    {
      tem = const_binop (code, type, arg0, arg1);
      if (tem != NULL_TREE)
        {
          if (TREE_TYPE (tem) != type)
            tem = fold_convert_loc (loc, type, tem);
          return tem;
        }
    }

  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, code, type, op1, op0);

  /* Likewise if this is a comparison, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (kind == tcc_comparison
      && tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);

  tem = generic_simplify (loc, code, type, op0, op1);
  if (tem)
    return tem;

  /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     one of the operands is a comparison and the other is a comparison, a
     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
     code below would make the expression more complex.  Change it to a
     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && TREE_CODE (type) != VECTOR_TYPE
      && ((truth_value_p (TREE_CODE (arg0))
           && (truth_value_p (TREE_CODE (arg1))
               || (TREE_CODE (arg1) == BIT_AND_EXPR
                   && integer_onep (TREE_OPERAND (arg1, 1)))))
          || (truth_value_p (TREE_CODE (arg1))
              && (truth_value_p (TREE_CODE (arg0))
                  || (TREE_CODE (arg0) == BIT_AND_EXPR
                      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
                             : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
                             : TRUTH_XOR_EXPR,
                             boolean_type_node,
                             fold_convert_loc (loc, boolean_type_node, arg0),
                             fold_convert_loc (loc, boolean_type_node, arg1));

      if (code == EQ_EXPR)
        tem = invert_truthvalue_loc (loc, tem);

      return fold_convert_loc (loc, type, tem);
    }

  if (TREE_CODE_CLASS (code) == tcc_binary
      || TREE_CODE_CLASS (code) == tcc_comparison)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
        {
          tem = fold_build2_loc (loc, code, type,
                                 fold_convert_loc (loc, TREE_TYPE (op0),
                                                   TREE_OPERAND (arg0, 1)), op1);
          return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
                             tem);
        }

      if (TREE_CODE (arg1) == COMPOUND_EXPR
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_build2_loc (loc, code, type, op0,
                                 fold_convert_loc (loc, TREE_TYPE (op1),
                                                   TREE_OPERAND (arg1, 1)));
          return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
                             tem);
        }

      if (TREE_CODE (arg0) == COND_EXPR
          || TREE_CODE (arg0) == VEC_COND_EXPR
          || COMPARISON_CLASS_P (arg0))
        {
          tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
                                                     arg0, arg1,
                                                     /*cond_first_p=*/1);
          if (tem != NULL_TREE)
            return tem;
        }

      if (TREE_CODE (arg1) == COND_EXPR
          || TREE_CODE (arg1) == VEC_COND_EXPR
          || COMPARISON_CLASS_P (arg1))
        {
          tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
                                                     arg1, arg0,
                                                     /*cond_first_p=*/0);
          if (tem != NULL_TREE)
            return tem;
        }
    }

  switch (code)
    {
    case MEM_REF:
      /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2].  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
        {
          tree iref = TREE_OPERAND (arg0, 0);
          return fold_build2 (MEM_REF, type,
                              TREE_OPERAND (iref, 0),
                              int_const_binop (PLUS_EXPR, arg1,
                                               TREE_OPERAND (iref, 1)));
        }

      /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2].  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && handled_component_p (TREE_OPERAND (arg0, 0)))
        {
          tree base;
          HOST_WIDE_INT coffset;
          base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
                                                &coffset);
          if (!base)
            return NULL_TREE;
          return fold_build2 (MEM_REF, type,
                              build_fold_addr_expr (base),
                              int_const_binop (PLUS_EXPR, arg1,
                                               size_int (coffset)));
        }

      return NULL_TREE;
    case POINTER_PLUS_EXPR:
      /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
          && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
        return fold_convert_loc (loc, type,
                                 fold_build2_loc (loc, PLUS_EXPR, sizetype,
                                                  fold_convert_loc (loc, sizetype,
                                                                    arg1),
                                                  fold_convert_loc (loc, sizetype,
                                                                    arg0)));
      return NULL_TREE;

    case PLUS_EXPR:
      if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
        {
          /* X + (X / CST) * -CST is X % CST.  */
          if (TREE_CODE (arg1) == MULT_EXPR
              && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
              && operand_equal_p (arg0,
                                  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
            {
              tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
              tree cst1 = TREE_OPERAND (arg1, 1);
              tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
                                          cst1, cst0);
              if (sum && integer_zerop (sum))
                return fold_convert_loc (loc, type,
                                         fold_build2_loc (loc, TRUNC_MOD_EXPR,
                                                          TREE_TYPE (arg0), arg0,
                                                          cst0));
            }
        }

      /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
         one.  Make sure the type is not saturating and has the signedness of
         the stripped operands, as fold_plusminus_mult_expr will re-associate.
         ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
           || TREE_CODE (arg1) == MULT_EXPR)
          && !TYPE_SATURATING (type)
          && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
          && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
          && (!FLOAT_TYPE_P (type) || flag_associative_math))
        {
          tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
          if (tem)
            return tem;
        }

      if (! FLOAT_TYPE_P (type))
        {
          /* Reassociate (plus (plus (mult) (foo)) (mult)) as
             (plus (plus (mult) (mult)) (foo)) so that we can
             take advantage of the factoring cases below.  */
          if (ANY_INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type)
              && (((TREE_CODE (arg0) == PLUS_EXPR
                    || TREE_CODE (arg0) == MINUS_EXPR)
                   && TREE_CODE (arg1) == MULT_EXPR)
                  || ((TREE_CODE (arg1) == PLUS_EXPR
                       || TREE_CODE (arg1) == MINUS_EXPR)
                      && TREE_CODE (arg0) == MULT_EXPR)))
            {
              tree parg0, parg1, parg, marg;
              enum tree_code pcode;

              if (TREE_CODE (arg1) == MULT_EXPR)
                parg = arg0, marg = arg1;
              else
                parg = arg1, marg = arg0;
              pcode = TREE_CODE (parg);
              parg0 = TREE_OPERAND (parg, 0);
              parg1 = TREE_OPERAND (parg, 1);
              STRIP_NOPS (parg0);
              STRIP_NOPS (parg1);

              if (TREE_CODE (parg0) == MULT_EXPR
                  && TREE_CODE (parg1) != MULT_EXPR)
                return fold_build2_loc (loc, pcode, type,
                                        fold_build2_loc (loc, PLUS_EXPR, type,
                                                         fold_convert_loc (loc, type,
                                                                           parg0),
                                                         fold_convert_loc (loc, type,
                                                                           marg)),
                                        fold_convert_loc (loc, type, parg1));
              if (TREE_CODE (parg0) != MULT_EXPR
                  && TREE_CODE (parg1) == MULT_EXPR)
                return
                  fold_build2_loc (loc, PLUS_EXPR, type,
                                   fold_convert_loc (loc, type, parg0),
                                   fold_build2_loc (loc, pcode, type,
                                                    fold_convert_loc (loc, type, marg),
                                                    fold_convert_loc (loc, type,
                                                                      parg1)));
            }
        }
      else
        {
          /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
             to __complex__ ( x, y ).  This is not the same for SNaNs or
             if signed zeros are involved.  */
          if (!HONOR_SNANS (element_mode (arg0))
              && !HONOR_SIGNED_ZEROS (element_mode (arg0))
              && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
            {
              tree rtype = TREE_TYPE (TREE_TYPE (arg0));
              tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
              tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
              bool arg0rz = false, arg0iz = false;
              if ((arg0r && (arg0rz = real_zerop (arg0r)))
                  || (arg0i && (arg0iz = real_zerop (arg0i))))
                {
                  tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
                  tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
                  if (arg0rz && arg1i && real_zerop (arg1i))
                    {
                      tree rp = arg1r ? arg1r
                        : build1 (REALPART_EXPR, rtype, arg1);
                      tree ip = arg0i ? arg0i
                        : build1 (IMAGPART_EXPR, rtype, arg0);
                      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                    }
                  else if (arg0iz && arg1r && real_zerop (arg1r))
                    {
                      tree rp = arg0r ? arg0r
                        : build1 (REALPART_EXPR, rtype, arg0);
                      tree ip = arg1i ? arg1i
                        : build1 (IMAGPART_EXPR, rtype, arg1);
                      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                    }
                }
            }

          if (flag_unsafe_math_optimizations
              && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
              && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
              && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
            return tem;

          /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
             We associate floats only if the user has specified
             -fassociative-math.  */
          if (flag_associative_math
              && TREE_CODE (arg1) == PLUS_EXPR
              && TREE_CODE (arg0) != MULT_EXPR)
            {
              tree tree10 = TREE_OPERAND (arg1, 0);
              tree tree11 = TREE_OPERAND (arg1, 1);
              if (TREE_CODE (tree11) == MULT_EXPR
                  && TREE_CODE (tree10) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
                  return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
                }
            }
          /* Convert (b*c + d*e) + a into b*c + (d*e +a).
             We associate floats only if the user has specified
             -fassociative-math.  */
          if (flag_associative_math
              && TREE_CODE (arg0) == PLUS_EXPR
              && TREE_CODE (arg1) != MULT_EXPR)
            {
              tree tree00 = TREE_OPERAND (arg0, 0);
              tree tree01 = TREE_OPERAND (arg0, 1);
              if (TREE_CODE (tree01) == MULT_EXPR
                  && TREE_CODE (tree00) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
                  return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
                }
            }
        }

    bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
         is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
         is a rotate of A by B bits.  */
      {
        enum tree_code code0, code1;
        tree rtype;
        code0 = TREE_CODE (arg0);
        code1 = TREE_CODE (arg1);
        if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
             || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
            && operand_equal_p (TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg1, 0), 0)
            && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
                TYPE_UNSIGNED (rtype))
            /* Only create rotates in complete modes.  Other cases are not
               expanded properly.  */
            && (element_precision (rtype)
                == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
          {
            tree tree01, tree11;
            enum tree_code code01, code11;

            tree01 = TREE_OPERAND (arg0, 1);
            tree11 = TREE_OPERAND (arg1, 1);
            STRIP_NOPS (tree01);
            STRIP_NOPS (tree11);
            code01 = TREE_CODE (tree01);
            code11 = TREE_CODE (tree11);
            if (code01 == INTEGER_CST
                && code11 == INTEGER_CST
                && (wi::to_widest (tree01) + wi::to_widest (tree11)
                    == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
              {
                tem = build2_loc (loc, LROTATE_EXPR,
                                  TREE_TYPE (TREE_OPERAND (arg0, 0)),
                                  TREE_OPERAND (arg0, 0),
                                  code0 == LSHIFT_EXPR
                                  ? TREE_OPERAND (arg0, 1)
                                  : TREE_OPERAND (arg1, 1));
                return fold_convert_loc (loc, type, tem);
              }
            else if (code11 == MINUS_EXPR)
              {
                tree tree110, tree111;
                tree110 = TREE_OPERAND (tree11, 0);
                tree111 = TREE_OPERAND (tree11, 1);
                STRIP_NOPS (tree110);
                STRIP_NOPS (tree111);
                if (TREE_CODE (tree110) == INTEGER_CST
                    && 0 == compare_tree_int (tree110,
                                              element_precision
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree01, tree111, 0))
                  return
                    fold_convert_loc (loc, type,
                                      build2 ((code0 == LSHIFT_EXPR
                                               ? LROTATE_EXPR
                                               : RROTATE_EXPR),
                                              TREE_TYPE (TREE_OPERAND (arg0, 0)),
                                              TREE_OPERAND (arg0, 0),
                                              TREE_OPERAND (arg0, 1)));
              }
            else if (code01 == MINUS_EXPR)
              {
                tree tree010, tree011;
                tree010 = TREE_OPERAND (tree01, 0);
                tree011 = TREE_OPERAND (tree01, 1);
                STRIP_NOPS (tree010);
                STRIP_NOPS (tree011);
                if (TREE_CODE (tree010) == INTEGER_CST
                    && 0 == compare_tree_int (tree010,
                                              element_precision
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree11, tree011, 0))
                  return fold_convert_loc
                    (loc, type,
                     build2 ((code0 != LSHIFT_EXPR
                              ? LROTATE_EXPR
                              : RROTATE_EXPR),
                             TREE_TYPE (TREE_OPERAND (arg0, 0)),
                             TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
              }
          }
      }
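
      /* Worked example added for exposition (an illustration of the general
         rule, not GCC source): for unsigned A of width W and 0 < C < W the
         two shifted fields occupy disjoint bit positions, so

             (A << C) + (A >> (W - C)) == (A << C) | (A >> (W - C))

         and both equal A rotated left by C bits, which is what the code
         above builds as an LROTATE_EXPR/RROTATE_EXPR.  */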
    associate:
      /* In most languages, can't associate operations on floats through
         parentheses.  Rather than remember where the parentheses were, we
         don't associate floats at all, unless the user has specified
         -fassociative-math.
         And, we need to make sure type is not saturating.  */

      if ((! FLOAT_TYPE_P (type) || flag_associative_math)
          && !TYPE_SATURATING (type))
        {
          tree var0, con0, lit0, minus_lit0;
          tree var1, con1, lit1, minus_lit1;
          tree atype = type;
          bool ok = true;

          /* Split both trees into variables, constants, and literals.  Then
             associate each group together, the constants with literals,
             then the result with variables.  This increases the chances of
             literals being recombined later and of generating relocatable
             expressions for the sum of a constant and literal.  */
          var0 = split_tree (loc, arg0, type, code,
                             &con0, &lit0, &minus_lit0, 0);
          var1 = split_tree (loc, arg1, type, code,
                             &con1, &lit1, &minus_lit1, code == MINUS_EXPR);

          /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
          if (code == MINUS_EXPR)
            code = PLUS_EXPR;

          /* With undefined overflow prefer doing association in a type
             which wraps on overflow, if that is one of the operand types.  */
          if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
              || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
            {
              if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
                  && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
                atype = TREE_TYPE (arg0);
              else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
                       && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
                atype = TREE_TYPE (arg1);
              gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
            }

          /* With undefined overflow we can only associate constants with one
             variable, and constants whose association doesn't overflow.  */
          if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
              || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
            {
              if (var0 && var1)
                {
                  tree tmp0 = var0;
                  tree tmp1 = var1;
                  bool one_neg = false;

                  if (TREE_CODE (tmp0) == NEGATE_EXPR)
                    {
                      tmp0 = TREE_OPERAND (tmp0, 0);
                      one_neg = !one_neg;
                    }
                  if (CONVERT_EXPR_P (tmp0)
                      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
                      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
                          <= TYPE_PRECISION (atype)))
                    tmp0 = TREE_OPERAND (tmp0, 0);
                  if (TREE_CODE (tmp1) == NEGATE_EXPR)
                    {
                      tmp1 = TREE_OPERAND (tmp1, 0);
                      one_neg = !one_neg;
                    }
                  if (CONVERT_EXPR_P (tmp1)
                      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
                      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
                          <= TYPE_PRECISION (atype)))
                    tmp1 = TREE_OPERAND (tmp1, 0);
                  /* The only case we can still associate with two variables
                     is if they cancel out.  */
                  if (!one_neg
                      || !operand_equal_p (tmp0, tmp1, 0))
                    ok = false;
                }
            }

          /* Only do something if we found more than two objects.  Otherwise,
             nothing has changed and we risk infinite recursion.  */
          if (ok
              && (2 < ((var0 != 0) + (var1 != 0)
                       + (con0 != 0) + (con1 != 0)
                       + (lit0 != 0) + (lit1 != 0)
                       + (minus_lit0 != 0) + (minus_lit1 != 0))))
            {
              bool any_overflows = false;
              if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
              if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
              if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
              if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
              var0 = associate_trees (loc, var0, var1, code, atype);
              con0 = associate_trees (loc, con0, con1, code, atype);
              lit0 = associate_trees (loc, lit0, lit1, code, atype);
              minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
                                            code, atype);

              /* Preserve the MINUS_EXPR if the negative part of the literal is
                 greater than the positive part.  Otherwise, the multiplicative
                 folding code (i.e extract_muldiv) may be fooled in case
                 unsigned constants are subtracted, like in the following
                 example: ((X*2 + 4) - 8U)/2.  */
              if (minus_lit0 && lit0)
                {
                  if (TREE_CODE (lit0) == INTEGER_CST
                      && TREE_CODE (minus_lit0) == INTEGER_CST
                      && tree_int_cst_lt (lit0, minus_lit0))
                    {
                      minus_lit0 = associate_trees (loc, minus_lit0, lit0,
                                                    MINUS_EXPR, atype);
                      lit0 = 0;
                    }
                  else
                    {
                      lit0 = associate_trees (loc, lit0, minus_lit0,
                                              MINUS_EXPR, atype);
                      minus_lit0 = 0;
                    }
                }

              /* Don't introduce overflows through reassociation.  */
              if (!any_overflows
                  && ((lit0 && TREE_OVERFLOW_P (lit0))
                      || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
                return NULL_TREE;

              if (minus_lit0)
                {
                  if (con0 == 0)
                    return
                      fold_convert_loc (loc, type,
                                        associate_trees (loc, var0, minus_lit0,
                                                         MINUS_EXPR, atype));
                  else
                    {
                      con0 = associate_trees (loc, con0, minus_lit0,
                                              MINUS_EXPR, atype);
                      return
                        fold_convert_loc (loc, type,
                                          associate_trees (loc, var0, con0,
                                                           PLUS_EXPR, atype));
                    }
                }

              con0 = associate_trees (loc, con0, lit0, code, atype);
              return
                fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
                                                              code, atype));
            }
        }

      return NULL_TREE;

    case MINUS_EXPR:
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && negate_expr_p (op1)
          && reorder_operands_p (arg0, arg1))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                negate_expr (op1),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)));

      /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
         __complex__ ( x, -y ).  This is not the same for SNaNs or if
         signed zeros are involved.  */
      if (!HONOR_SNANS (element_mode (arg0))
          && !HONOR_SIGNED_ZEROS (element_mode (arg0))
          && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
        {
          tree rtype = TREE_TYPE (TREE_TYPE (arg0));
          tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
          tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
          bool arg0rz = false, arg0iz = false;
          if ((arg0r && (arg0rz = real_zerop (arg0r)))
              || (arg0i && (arg0iz = real_zerop (arg0i))))
            {
              tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
              tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
              if (arg0rz && arg1i && real_zerop (arg1i))
                {
                  tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
                                             arg1r ? arg1r
                                             : build1 (REALPART_EXPR, rtype, arg1));
                  tree ip = arg0i ? arg0i
                    : build1 (IMAGPART_EXPR, rtype, arg0);
                  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                }
              else if (arg0iz && arg1r && real_zerop (arg1r))
                {
                  tree rp = arg0r ? arg0r
                    : build1 (REALPART_EXPR, rtype, arg0);
                  tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
                                             arg1i ? arg1i
                                             : build1 (IMAGPART_EXPR, rtype, arg1));
                  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                }
            }
        }

      /* A - B -> A + (-B) if B is easily negatable.  */
      if (negate_expr_p (op1)
          && ! TYPE_OVERFLOW_SANITIZED (type)
          && ((FLOAT_TYPE_P (type)
               /* Avoid this transformation if B is a positive REAL_CST.  */
               && (TREE_CODE (op1) != REAL_CST
                   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
              || INTEGRAL_TYPE_P (type)))
        return fold_build2_loc (loc, PLUS_EXPR, type,
                                fold_convert_loc (loc, type, arg0),
                                negate_expr (op1));

      /* Fold &a[i] - &a[j] to i-j.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
          && TREE_CODE (arg1) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
        {
          tree tem = fold_addr_of_array_ref_difference (loc, type,
                                                        TREE_OPERAND (arg0, 0),
                                                        TREE_OPERAND (arg1, 0));
          if (tem)
            return tem;
        }

      if (FLOAT_TYPE_P (type)
          && flag_unsafe_math_optimizations
          && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
          && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
          && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
        return tem;

      /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
         one.  Make sure the type is not saturating and has the signedness of
         the stripped operands, as fold_plusminus_mult_expr will re-associate.
         ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
           || TREE_CODE (arg1) == MULT_EXPR)
          && !TYPE_SATURATING (type)
          && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
          && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
          && (!FLOAT_TYPE_P (type) || flag_associative_math))
        {
          tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
          if (tem)
            return tem;
        }

      goto associate;

    case MULT_EXPR:
      if (! FLOAT_TYPE_P (type))
        {
          /* Transform x * -C into -x * C if x is easily negatable.  */
          if (TREE_CODE (op1) == INTEGER_CST
              && tree_int_cst_sgn (op1) == -1
              && negate_expr_p (op0)
              && (tem = negate_expr (op1)) != op1
              && ! TREE_OVERFLOW (tem))
            return fold_build2_loc (loc, MULT_EXPR, type,
                                    fold_convert_loc (loc, type,
                                                      negate_expr (op0)), tem);

          strict_overflow_p = false;
          if (TREE_CODE (arg1) == INTEGER_CST
              && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                             &strict_overflow_p)))
            {
              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying "
                                        "multiplication"),
                                       WARN_STRICT_OVERFLOW_MISC);
              return fold_convert_loc (loc, type, tem);
            }

          /* Optimize z * conj(z) for integer complex numbers.  */
          if (TREE_CODE (arg0) == CONJ_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
            return fold_mult_zconjz (loc, type, arg1);
          if (TREE_CODE (arg1) == CONJ_EXPR
              && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
            return fold_mult_zconjz (loc, type, arg0);
        }
      else
        {
          /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
             This is not the same for NaNs or if signed zeros are
             involved.  */
          if (!HONOR_NANS (arg0)
              && !HONOR_SIGNED_ZEROS (element_mode (arg0))
              && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
              && TREE_CODE (arg1) == COMPLEX_CST
              && real_zerop (TREE_REALPART (arg1)))
            {
              tree rtype = TREE_TYPE (TREE_TYPE (arg0));
              if (real_onep (TREE_IMAGPART (arg1)))
                return
                  fold_build2_loc (loc, COMPLEX_EXPR, type,
                                   negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
                                                                 rtype, arg0)),
                                   fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
              else if (real_minus_onep (TREE_IMAGPART (arg1)))
                return
                  fold_build2_loc (loc, COMPLEX_EXPR, type,
                                   fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
                                   negate_expr (fold_build1_loc (loc, REALPART_EXPR,
                                                                 rtype, arg0)));
            }

          /* Optimize z * conj(z) for floating point complex numbers.
             Guarded by flag_unsafe_math_optimizations as non-finite
             imaginary components don't produce scalar results.  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg0) == CONJ_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
            return fold_mult_zconjz (loc, type, arg1);
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg1) == CONJ_EXPR
              && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
            return fold_mult_zconjz (loc, type, arg0);
        }
      goto associate;

    case BIT_IOR_EXPR:
      /* Canonicalize (X & C1) | C2.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          int width = TYPE_PRECISION (type), w;
          wide_int c1 = TREE_OPERAND (arg0, 1);
          wide_int c2 = arg1;

          /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
          if ((c1 & c2) == c1)
            return omit_one_operand_loc (loc, type, arg1,
                                         TREE_OPERAND (arg0, 0));

          wide_int msk = wi::mask (width, false,
                                   TYPE_PRECISION (TREE_TYPE (arg1)));

          /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
          if (msk.and_not (c1 | c2) == 0)
            return fold_build2_loc (loc, BIT_IOR_EXPR, type,
                                    TREE_OPERAND (arg0, 0), arg1);

          /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
             unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
             mode which allows further optimizations.  */
          c1 &= msk;
          c2 &= msk;
          wide_int c3 = c1.and_not (c2);
          for (w = BITS_PER_UNIT; w <= width; w <<= 1)
            {
              wide_int mask = wi::mask (w, false,
                                        TYPE_PRECISION (type));
              if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
                {
                  c3 = mask;
                  break;
                }
            }

          if (c3 != c1)
            return fold_build2_loc (loc, BIT_IOR_EXPR, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, type,
                                                     TREE_OPERAND (arg0, 0),
                                                     wide_int_to_tree (type,
                                                                       c3)),
                                    arg1);
        }

      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_XOR_EXPR:
10033 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10034 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10035 && INTEGRAL_TYPE_P (type
)
10036 && integer_onep (TREE_OPERAND (arg0
, 1))
10037 && integer_onep (arg1
))
10038 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
,
10039 build_zero_cst (TREE_TYPE (arg0
)));
10041 /* See if this can be simplified into a rotate first. If that
10042 is unsuccessful continue in the association code. */
    case BIT_AND_EXPR:
      /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && INTEGRAL_TYPE_P (type)
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_onep (arg1))
        {
          tree tem2;
          tem = TREE_OPERAND (arg0, 0);
          tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
          tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
                                  tem, tem2);
          return fold_build2_loc (loc, EQ_EXPR, type, tem2,
                                  build_zero_cst (TREE_TYPE (tem)));
        }
      /* Fold ~X & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && INTEGRAL_TYPE_P (type)
          && integer_onep (arg1))
        {
          tree tem2;
          tem = TREE_OPERAND (arg0, 0);
          tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
          tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
                                  tem, tem2);
          return fold_build2_loc (loc, EQ_EXPR, type, tem2,
                                  build_zero_cst (TREE_TYPE (tem)));
        }
      /* Fold !X & 1 as X == 0.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && integer_onep (arg1))
        {
          tem = TREE_OPERAND (arg0, 0);
          return fold_build2_loc (loc, EQ_EXPR, type, tem,
                                  build_zero_cst (TREE_TYPE (tem)));
        }
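      /* Each of the three folds above rewrites a test of the low bit into
         an equality test; e.g. (x ^ 1) & 1 is 1 exactly when the low bit
         of x is 0, i.e. when (x & 1) == 0.  */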
      /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
         multiple of 1 << CST.  */
      if (TREE_CODE (arg1) == INTEGER_CST)
        {
          wide_int cst1 = arg1;
          wide_int ncst1 = -cst1;
          if ((cst1 & ncst1) == ncst1
              && multiple_of_p (type, arg0,
                                wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
            return fold_convert_loc (loc, type, arg0);
        }
      /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
         bits from CST2.  */
      if (TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) == MULT_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          wide_int warg1 = arg1;
          wide_int masked
            = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));

          if (masked == 0)
            return omit_two_operands_loc (loc, type, build_zero_cst (type),
                                          arg0, arg1);
          else if (masked != warg1)
            {
              /* Avoid the transform if arg1 is a mask of some
                 mode which allows further optimizations.  */
              int pop = wi::popcount (warg1);
              if (!(pop >= BITS_PER_UNIT
                    && exact_log2 (pop) != -1
                    && wi::mask (pop, false, warg1.get_precision ()) == warg1))
                return fold_build2_loc (loc, code, type, op0,
                                        wide_int_to_tree (type, masked));
            }
        }
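      /* For example, in (X * 4) & 7 the product has its two low bits
         clear, so the masked constant is 7 & ~3 = 4 and the expression is
         rewritten as (X * 4) & 4; had the constant been 3, the whole
         expression would fold to zero outright.  */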
      /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
         ((A & N) + B) & M -> (A + B) & M
         Similarly if (N & M) == 0,
         ((A | N) + B) & M -> (A + B) & M
         and for - instead of + (or unary - instead of +)
         and/or ^ instead of |.
         If B is constant and (B & M) == 0, fold into A & M.  */
      if (TREE_CODE (arg1) == INTEGER_CST)
        {
          wide_int cst1 = arg1;
          if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
              && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
              && (TREE_CODE (arg0) == PLUS_EXPR
                  || TREE_CODE (arg0) == MINUS_EXPR
                  || TREE_CODE (arg0) == NEGATE_EXPR)
              && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
                  || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
            {
              tree pmop[2];
              int which = 0;
              wide_int cst0;

              /* Now we know that arg0 is (C + D) or (C - D) or
                 -C and arg1 (M) is == (1LL << cst) - 1.
                 Store C into PMOP[0] and D into PMOP[1].  */
              pmop[0] = TREE_OPERAND (arg0, 0);
              pmop[1] = NULL;
              if (TREE_CODE (arg0) != NEGATE_EXPR)
                {
                  pmop[1] = TREE_OPERAND (arg0, 1);
                  which = 1;
                }

              if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
                which = -1;

              for (; which >= 0; which--)
                switch (TREE_CODE (pmop[which]))
                  {
                  case BIT_AND_EXPR:
                  case BIT_IOR_EXPR:
                  case BIT_XOR_EXPR:
                    if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
                        != INTEGER_CST)
                      break;
                    cst0 = TREE_OPERAND (pmop[which], 1);
                    cst0 &= cst1;
                    if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
                      {
                        if (cst0 != cst1)
                          break;
                      }
                    else if (cst0 != 0)
                      break;
                    /* If C or D is of the form (A & N) where
                       (N & M) == M, or of the form (A | N) or
                       (A ^ N) where (N & M) == 0, replace it with A.  */
                    pmop[which] = TREE_OPERAND (pmop[which], 0);
                    break;
                  case INTEGER_CST:
                    /* If C or D is a N where (N & M) == 0, it can be
                       omitted (assumed 0).  */
                    if ((TREE_CODE (arg0) == PLUS_EXPR
                         || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
                        && (cst1 & pmop[which]) == 0)
                      pmop[which] = NULL;
                    break;
                  default:
                    break;
                  }

              /* Only build anything new if we optimized one or both
                 arguments above.  */
              if (pmop[0] != TREE_OPERAND (arg0, 0)
                  || (TREE_CODE (arg0) != NEGATE_EXPR
                      && pmop[1] != TREE_OPERAND (arg0, 1)))
                {
                  tree utype = TREE_TYPE (arg0);
                  if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
                    {
                      /* Perform the operations in a type that has defined
                         overflow behavior.  */
                      utype = unsigned_type_for (TREE_TYPE (arg0));
                      if (pmop[0] != NULL)
                        pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
                      if (pmop[1] != NULL)
                        pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
                    }

                  if (TREE_CODE (arg0) == NEGATE_EXPR)
                    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
                  else if (TREE_CODE (arg0) == PLUS_EXPR)
                    {
                      if (pmop[0] != NULL && pmop[1] != NULL)
                        tem = fold_build2_loc (loc, PLUS_EXPR, utype,
                                               pmop[0], pmop[1]);
                      else if (pmop[0] != NULL)
                        tem = pmop[0];
                      else if (pmop[1] != NULL)
                        tem = pmop[1];
                      else
                        return build_int_cst (type, 0);
                    }
                  else if (pmop[0] == NULL)
                    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
                  else
                    tem = fold_build2_loc (loc, MINUS_EXPR, utype,
                                           pmop[0], pmop[1]);
                  /* TEM is now the new binary +, - or unary - replacement.  */
                  tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
                                         fold_convert_loc (loc, utype, arg1));
                  return fold_convert_loc (loc, type, tem);
                }
            }
        }
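      /* For example, with M = 0xf, ((A & 0xff) + B) & 0xf folds to
         (A + B) & 0xf: N = 0xff satisfies (N & M) == M, and the low four
         bits of the sum depend only on the low four bits of the
         addends.  */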
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
          && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
        {
          prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));

          wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
          if (mask == -1)
            return
              fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
        }

      goto associate;
    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
         of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
          && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
          && real_zerop (arg1))
        return NULL_TREE;

      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold_build2_loc (loc, RDIV_EXPR, type,
                                TREE_OPERAND (arg0, 0),
                                negate_expr (arg1));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold_build2_loc (loc, RDIV_EXPR, type,
                                negate_expr (arg0),
                                TREE_OPERAND (arg1, 0));
      return NULL_TREE;
    case TRUNC_DIV_EXPR:
      /* Fall through */

    case FLOOR_DIV_EXPR:
      /* Simplify A / (B << N) where A and B are positive and B is
         a power of 2, to A >> (N + log2(B)).  */
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == LSHIFT_EXPR
          && (TYPE_UNSIGNED (type)
              || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
        {
          tree sval = TREE_OPERAND (arg1, 0);
          if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
            {
              tree sh_cnt = TREE_OPERAND (arg1, 1);
              tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
                                         wi::exact_log2 (sval));

              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying A / (B << N)"),
                                       WARN_STRICT_OVERFLOW_MISC);

              sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
                                        sh_cnt, pow2);
              return fold_build2_loc (loc, RSHIFT_EXPR, type,
                                      fold_convert_loc (loc, type, arg0),
                                      sh_cnt);
            }
        }
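      /* For example, assuming A is unsigned (or provably nonnegative),
         A / (B << N) with B == 4 becomes A >> (N + 2), since log2(4) == 2
         and the divisor is a power of two shifted left by N.  */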
      /* Fall through */

    case ROUND_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_zerop (arg1))
        return NULL_TREE;

      /* Convert -A / -B to A / B when the type is signed and overflow is
         undefined.  */
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
          && TREE_CODE (arg0) == NEGATE_EXPR
          && negate_expr_p (op1))
        {
          if (INTEGRAL_TYPE_P (type))
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when distributing negation across "
                                    "division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_build2_loc (loc, code, type,
                                  fold_convert_loc (loc, type,
                                                    TREE_OPERAND (arg0, 0)),
                                  negate_expr (op1));
        }
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
          && TREE_CODE (arg1) == NEGATE_EXPR
          && negate_expr_p (op0))
        {
          if (INTEGRAL_TYPE_P (type))
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when distributing negation across "
                                    "division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_build2_loc (loc, code, type,
                                  negate_expr (op0),
                                  fold_convert_loc (loc, type,
                                                    TREE_OPERAND (arg1, 0)));
        }

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
         operation, EXACT_DIV_EXPR.

         Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
         At one time others generated faster code, it's not clear if they do
         after the last round of changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
          && multiple_of_p (type, arg0, arg1))
        return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
                                fold_convert (type, arg0),
                                fold_convert (type, arg1));

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                         &strict_overflow_p)))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_convert_loc (loc, type, tem);
        }

      return NULL_TREE;
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                         &strict_overflow_p)))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying modulus"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_convert_loc (loc, type, tem);
        }

      return NULL_TREE;
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      /* Since a negative shift count is not well-defined,
         don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
        return NULL_TREE;

      prec = element_precision (type);

      /* If we have a rotate of a bit operation with the rotate count and
         the second operand of the bit operation both constant,
         permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
          && (TREE_CODE (arg0) == BIT_AND_EXPR
              || TREE_CODE (arg0) == BIT_IOR_EXPR
              || TREE_CODE (arg0) == BIT_XOR_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return fold_build2_loc (loc, TREE_CODE (arg0), type,
                                fold_build2_loc (loc, code, type,
                                                 TREE_OPERAND (arg0, 0), arg1),
                                fold_build2_loc (loc, code, type,
                                                 TREE_OPERAND (arg0, 1), arg1));

      /* Two consecutive rotates adding up to some integer multiple
         of the precision of the type can be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) == RROTATE_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
                             prec) == 0)
        return TREE_OPERAND (arg0, 0);

      return NULL_TREE;
    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
        return fold_convert_loc (loc, type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
          /* Preserve sequence points.  */
          && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
         must be evaluated.  */
      if (integer_zerop (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
         case will be handled here.  */
      if (integer_zerop (arg0))
        return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
         means A >= Y && A != MAX, but in this case we know that
         A < X <= MAX.  */

      if (!TREE_SIDE_EFFECTS (arg0)
          && !TREE_SIDE_EFFECTS (arg1))
        {
          tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
          if (tem && !operand_equal_p (tem, arg0, 0))
            return fold_build2_loc (loc, code, type, tem, arg1);

          tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
          if (tem && !operand_equal_p (tem, arg1, 0))
            return fold_build2_loc (loc, code, type, arg0, tem);
        }

      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
          != NULL_TREE)
        return tem;

      return NULL_TREE;
    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or true.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return fold_convert_loc (loc, type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
          /* Preserve sequence points.  */
          && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant true, result is true, but we must
         evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
         TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X || X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg1);
      /* X || !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* (X && !Y) || (!X && Y) is X ^ Y */
      if (TREE_CODE (arg0) == TRUTH_AND_EXPR
          && TREE_CODE (arg1) == TRUTH_AND_EXPR)
        {
          tree a0, a1, l0, l1, n0, n1;

          a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
          a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));

          l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));

          n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
          n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);

          if ((operand_equal_p (n0, a0, 0)
               && operand_equal_p (n1, a1, 0))
              || (operand_equal_p (n0, a1, 0)
                  && operand_equal_p (n1, a0, 0)))
            return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
        }
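      /* Both operand pairings are accepted above because the truth table
         of (X && !Y) || (!X && Y), true exactly when X and Y differ, is
         that of X ^ Y.  */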
      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
          != NULL_TREE)
        return tem;

      return NULL_TREE;
    case TRUTH_XOR_EXPR:
      /* If the second arg is constant zero, drop it.  */
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If the second arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg1))
        {
          tem = invert_truthvalue_loc (loc, arg0);
          return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
        }
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* !X ^ X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg1);

      /* X ^ !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      return NULL_TREE;
    case EQ_EXPR:
    case NE_EXPR:
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
        return tem;

      /* bool_var != 1 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
          && code == NE_EXPR)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, TRUTH_NOT_EXPR,
                                                  TREE_TYPE (arg0), arg0));

      /* bool_var == 0 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
          && code == EQ_EXPR)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, TRUTH_NOT_EXPR,
                                                  TREE_TYPE (arg0), arg0));

      /* !exp != 0 becomes !exp.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
          && code == NE_EXPR)
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* Transform comparisons of the form X +- Y CMP X to Y CMP 0.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR
           || TREE_CODE (arg0) == POINTER_PLUS_EXPR
           || TREE_CODE (arg0) == MINUS_EXPR)
          && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
                                                                        0)),
                              arg1, 0)
          && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
              || POINTER_TYPE_P (TREE_TYPE (arg0))))
        {
          tree val = TREE_OPERAND (arg0, 1);
          val = fold_build2_loc (loc, code, type, val,
                                 build_int_cst (TREE_TYPE (val), 0));
          return omit_two_operands_loc (loc, type, val,
                                        TREE_OPERAND (arg0, 0), arg1);
        }

      /* Transform comparisons of the form X CMP X +- Y to Y CMP 0.  */
      if ((TREE_CODE (arg1) == PLUS_EXPR
           || TREE_CODE (arg1) == POINTER_PLUS_EXPR
           || TREE_CODE (arg1) == MINUS_EXPR)
          && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1,
                                                                        0)),
                              arg0, 0)
          && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
              || POINTER_TYPE_P (TREE_TYPE (arg1))))
        {
          tree val = TREE_OPERAND (arg1, 1);
          val = fold_build2_loc (loc, code, type, val,
                                 build_int_cst (TREE_TYPE (val), 0));
          return omit_two_operands_loc (loc, type, val,
                                        TREE_OPERAND (arg1, 0), arg0);
        }

      /* Transform comparisons of the form C - X CMP X if C % 2 == 1.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
          && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
                                                                        1)),
                              arg1, 0)
          && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
        return omit_two_operands_loc (loc, type,
                                      code == NE_EXPR
                                      ? boolean_true_node : boolean_false_node,
                                      TREE_OPERAND (arg0, 1), arg1);

      /* Transform comparisons of the form X CMP C - X if C % 2 == 1.  */
      if (TREE_CODE (arg1) == MINUS_EXPR
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == INTEGER_CST
          && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1,
                                                                        1)),
                              arg0, 0)
          && wi::extract_uhwi (TREE_OPERAND (arg1, 0), 0, 1) == 1)
        return omit_two_operands_loc (loc, type,
                                      code == NE_EXPR
                                      ? boolean_true_node : boolean_false_node,
                                      TREE_OPERAND (arg1, 1), arg0);
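      /* For example, (1 - X) == X can never hold for integers: the two
         sides have different parities whenever C is odd, so == folds to
         false and != folds to true.  */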
      /* If this is an EQ or NE comparison with zero and ARG0 is
         (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
         two operations, but the latter can be done in one less insn
         on machines that have only two-operand insns or on which a
         constant cannot be the first operand.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_zerop (arg1))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          if (TREE_CODE (arg00) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg00, 0)))
            {
              tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
                                          arg01, TREE_OPERAND (arg00, 1));
              tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
                                     build_int_cst (TREE_TYPE (arg0), 1));
              return fold_build2_loc (loc, code, type,
                                      fold_convert_loc (loc, TREE_TYPE (arg1),
                                                        tem),
                                      arg1);
            }
          else if (TREE_CODE (arg01) == LSHIFT_EXPR
                   && integer_onep (TREE_OPERAND (arg01, 0)))
            {
              tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
                                          arg00, TREE_OPERAND (arg01, 1));
              tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
                                     build_int_cst (TREE_TYPE (arg0), 1));
              return fold_build2_loc (loc, code, type,
                                      fold_convert_loc (loc, TREE_TYPE (arg1),
                                                        tem),
                                      arg1);
            }
        }
      /* If this is an NE or EQ comparison of zero against the result of a
         signed MOD operation whose second operand is a power of 2, make
         the MOD operation unsigned since it is simpler and equivalent.  */
      if (integer_zerop (arg1)
          && !TYPE_UNSIGNED (TREE_TYPE (arg0))
          && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
              || TREE_CODE (arg0) == CEIL_MOD_EXPR
              || TREE_CODE (arg0) == FLOOR_MOD_EXPR
              || TREE_CODE (arg0) == ROUND_MOD_EXPR)
          && integer_pow2p (TREE_OPERAND (arg0, 1)))
        {
          tree newtype = unsigned_type_for (TREE_TYPE (arg0));
          tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
                                         fold_convert_loc (loc, newtype,
                                                           TREE_OPERAND (arg0,
                                                                         0)),
                                         fold_convert_loc (loc, newtype,
                                                           TREE_OPERAND (arg0,
                                                                         1)));

          return fold_build2_loc (loc, code, type, newmod,
                                  fold_convert_loc (loc, newtype, arg1));
        }
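      /* For example, with int x, x % 4 == 0 becomes (unsigned) x % 4 == 0:
         a power-of-two modulus divides 2**precision, so the remainder is
         zero in the signed form exactly when it is zero in the unsigned
         form, and the unsigned MOD is cheaper to expand.  */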
      /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
         C1 is a valid shift constant, and C2 is a power of two, i.e.
         a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
             == INTEGER_CST
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && integer_zerop (arg1))
        {
          tree itype = TREE_TYPE (arg0);
          tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
          prec = TYPE_PRECISION (itype);

          /* Check for a valid shift count.  */
          if (wi::ltu_p (arg001, prec))
            {
              tree arg01 = TREE_OPERAND (arg0, 1);
              tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
              unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
              /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
                 can be rewritten as (X & (C2 << C1)) != 0.  */
              if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
                {
                  tem = fold_build2_loc (loc, LSHIFT_EXPR, itype,
                                         arg01, arg001);
                  tem = fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                         arg000, tem);
                  return fold_build2_loc (loc, code, type, tem,
                                          fold_convert_loc (loc, itype, arg1));
                }
              /* Otherwise, for signed (arithmetic) shifts,
                 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
                 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
              else if (!TYPE_UNSIGNED (itype))
                return fold_build2_loc (loc,
                                        code == EQ_EXPR ? GE_EXPR : LT_EXPR,
                                        type,
                                        arg000, build_int_cst (itype, 0));
              /* Otherwise, for unsigned (logical) shifts,
                 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
                 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
              else
                return omit_one_operand_loc (loc, type,
                                             code == EQ_EXPR
                                             ? integer_one_node
                                             : integer_zero_node,
                                             arg000);
            }
        }
      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
         Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
                                       TREE_TYPE (TREE_OPERAND (arg0, 1)),
                                       TREE_OPERAND (arg0, 1));
          tree dandnotc
            = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
                               fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
                               notc);
          tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
          if (integer_nonzerop (dandnotc))
            return omit_one_operand_loc (loc, type, rslt, arg0);
        }
      /* If this is a comparison of a field, we may be able to simplify it.  */
      if ((TREE_CODE (arg0) == COMPONENT_REF
           || TREE_CODE (arg0) == BIT_FIELD_REF)
          /* Handle the constant case even without -O
             to make sure the warnings are given.  */
          && (optimize || TREE_CODE (arg1) == INTEGER_CST))
        {
          t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
          if (t1)
            return t1;
        }
      /* Optimize comparisons of strlen vs zero to a compare of the
         first character of the string vs zero.  To wit,
         strlen(ptr) == 0 => *ptr == 0
         strlen(ptr) != 0 => *ptr != 0
         Other cases should reduce to one of these two (or a constant)
         due to the return value of strlen being unsigned.  */
      if (TREE_CODE (arg0) == CALL_EXPR
          && integer_zerop (arg1))
        {
          tree fndecl = get_callee_fndecl (arg0);

          if (fndecl
              && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
              && call_expr_nargs (arg0) == 1
              && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
                 == POINTER_TYPE)
            {
              tree iref = build_fold_indirect_ref_loc (loc,
                                                       CALL_EXPR_ARG (arg0,
                                                                      0));
              return fold_build2_loc (loc, code, type, iref,
                                      build_int_cst (TREE_TYPE (iref), 0));
            }
        }
      /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
         of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
      if (TREE_CODE (arg0) == RSHIFT_EXPR
          && integer_zerop (arg1)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree itype = TREE_TYPE (arg00);
          if (wi::eq_p (arg01, element_precision (itype) - 1))
            {
              if (TYPE_UNSIGNED (itype))
                {
                  itype = signed_type_for (itype);
                  arg00 = fold_convert_loc (loc, itype, arg00);
                }
              return fold_build2_loc (loc,
                                      code == EQ_EXPR ? GE_EXPR : LT_EXPR,
                                      type, arg00, build_zero_cst (itype));
            }
        }
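      /* For example, for a 32-bit int x, (x >> 31) != 0 tests exactly the
         sign bit and so folds to x < 0; likewise (x >> 31) == 0 becomes
         x >= 0.  */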
      /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
         (X & C) == 0 when C is a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
          && integer_zerop (arg1)
          && integer_pow2p (TREE_OPERAND (arg0, 1)))
        {
          tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
                                 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
                                 TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
                                  type, tem,
                                  fold_convert_loc (loc, TREE_TYPE (arg0),
                                                    arg1));
        }
      /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
         constant C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
          && integer_zerop (arg1)
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
                                  type, arg00,
                                  build_int_cst (TREE_TYPE (arg00), 0));
        }
      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
         when C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
          && integer_zerop (arg1)
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
        {
          tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
          tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
                                 arg000, TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
                                  type, tem,
                                  build_int_cst (TREE_TYPE (tem), 0));
        }
      if (integer_zerop (arg1)
          && tree_expr_nonzero_p (arg0))
        {
          tree res = constant_boolean_node (code == NE_EXPR, type);
          return omit_one_operand_loc (loc, type, res, arg0);
        }
      /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == BIT_AND_EXPR)
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg10 = TREE_OPERAND (arg1, 0);
          tree arg11 = TREE_OPERAND (arg1, 1);
          tree itype = TREE_TYPE (arg0);

          if (operand_equal_p (arg01, arg11, 0))
            return fold_build2_loc (loc, code, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                                     fold_build2_loc (loc,
                                                                      BIT_XOR_EXPR,
                                                                      itype,
                                                                      arg00,
                                                                      arg10),
                                                     arg01),
                                    build_zero_cst (itype));

          if (operand_equal_p (arg01, arg10, 0))
            return fold_build2_loc (loc, code, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                                     fold_build2_loc (loc,
                                                                      BIT_XOR_EXPR,
                                                                      itype,
                                                                      arg00,
                                                                      arg11),
                                                     arg01),
                                    build_zero_cst (itype));

          if (operand_equal_p (arg00, arg11, 0))
            return fold_build2_loc (loc, code, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                                     fold_build2_loc (loc,
                                                                      BIT_XOR_EXPR,
                                                                      itype,
                                                                      arg01,
                                                                      arg10),
                                                     arg00),
                                    build_zero_cst (itype));

          if (operand_equal_p (arg00, arg10, 0))
            return fold_build2_loc (loc, code, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                                     fold_build2_loc (loc,
                                                                      BIT_XOR_EXPR,
                                                                      itype,
                                                                      arg01,
                                                                      arg11),
                                                     arg00),
                                    build_zero_cst (itype));
        }
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (arg1) == BIT_XOR_EXPR)
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg10 = TREE_OPERAND (arg1, 0);
          tree arg11 = TREE_OPERAND (arg1, 1);
          tree itype = TREE_TYPE (arg0);

          /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
             operand_equal_p guarantees no side-effects so we don't need
             to use omit_one_operand on Z.  */
          if (operand_equal_p (arg01, arg11, 0))
            return fold_build2_loc (loc, code, type, arg00,
                                    fold_convert_loc (loc, TREE_TYPE (arg00),
                                                      arg10));
          if (operand_equal_p (arg01, arg10, 0))
            return fold_build2_loc (loc, code, type, arg00,
                                    fold_convert_loc (loc, TREE_TYPE (arg00),
                                                      arg11));
          if (operand_equal_p (arg00, arg11, 0))
            return fold_build2_loc (loc, code, type, arg01,
                                    fold_convert_loc (loc, TREE_TYPE (arg01),
                                                      arg10));
          if (operand_equal_p (arg00, arg10, 0))
            return fold_build2_loc (loc, code, type, arg01,
                                    fold_convert_loc (loc, TREE_TYPE (arg01),
                                                      arg11));

          /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
          if (TREE_CODE (arg01) == INTEGER_CST
              && TREE_CODE (arg11) == INTEGER_CST)
            {
              tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
                                     fold_convert_loc (loc, itype, arg11));
              tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
              return fold_build2_loc (loc, code, type, tem,
                                      fold_convert_loc (loc, itype, arg10));
            }
        }
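      /* For example, (X ^ 4) == (Y ^ 6) becomes (X ^ (4 ^ 6)) == Y, i.e.
         (X ^ 2) == Y, removing one runtime XOR once both constants are
         combined at compile time.  */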
      /* Attempt to simplify equality/inequality comparisons of complex
         values.  Only lower the comparison if the result is known or
         can be simplified to a single scalar comparison.  */
      if ((TREE_CODE (arg0) == COMPLEX_EXPR
           || TREE_CODE (arg0) == COMPLEX_CST)
          && (TREE_CODE (arg1) == COMPLEX_EXPR
              || TREE_CODE (arg1) == COMPLEX_CST))
        {
          tree real0, imag0, real1, imag1;
          tree rcond, icond;

          if (TREE_CODE (arg0) == COMPLEX_EXPR)
            {
              real0 = TREE_OPERAND (arg0, 0);
              imag0 = TREE_OPERAND (arg0, 1);
            }
          else
            {
              real0 = TREE_REALPART (arg0);
              imag0 = TREE_IMAGPART (arg0);
            }

          if (TREE_CODE (arg1) == COMPLEX_EXPR)
            {
              real1 = TREE_OPERAND (arg1, 0);
              imag1 = TREE_OPERAND (arg1, 1);
            }
          else
            {
              real1 = TREE_REALPART (arg1);
              imag1 = TREE_IMAGPART (arg1);
            }

          rcond = fold_binary_loc (loc, code, type, real0, real1);
          if (rcond && TREE_CODE (rcond) == INTEGER_CST)
            {
              if (integer_zerop (rcond))
                {
                  if (code == EQ_EXPR)
                    return omit_two_operands_loc (loc, type,
                                                  boolean_false_node,
                                                  imag0, imag1);
                  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
                }
              else
                {
                  if (code == NE_EXPR)
                    return omit_two_operands_loc (loc, type,
                                                  boolean_true_node,
                                                  imag0, imag1);
                  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
                }
            }

          icond = fold_binary_loc (loc, code, type, imag0, imag1);
          if (icond && TREE_CODE (icond) == INTEGER_CST)
            {
              if (integer_zerop (icond))
                {
                  if (code == EQ_EXPR)
                    return omit_two_operands_loc (loc, type,
                                                  boolean_false_node,
                                                  real0, real1);
                  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
                }
              else
                {
                  if (code == NE_EXPR)
                    return omit_two_operands_loc (loc, type,
                                                  boolean_true_node,
                                                  real0, real1);
                  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
                }
            }
        }

      return NULL_TREE;
    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
        return tem;

      /* Transform comparisons of the form X +- C CMP X.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
               && !HONOR_SNANS (arg0))
              || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
        {
          tree arg01 = TREE_OPERAND (arg0, 1);
          enum tree_code code0 = TREE_CODE (arg0);
          int is_positive;

          if (TREE_CODE (arg01) == REAL_CST)
            is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
          else
            is_positive = tree_int_cst_sgn (arg01);

          /* (X - c) > X becomes false.  */
          if (code == GT_EXPR
              && ((code0 == MINUS_EXPR && is_positive >= 0)
                  || (code0 == PLUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that "
                                        "(X - c) > X is always false"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (0, type);
            }

          /* Likewise (X + c) < X becomes false.  */
          if (code == LT_EXPR
              && ((code0 == PLUS_EXPR && is_positive >= 0)
                  || (code0 == MINUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that "
                                        "(X + c) < X is always false"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (0, type);
            }

          /* Convert (X - c) <= X to true.  */
          if (!HONOR_NANS (arg1)
              && code == LE_EXPR
              && ((code0 == MINUS_EXPR && is_positive >= 0)
                  || (code0 == PLUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that "
                                        "(X - c) <= X is always true"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (1, type);
            }

          /* Convert (X + c) >= X to true.  */
          if (!HONOR_NANS (arg1)
              && code == GE_EXPR
              && ((code0 == PLUS_EXPR && is_positive >= 0)
                  || (code0 == MINUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that "
                                        "(X + c) >= X is always true"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (1, type);
            }

          if (TREE_CODE (arg01) == INTEGER_CST)
            {
              /* Convert X + c > X and X - c < X to true for integers.  */
              if (code == GT_EXPR
                  && ((code0 == PLUS_EXPR && is_positive > 0)
                      || (code0 == MINUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X + c) > X is always true"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (1, type);
                }

              if (code == LT_EXPR
                  && ((code0 == MINUS_EXPR && is_positive > 0)
                      || (code0 == PLUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X - c) < X is always true"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (1, type);
                }

              /* Convert X + c <= X and X - c >= X to false for integers.  */
              if (code == LE_EXPR
                  && ((code0 == PLUS_EXPR && is_positive > 0)
                      || (code0 == MINUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X + c) <= X is always false"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (0, type);
                }

              if (code == GE_EXPR
                  && ((code0 == MINUS_EXPR && is_positive > 0)
                      || (code0 == PLUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X - c) >= X is always false"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (0, type);
                }
            }
        }
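      /* For example, with signed overflow undefined, x + 1 > x folds to
         true and x - 1 >= x folds to false; each such fold is reported
         through fold_overflow_warning for -Wstrict-overflow.  */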
      /* If we are comparing an ABS_EXPR with a constant, we can
         convert all the cases into explicit comparisons, but they may
         well not be faster than doing the ABS and one comparison.
         But ABS (X) <= C is a range comparison, which becomes a subtraction
         and a comparison, and is probably faster.  */
      if (code == LE_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) == ABS_EXPR
          && ! TREE_SIDE_EFFECTS (arg0)
          && (0 != (tem = negate_expr (arg1)))
          && TREE_CODE (tem) == INTEGER_CST
          && !TREE_OVERFLOW (tem))
        return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
                                build2 (GE_EXPR, type,
                                        TREE_OPERAND (arg0, 0), tem),
                                build2 (LE_EXPR, type,
                                        TREE_OPERAND (arg0, 0), arg1));
      /* Convert ABS_EXPR<x> >= 0 to true.  */
      strict_overflow_p = false;
      if (code == GE_EXPR
          && (integer_zerop (arg1)
              || (! HONOR_NANS (arg0)
                  && real_zerop (arg1)))
          && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying comparison of "
                                    "absolute value and zero"),
                                   WARN_STRICT_OVERFLOW_CONDITIONAL);
          return omit_one_operand_loc (loc, type,
                                       constant_boolean_node (true, type),
                                       arg0);
        }

      /* Convert ABS_EXPR<x> < 0 to false.  */
      strict_overflow_p = false;
      if (code == LT_EXPR
          && (integer_zerop (arg1) || real_zerop (arg1))
          && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying comparison of "
                                    "absolute value and zero"),
                                   WARN_STRICT_OVERFLOW_CONDITIONAL);
          return omit_one_operand_loc (loc, type,
                                       constant_boolean_node (false, type),
                                       arg0);
        }
      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
         and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
          && TYPE_UNSIGNED (TREE_TYPE (arg0))
          && TREE_CODE (arg1) == LSHIFT_EXPR
          && integer_onep (TREE_OPERAND (arg1, 0)))
        return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
                           build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
                                   TREE_OPERAND (arg1, 1)),
                           build_zero_cst (TREE_TYPE (arg0)));

      /* Similarly for X < (cast) (1 << Y).  But cast can't be narrowing,
         otherwise Y might be >= # of bits in X's type and thus e.g.
         (unsigned char) (1 << Y) for Y 15 might be 0.
         If the cast is widening, then 1 << Y should have unsigned type,
         otherwise if Y is number of bits in the signed shift type minus 1,
         we can't optimize this.  E.g. (unsigned long long) (1 << Y) for Y
         31 might be 0xffffffff80000000.  */
      if ((code == LT_EXPR || code == GE_EXPR)
          && TYPE_UNSIGNED (TREE_TYPE (arg0))
          && CONVERT_EXPR_P (arg1)
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
          && (element_precision (TREE_TYPE (arg1))
              >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
          && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
              || (element_precision (TREE_TYPE (arg1))
                  == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
          && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
        {
          tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
                        TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
          return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
                             fold_convert_loc (loc, TREE_TYPE (arg0), tem),
                             build_zero_cst (TREE_TYPE (arg0)));
        }

      return NULL_TREE;
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
        tree targ0 = strip_float_extensions (arg0);
        tree targ1 = strip_float_extensions (arg1);
        tree newtype = TREE_TYPE (targ0);

        if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
          newtype = TREE_TYPE (targ1);

        if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
          return fold_build2_loc (loc, code, type,
                                  fold_convert_loc (loc, newtype, targ0),
                                  fold_convert_loc (loc, newtype, targ1));
      }

      return NULL_TREE;
    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
         nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
        return NULL_TREE;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
                                 : fold_convert_loc (loc, type, arg1);
      return pedantic_non_lvalue_loc (loc, tem);

    case ASSERT_EXPR:
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
      gcc_unreachable ();

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
   a LABEL_EXPR; otherwise return NULL_TREE.  Do not check the subtrees
   of GOTO_EXPR.  */

static tree
contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      return *tp;

    case GOTO_EXPR:
      *walk_subtrees = 0;

      /* ... fall through ...  */

    default:
      return NULL_TREE;
    }
}

/* Return whether the sub-tree ST contains a label which is accessible from
   outside the sub-tree.  */

static bool
contains_label_p (tree st)
{
  return
   (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
}
/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_ternary_loc (location_t loc, enum tree_code code, tree type,
                  tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 3);

  /* If this is a commutative operation, and OP0 is a constant, move it
     to OP1 to reduce the number of tests below.  */
  if (commutative_ternary_tree_code (code)
      && tree_swap_operands_p (op0, op1, true))
    return fold_build3_loc (loc, code, type, op1, op0, op2);

  tem = generic_simplify (loc, code, type, op0, op1, op2);
  if (tem)
    return tem;

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any case,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */
  if (op0)
    {
      arg0 = op0;
      STRIP_NOPS (arg0);
    }

  if (op1)
    {
      arg1 = op1;
      STRIP_NOPS (arg1);
    }

  if (op2)
    {
      arg2 = op2;
      STRIP_NOPS (arg2);
    }

  switch (code)
    {
    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
          && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
        {
          unsigned HOST_WIDE_INT idx;
          tree field, value;
          FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx,
                                    field, value)
            if (field == arg1)
              return value;
        }
      return NULL_TREE;
    case COND_EXPR:
    case VEC_COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an
         lvalue, so all simple results must be passed through
         pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
        {
          tree unused_op = integer_zerop (arg0) ? op1 : op2;
          tem = integer_zerop (arg0) ? op2 : op1;
          /* Only optimize constant conditions when the selected branch
             has the same type as the COND_EXPR.  This avoids optimizing
             away "c ? x : throw", where the throw has a void type.
             Avoid throwing away the operand which contains a label.  */
          if ((!TREE_SIDE_EFFECTS (unused_op)
               || !contains_label_p (unused_op))
              && (! VOID_TYPE_P (TREE_TYPE (tem))
                  || VOID_TYPE_P (type)))
            return pedantic_non_lvalue_loc (loc, tem);
          return NULL_TREE;
        }
      else if (TREE_CODE (arg0) == VECTOR_CST)
        {
          if ((TREE_CODE (arg1) == VECTOR_CST
               || TREE_CODE (arg1) == CONSTRUCTOR)
              && (TREE_CODE (arg2) == VECTOR_CST
                  || TREE_CODE (arg2) == CONSTRUCTOR))
            {
              unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
              unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
              gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
              for (i = 0; i < nelts; i++)
                {
                  tree val = VECTOR_CST_ELT (arg0, i);
                  if (integer_all_onesp (val))
                    sel[i] = i;
                  else if (integer_zerop (val))
                    sel[i] = nelts + i;
                  else /* Currently unreachable.  */
                    return NULL_TREE;
                }
              tree t = fold_vec_perm (type, arg1, arg2, sel);
              if (t != NULL_TREE)
                return t;
            }
        }
      /* If we have A op B ? A : C, we may be able to convert this to a
         simpler expression, depending on the operation and the values
         of B and C.  Signed zeros prevent all of these transformations,
         for reasons given above each one.

         Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
          && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
                                             arg1, TREE_OPERAND (arg0, 1))
          && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
        {
          tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
          if (tem)
            return tem;
        }

      if (COMPARISON_CLASS_P (arg0)
          && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
                                             op2,
                                             TREE_OPERAND (arg0, 1))
          && !HONOR_SIGNED_ZEROS (element_mode (op2)))
        {
          location_t loc0 = expr_location_or (arg0, loc);
          tem = fold_invert_truthvalue (loc0, arg0);
          if (tem && COMPARISON_CLASS_P (tem))
            {
              tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
              if (tem)
                return tem;
            }
        }

      /* If the second operand is simpler than the third, swap them
         since that produces better jump optimization results.  */
      if (truth_value_p (TREE_CODE (arg0))
          && tree_swap_operands_p (op1, op2, false))
        {
          location_t loc0 = expr_location_or (arg0, loc);
          /* See if this can be inverted.  If it can't, possibly because
             it was a floating-point inequality comparison, don't do
             anything.  */
          tem = fold_invert_truthvalue (loc0, arg0);
          if (tem)
            return fold_build3_loc (loc, code, type, tem, op2, op1);
        }
      /* Convert A ? 1 : 0 to simply A.  */
      if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
                                 : (integer_onep (op1)
                                    && !VECTOR_TYPE_P (type)))
          && integer_zerop (op2)
          /* If we try to convert OP0 to our type, the
             call to fold will try to move the conversion inside
             a COND, which will recurse.  In that case, the COND_EXPR
             is probably the best choice, so leave it alone.  */
          && type == TREE_TYPE (arg0))
        return pedantic_non_lvalue_loc (loc, arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
         over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
          && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
                                    : (integer_onep (op2)
                                       && !VECTOR_TYPE_P (type)))
          && truth_value_p (TREE_CODE (arg0)))
        return pedantic_non_lvalue_loc (loc,
                                        fold_convert_loc (loc, type,
                                                          invert_truthvalue_loc (loc,
                                                                                 arg0)));
      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
      if (TREE_CODE (arg0) == LT_EXPR
          && integer_zerop (TREE_OPERAND (arg0, 1))
          && integer_zerop (op2)
          && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
        {
          /* sign_bit_p looks through both zero and sign extensions,
             but for this optimization only sign extensions are
             usable.  */
          tree tem2 = TREE_OPERAND (arg0, 0);
          while (tem != tem2)
            {
              if (TREE_CODE (tem2) != NOP_EXPR
                  || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
                {
                  tem = NULL_TREE;
                  break;
                }
              tem2 = TREE_OPERAND (tem2, 0);
            }
          /* sign_bit_p only checks ARG1 bits within A's precision.
             If <sign bit of A> has wider type than A, bits outside
             of A's precision in <sign bit of A> need to be checked.
             If they are all 0, this optimization needs to be done
             in unsigned A's type, if they are all 1 in signed A's type,
             otherwise this can't be done.  */
          if (tem
              && TYPE_PRECISION (TREE_TYPE (tem))
                 < TYPE_PRECISION (TREE_TYPE (arg1))
              && TYPE_PRECISION (TREE_TYPE (tem))
                 < TYPE_PRECISION (type))
            {
              int inner_width, outer_width;
              tree tem_type;

              inner_width = TYPE_PRECISION (TREE_TYPE (tem));
              outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
              if (outer_width > TYPE_PRECISION (type))
                outer_width = TYPE_PRECISION (type);

              wide_int mask = wi::shifted_mask
                (inner_width, outer_width - inner_width, false,
                 TYPE_PRECISION (TREE_TYPE (arg1)));

              wide_int common = mask & arg1;
              if (common == mask)
                {
                  tem_type = signed_type_for (TREE_TYPE (tem));
                  tem = fold_convert_loc (loc, tem_type, tem);
                }
              else if (common == 0)
                {
                  tem_type = unsigned_type_for (TREE_TYPE (tem));
                  tem = fold_convert_loc (loc, tem_type, tem);
                }
              else
                tem = NULL_TREE;
            }

          if (tem)
            return
              fold_convert_loc (loc, type,
                                fold_build2_loc (loc, BIT_AND_EXPR,
                                                 TREE_TYPE (tem), tem,
                                                 fold_convert_loc (loc,
                                                                   TREE_TYPE (tem),
                                                                   arg1)));
        }
      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
         already handled above.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_zerop (op2)
          && integer_pow2p (arg1))
        {
          tree tem = TREE_OPERAND (arg0, 0);
          STRIP_NOPS (tem);
          if (TREE_CODE (tem) == RSHIFT_EXPR
              && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
              && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
                 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
            return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                    TREE_OPERAND (tem, 0), arg1);
        }
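      /* For example, ((a >> 3) & 1) ? 8 : 0 selects exactly bit 3 of a,
         so it folds to a & 8.  */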
      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
         is probably obsolete because the first operand should be a
         truth value (that's why we have the two cases above), but let's
         leave it in until we can confirm this for all front-ends.  */
      if (integer_zerop (op2)
          && TREE_CODE (arg0) == NE_EXPR
          && integer_zerop (TREE_OPERAND (arg0, 1))
          && integer_pow2p (arg1)
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              arg1, OEP_ONLY_CONST))
        return pedantic_non_lvalue_loc (loc,
                                        fold_convert_loc (loc, type,
                                                          TREE_OPERAND (arg0,
                                                                        0)));
      /* Disable the transformations below for vectors, since
         fold_binary_op_with_conditional_arg may undo them immediately,
         yielding an infinite loop.  */
      if (code == VEC_COND_EXPR)
        return NULL_TREE;

      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (op2)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (arg1))
          && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
        return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
                                                           : TRUTH_ANDIF_EXPR,
                                type, fold_convert_loc (loc, type, arg0),
                                arg1);

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (arg1))
          && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
        {
          location_t loc0 = expr_location_or (arg0, loc);
          /* Only perform transformation if ARG0 is easily inverted.  */
          tem = fold_invert_truthvalue (loc0, arg0);
          if (tem)
            return fold_build2_loc (loc, code == VEC_COND_EXPR
                                         ? BIT_IOR_EXPR
                                         : TRUTH_ORIF_EXPR,
                                    type, fold_convert_loc (loc, type, tem),
                                    arg1);
        }

      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
      if (integer_zerop (arg1)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (op2))
          && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
        {
          location_t loc0 = expr_location_or (arg0, loc);
          /* Only perform transformation if ARG0 is easily inverted.  */
          tem = fold_invert_truthvalue (loc0, arg0);
          if (tem)
            return fold_build2_loc (loc, code == VEC_COND_EXPR
                                         ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
                                    type, fold_convert_loc (loc, type, tem),
                                    op2);
        }

      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if (code == VEC_COND_EXPR ? integer_all_onesp (arg1)
                                : integer_onep (arg1)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (op2))
          && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
        return fold_build2_loc (loc, code == VEC_COND_EXPR
                                     ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
                                type, fold_convert_loc (loc, type, arg0),
                                op2);

      return NULL_TREE;
    case CALL_EXPR:
      /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
         of fold_ternary on them.  */
      gcc_unreachable ();
    case BIT_FIELD_REF:
      if (TREE_CODE (arg0) == VECTOR_CST
          && (type == TREE_TYPE (TREE_TYPE (arg0))
              || (TREE_CODE (type) == VECTOR_TYPE
                  && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
        {
          tree eltype = TREE_TYPE (TREE_TYPE (arg0));
          unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
          unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
          unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);

          if (n != 0
              && (idx % width) == 0
              && (n % width) == 0
              && ((idx + n) / width)
                 <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
            {
              idx = idx / width;
              n = n / width;

              if (TREE_CODE (arg0) == VECTOR_CST)
                {
                  if (n == 1)
                    return VECTOR_CST_ELT (arg0, idx);

                  tree *vals = XALLOCAVEC (tree, n);
                  for (unsigned i = 0; i < n; ++i)
                    vals[i] = VECTOR_CST_ELT (arg0, idx + i);
                  return build_vector (type, vals);
                }
            }
        }

      /* On constants we can use native encode/interpret to constant
         fold (nearly) all BIT_FIELD_REFs.  */
      if (CONSTANT_CLASS_P (arg0)
          && can_native_interpret_type_p (type)
          && BITS_PER_UNIT == 8)
        {
          unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
          unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
          /* Limit us to a reasonable amount of work.  To relax the
             other limitations we need bit-shifting of the buffer
             and rounding up the size.  */
          if (bitpos % BITS_PER_UNIT == 0
              && bitsize % BITS_PER_UNIT == 0
              && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
            {
              unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
              unsigned HOST_WIDE_INT len
                = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
                                      bitpos / BITS_PER_UNIT);
              if (len > 0
                  && len * BITS_PER_UNIT >= bitsize)
                {
                  tree v = native_interpret_expr (type, b,
                                                  bitsize / BITS_PER_UNIT);
                  if (v)
                    return v;
                }
            }
        }

      return NULL_TREE;
    case FMA_EXPR:
      /* For integers we can decompose the FMA if possible.  */
      if (TREE_CODE (arg0) == INTEGER_CST
	  && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2_loc (loc, PLUS_EXPR, type,
				const_binop (MULT_EXPR, arg0, arg1), arg2);
      if (integer_zerop (arg2))
	return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);

      return fold_fma (loc, type, arg0, arg1, arg2);
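      /* E.g. FMA_EXPR <4, 5, x> becomes 20 + x via the integer
	 decomposition above, and FMA_EXPR <a, b, 0> becomes a * b.  */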
    case VEC_PERM_EXPR:
      if (TREE_CODE (arg2) == VECTOR_CST)
	{
	  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
	  unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
	  unsigned char *sel2 = sel + nelts;
	  bool need_mask_canon = false;
	  bool need_mask_canon2 = false;
	  bool all_in_vec0 = true;
	  bool all_in_vec1 = true;
	  bool maybe_identity = true;
	  bool single_arg = (op0 == op1);
	  bool changed = false;

	  mask2 = 2 * nelts - 1;
	  mask = single_arg ? (nelts - 1) : mask2;
	  gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
	  for (i = 0; i < nelts; i++)
	    {
	      tree val = VECTOR_CST_ELT (arg2, i);
	      if (TREE_CODE (val) != INTEGER_CST)
		return NULL_TREE;

	      /* Make sure that the perm value is in an acceptable
		 range.  */
	      wide_int t = val;
	      need_mask_canon |= wi::gtu_p (t, mask);
	      need_mask_canon2 |= wi::gtu_p (t, mask2);
	      sel[i] = t.to_uhwi () & mask;
	      sel2[i] = t.to_uhwi () & mask2;

	      if (sel[i] < nelts)
		all_in_vec1 = false;
	      else
		all_in_vec0 = false;

	      if ((sel[i] & (nelts - 1)) != i)
		maybe_identity = false;
	    }

	  if (maybe_identity)
	    {
	      if (all_in_vec0)
		return op0;
	      if (all_in_vec1)
		return op1;
	    }

	  if (all_in_vec0)
	    op1 = op0;
	  else if (all_in_vec1)
	    {
	      op0 = op1;
	      for (i = 0; i < nelts; i++)
		sel[i] -= nelts;
	      need_mask_canon = true;
	    }

	  if ((TREE_CODE (op0) == VECTOR_CST
	       || TREE_CODE (op0) == CONSTRUCTOR)
	      && (TREE_CODE (op1) == VECTOR_CST
		  || TREE_CODE (op1) == CONSTRUCTOR))
	    {
	      tree t = fold_vec_perm (type, op0, op1, sel);
	      if (t != NULL_TREE)
		return t;
	    }

	  if (op0 == op1 && !single_arg)
	    changed = true;

	  /* Some targets are deficient and fail to expand a single
	     argument permutation while still allowing an equivalent
	     2-argument version.  */
	  if (need_mask_canon && arg2 == op2
	      && !can_vec_perm_p (TYPE_MODE (type), false, sel)
	      && can_vec_perm_p (TYPE_MODE (type), false, sel2))
	    {
	      need_mask_canon = need_mask_canon2;
	      sel = sel2;
	    }

	  if (need_mask_canon && arg2 == op2)
	    {
	      tree *tsel = XALLOCAVEC (tree, nelts);
	      tree eltype = TREE_TYPE (TREE_TYPE (arg2));
	      for (i = 0; i < nelts; i++)
		tsel[i] = build_int_cst (eltype, sel[i]);
	      op2 = build_vector (TREE_TYPE (arg2), tsel);
	      changed = true;
	    }

	  if (changed)
	    return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
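/* Examples of the VEC_PERM_EXPR folding above, for four-element vectors:
   VEC_PERM_EXPR <a, b, {0, 1, 2, 3}> is an identity selection and folds
   to A; VEC_PERM_EXPR <a, b, {4, 5, 6, 7}> selects only from B and folds
   to B; and when both operands are VECTOR_CSTs or CONSTRUCTORs the whole
   expression folds to a constant via fold_vec_perm.  Out-of-range selector
   elements are canonicalized modulo 2 * nelts (modulo nelts when both
   operands are the same).  */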
/* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
   of an array (or vector).  */

tree
get_array_ctor_element_at_index (tree ctor, offset_int access_index)
{
  tree index_type = NULL_TREE;
  offset_int low_bound = 0;

  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    {
      tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
      if (domain_type && TYPE_MIN_VALUE (domain_type))
	{
	  /* Static constructors for variably sized objects make no sense.  */
	  gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
	  index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
	  low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
	}
    }

  if (index_type)
    access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
			    TYPE_SIGN (index_type));

  offset_int index = low_bound - 1;
  if (index_type)
    index = wi::ext (index, TYPE_PRECISION (index_type),
		     TYPE_SIGN (index_type));

  offset_int max_index;
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* Array constructor might explicitly set index, or specify a range,
	 or leave index NULL meaning that it is next index after previous
	 one.  */
      if (cfield)
	{
	  if (TREE_CODE (cfield) == INTEGER_CST)
	    max_index = index = wi::to_offset (cfield);
	  else
	    {
	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
	      index = wi::to_offset (TREE_OPERAND (cfield, 0));
	      max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
	    }
	}
      else
	{
	  index += 1;
	  if (index_type)
	    index = wi::ext (index, TYPE_PRECISION (index_type),
			     TYPE_SIGN (index_type));
	  max_index = index;
	}

      /* Do we have a match?  */
      if (wi::cmpu (access_index, index) >= 0
	  && wi::cmpu (access_index, max_index) <= 0)
	return cval;
    }

  return NULL_TREE;
}
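/* For instance, given the initializer
     int a[8] = { [0 ... 3] = 7, [6] = 9 };
   the CONSTRUCTOR carries a RANGE_EXPR element for indices 0-3 and an
   INTEGER_CST element for index 6, so an ACCESS_INDEX of 2 falls inside
   the range and yields 7, while an ACCESS_INDEX of 4 matches no element
   and NULL_TREE is returned.  */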
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;
  location_t loc = EXPR_LOCATION (expr);

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  /* CALL_EXPR-like objects with variable numbers of operands are
     treated specially.  */
  if (kind == tcc_vl_exp)
    {
      if (code == CALL_EXPR)
	{
	  tem = fold_call_expr (loc, expr, false);
	  return tem ? tem : expr;
	}
      return expr;
    }

  if (IS_EXPR_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);
      tree op0, op1, op2;

      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  op0 = TREE_OPERAND (t, 0);
	  tem = fold_unary_loc (loc, code, type, op0);
	  return tem ? tem : expr;
	case 2:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  tem = fold_binary_loc (loc, code, type, op0, op1);
	  return tem ? tem : expr;
	case 3:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  op2 = TREE_OPERAND (t, 2);
	  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
	  return tem ? tem : expr;
	default:
	  break;
	}
    }

  switch (code)
    {
    case ARRAY_REF:
      {
	tree op0 = TREE_OPERAND (t, 0);
	tree op1 = TREE_OPERAND (t, 1);

	if (TREE_CODE (op1) == INTEGER_CST
	    && TREE_CODE (op0) == CONSTRUCTOR
	    && ! type_contains_placeholder_p (TREE_TYPE (op0)))
	  {
	    tree val = get_array_ctor_element_at_index (op0,
							wi::to_offset (op1));
	    if (val)
	      return val;
	  }

	return t;
      }

    case CONSTRUCTOR:
      {
	/* Return a VECTOR_CST if possible.  */
	tree type = TREE_TYPE (t);
	if (TREE_CODE (type) != VECTOR_TYPE)
	  return t;

	unsigned i;
	tree val;
	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
	  if (! CONSTANT_CLASS_P (val))
	    return t;

	return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
      }

    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    default:
      return t;
    } /* switch (code) */
}
#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (const_tree, struct md5_ctx *,
				hash_table<nofree_ptr_hash<const tree_node> > *);
static void fold_check_failed (const_tree, const_tree);
void print_fold_checksum (const_tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */

tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  hash_table<nofree_ptr_hash<const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before);
  ht.empty ();

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}

void
print_fold_checksum (const_tree expr)
{
  struct md5_ctx ctx;
  unsigned char checksum[16], cnt;
  hash_table<nofree_ptr_hash<const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum);
  for (cnt = 0; cnt < 16; ++cnt)
    fprintf (stderr, "%02x", checksum[cnt]);
  putc ('\n', stderr);
}

static void
fold_check_failed (const_tree expr ATTRIBUTE_UNUSED,
		   const_tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}
static void
fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
		    hash_table<nofree_ptr_hash<const tree_node> > *ht)
{
  const tree_node **slot;
  enum tree_code code;
  union tree_node buf;
  int i, len;

 recursive_label:
  if (expr == NULL)
    return;
  slot = ht->find_slot (expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = expr;
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && HAS_DECL_ASSEMBLER_NAME_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      SET_DECL_ASSEMBLER_NAME ((tree) &buf, NULL);
      buf.decl_with_vis.symtab_node = NULL;
      expr = (tree) &buf;
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
	   && (TYPE_POINTER_TO (expr)
	       || TYPE_REFERENCE_TO (expr)
	       || TYPE_CACHED_VALUES_P (expr)
	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
	       || TYPE_NEXT_VARIANT (expr)
	       || TYPE_ALIAS_SET_KNOWN_P (expr)))
    {
      /* Allow these fields to be modified.  */
      tree tmp;
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = tmp = (tree) &buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
      TYPE_POINTER_TO (tmp) = NULL;
      TYPE_REFERENCE_TO (tmp) = NULL;
      TYPE_NEXT_VARIANT (tmp) = NULL;
      TYPE_ALIAS_SET (tmp) = -1;
      if (TYPE_CACHED_VALUES_P (tmp))
	{
	  TYPE_CACHED_VALUES_P (tmp) = 0;
	  TYPE_CACHED_VALUES (tmp) = NULL;
	}
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST
      && code != SSA_NAME
      && CODE_CONTAINS_STRUCT (code, TS_COMMON))
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
	{
	case STRING_CST:
	  md5_process_bytes (TREE_STRING_POINTER (expr),
			     TREE_STRING_LENGTH (expr), ctx);
	  break;
	case COMPLEX_CST:
	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
	  break;
	case VECTOR_CST:
	  for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
	    fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_exceptional:
      switch (code)
	{
	case TREE_LIST:
	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
	  expr = TREE_CHAIN (expr);
	  goto recursive_label;
	  break;
	case TREE_VEC:
	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
    case tcc_vl_exp:
      len = TREE_OPERAND_LENGTH (expr);
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
	{
	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
	}
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
	{
	  if (TREE_CODE (expr) == FUNCTION_DECL)
	    {
	      fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
	      fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
	    }
	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
	}
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
	  || SCALAR_FLOAT_TYPE_P (expr))
	{
	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
	}
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
	  || TREE_CODE (expr) == UNION_TYPE
	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}
/* Helper function for outputting the checksum of a tree T.  When
   debugging with gdb, you can "define mynext" to be "next" followed
   by "call debug_fold_checksum (op0)", then just trace down till the
   outputs differ.  */

DEBUG_FUNCTION void
debug_fold_checksum (const_tree t)
{
  int i;
  unsigned char checksum[16];
  struct md5_ctx ctx;
  hash_table<nofree_ptr_hash<const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (t, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum);
  ht.empty ();

  for (i = 0; i < 16; i++)
    fprintf (stderr, "%d ", checksum[i]);

  fprintf (stderr, "\n");
}

#endif
/* Fold a unary tree expression with code CODE of type TYPE with an
   operand OP0.  LOC is the location of the resulting expression.
   Return a folded expression if successful.  Otherwise, return a tree
   expression with code CODE of type TYPE with an operand OP0.  */

tree
fold_build1_stat_loc (location_t loc,
		      enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before[16], checksum_after[16];
  struct md5_ctx ctx;
  hash_table<nofree_ptr_hash<const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before);
  ht.empty ();
#endif

  tem = fold_unary_loc (loc, code, type, op0);
  if (!tem)
    tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (op0, tem);
#endif
  return tem;
}
/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting
   expression.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with operands
   OP0 and OP1.  */

tree
fold_build2_stat_loc (location_t loc,
		      enum tree_code code, tree type, tree op0, tree op1
		      MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
		checksum_before_op1[16],
		checksum_after_op0[16],
		checksum_after_op1[16];
  struct md5_ctx ctx;
  hash_table<nofree_ptr_hash<const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  ht.empty ();
#endif

  tem = fold_binary_loc (loc, code, type, op0, op1);
  if (!tem)
    tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  ht.empty ();

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op1);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
#endif
  return tem;
}
/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */

tree
fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
		      tree op0, tree op1, tree op2 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
		checksum_before_op1[16],
		checksum_before_op2[16],
		checksum_after_op0[16],
		checksum_after_op1[16],
		checksum_after_op2[16];
  struct md5_ctx ctx;
  hash_table<nofree_ptr_hash<const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  ht.empty ();
#endif

  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
  if (!tem)
    tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  ht.empty ();

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  ht.empty ();

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op2);

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}
/* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
   arguments in ARGARRAY, and a null static chain.
   Return a folded expression if successful.  Otherwise, return a CALL_EXPR
   of type TYPE from the given operands as constructed by build_call_array.  */

tree
fold_build_call_array_loc (location_t loc, tree type, tree fn,
			   int nargs, tree *argarray)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_fn[16],
		checksum_before_arglist[16],
		checksum_after_fn[16],
		checksum_after_arglist[16];
  struct md5_ctx ctx;
  hash_table<nofree_ptr_hash<const tree_node> > ht (32);
  int i;

  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_fn);
  ht.empty ();

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_arglist);
  ht.empty ();
#endif

  tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
  if (!tem)
    tem = build_call_array_loc (loc, type, fn, nargs, argarray);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_fn);
  ht.empty ();

  if (memcmp (checksum_before_fn, checksum_after_fn, 16))
    fold_check_failed (fn, tem);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_arglist);

  if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
    fold_check_failed (NULL_TREE, tem);
#endif
  return tem;
}
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;
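/* Each fold_buildN_initializer_loc function below is then just the
   corresponding fold_buildN_loc call bracketed by the two macros:

     tree result;
     START_FOLD_INIT;
     result = fold_buildN_loc (...);
     END_FOLD_INIT;
     return result;

   so the relaxed trapping-math, rounding-math and -ftrapv state cannot
   leak out of the initializer context.  */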
tree
fold_build1_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build1_loc (loc, code, type, op);

  END_FOLD_INIT;
  return result;
}

tree
fold_build2_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op0, tree op1)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build2_loc (loc, code, type, op0, op1);

  END_FOLD_INIT;
  return result;
}

tree
fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
				       int nargs, tree *argarray)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);

  END_FOLD_INIT;
  return result;
}

#undef START_FOLD_INIT
#undef END_FOLD_INIT
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */

int
multiple_of_p (tree type, const_tree top, const_tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
	return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
	{
	  tree op1, t1;

	  op1 = TREE_OPERAND (top, 1);
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
	      && 0 != (t1 = fold_convert (type,
					  const_binop (LSHIFT_EXPR,
						       size_one_node, op1)))
	      && !TREE_OVERFLOW (t1))
	    return multiple_of_p (type, t1, bottom);
	}
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
	  || (TYPE_PRECISION (type)
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
	return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case COND_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
	  || integer_zerop (bottom)
	  || (TYPE_UNSIGNED (type)
	      && (tree_int_cst_sgn (top) < 0
		  || tree_int_cst_sgn (bottom) < 0)))
	return 0;
      return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
				SIGNED);

    default:
      return 0;
    }
}
#define tree_expr_nonnegative_warnv_p(X, Y) \
  _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0

#define RECURSE(X) \
  ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))

/* Return true if CODE or TYPE is known to be non-negative.  */

static bool
tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
{
  if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
      && truth_value_p (code))
    /* Truth values evaluate to 0 or 1, which is nonnegative unless we
       have a signed:1 type (where the value is -1 and 0).  */
    return true;

  return false;
}

/* Return true if (CODE OP0) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
				bool *strict_overflow_p, int depth)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
	 ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!ANY_INTEGRAL_TYPE_P (type))
	return true;
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  *strict_overflow_p = true;
	  return true;
	}
      break;

    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      return RECURSE (op0);

    CASE_CONVERT:
      {
	tree inner_type = TREE_TYPE (op0);
	tree outer_type = type;

	if (TREE_CODE (outer_type) == REAL_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return RECURSE (op0);
	    if (INTEGRAL_TYPE_P (inner_type))
	      {
		if (TYPE_UNSIGNED (inner_type))
		  return true;
		return RECURSE (op0);
	      }
	  }
	else if (INTEGRAL_TYPE_P (outer_type))
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return RECURSE (op0);
	    if (INTEGRAL_TYPE_P (inner_type))
	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
		     && TYPE_UNSIGNED (inner_type);
	  }
      }
      break;

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
				 tree op1, bool *strict_overflow_p,
				 int depth)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type))
	return RECURSE (op0) && RECURSE (op1);

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
	 both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TREE_CODE (op0) == NOP_EXPR
	  && TREE_CODE (op1) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    {
	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
				       TYPE_PRECISION (inner2)) + 1;
	      return prec < TYPE_PRECISION (type);
	    }
	}
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* x * x is always non-negative for floating point x
	     or without overflow.  */
	  if (operand_equal_p (op0, op1, 0)
	      || (RECURSE (op0) && RECURSE (op1)))
	    {
	      if (ANY_INTEGRAL_TYPE_P (type)
		  && TYPE_OVERFLOW_UNDEFINED (type))
		*strict_overflow_p = true;
	      return true;
	    }
	}

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
	  && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
	{
	  tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
	    ? TREE_TYPE (TREE_OPERAND (op0, 0))
	    : TREE_TYPE (op0);
	  tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
	    ? TREE_TYPE (TREE_OPERAND (op1, 0))
	    : TREE_TYPE (op1);

	  bool unsigned0 = TYPE_UNSIGNED (inner0);
	  bool unsigned1 = TYPE_UNSIGNED (inner1);

	  if (TREE_CODE (op0) == INTEGER_CST)
	    unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;

	  if (TREE_CODE (op1) == INTEGER_CST)
	    unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;

	  if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
	      && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
	    {
	      unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
		? tree_int_cst_min_precision (op0, UNSIGNED)
		: TYPE_PRECISION (inner0);

	      unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
		? tree_int_cst_min_precision (op1, UNSIGNED)
		: TYPE_PRECISION (inner1);

	      return precision0 + precision1 < TYPE_PRECISION (type);
	    }
	}
      break;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      return RECURSE (op0) || RECURSE (op1);

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return RECURSE (op0) && RECURSE (op1);

    case TRUNC_MOD_EXPR:
      return RECURSE (op0);

    case FLOOR_MOD_EXPR:
      return RECURSE (op1);

    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
{
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case FIXED_CST:
      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));

    case COND_EXPR:
      return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));

    case SSA_NAME:
      /* Limit the depth of recursion to avoid quadratic behavior.
	 This is expected to catch almost all occurrences in practice.
	 If this code misses important cases that unbounded recursion
	 would not, passes that need this information could be revised
	 to provide it through dataflow propagation.  */
      return (!name_registered_for_update_p (t)
	      && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
	      && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
						  strict_overflow_p, depth));

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
    }
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
			       bool *strict_overflow_p, int depth)
{
  switch (fn)
    {
    case CFN_BUILT_IN_BSWAP32:
    case CFN_BUILT_IN_BSWAP64:
      /* Always true.  */
      return true;

    CASE_CFN_SQRT:
      /* sqrt(-0.0) is -0.0.  */
      if (!HONOR_SIGNED_ZEROS (element_mode (type)))
	return true;
      return RECURSE (arg0);

    CASE_CFN_NEARBYINT:
    CASE_CFN_SIGNIFICAND:
      /* True if the 1st argument is nonnegative.  */
      return RECURSE (arg0);

    CASE_CFN_FMAX:
      /* True if the 1st OR 2nd arguments are nonnegative.  */
      return RECURSE (arg0) || RECURSE (arg1);

    CASE_CFN_FMIN:
      /* True if the 1st AND 2nd arguments are nonnegative.  */
      return RECURSE (arg0) && RECURSE (arg1);

    CASE_CFN_COPYSIGN:
      /* True if the 2nd argument is nonnegative.  */
      return RECURSE (arg1);

    CASE_CFN_POWI:
      /* True if the 1st argument is nonnegative or the second
	 argument is an even integer.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
	return true;
      return RECURSE (arg0);

    CASE_CFN_POW:
      /* True if the 1st argument is nonnegative or the second
	 argument is an even integer valued real.  */
      if (TREE_CODE (arg1) == REAL_CST)
	{
	  REAL_VALUE_TYPE c;
	  HOST_WIDE_INT n;

	  c = TREE_REAL_CST (arg1);
	  n = real_to_integer (&c);
	  if ((n & 1) == 0)
	    {
	      REAL_VALUE_TYPE cint;
	      real_from_integer (&cint, VOIDmode, n, SIGNED);
	      if (real_identical (&c, &cint))
		return true;
	    }
	}
      return RECURSE (arg0);

    default:
      break;
    }
  return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
{
  enum tree_code code = TREE_CODE (t);
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (code)
    {
    case TARGET_EXPR:
      {
	tree temp = TARGET_EXPR_SLOT (t);
	t = TARGET_EXPR_INITIAL (t);

	/* If the initializer is non-void, then it's a normal expression
	   that will be assigned to the slot.  */
	if (!VOID_TYPE_P (t))
	  return RECURSE (t);

	/* Otherwise, the initializer sets the slot in some way.  One common
	   way is an assignment statement at the end of the initializer.  */
	while (1)
	  {
	    if (TREE_CODE (t) == BIND_EXPR)
	      t = expr_last (BIND_EXPR_BODY (t));
	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
		     || TREE_CODE (t) == TRY_CATCH_EXPR)
	      t = expr_last (TREE_OPERAND (t, 0));
	    else if (TREE_CODE (t) == STATEMENT_LIST)
	      t = expr_last (t);
	    else
	      break;
	  }
	if (TREE_CODE (t) == MODIFY_EXPR
	    && TREE_OPERAND (t, 0) == temp)
	  return RECURSE (TREE_OPERAND (t, 1));

	return false;
      }

    case CALL_EXPR:
      {
	tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
	tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;

	return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
					      get_call_combined_fn (t),
					      arg0, arg1,
					      strict_overflow_p, depth);
      }

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      return RECURSE (TREE_OPERAND (t, 1));

    case BIND_EXPR:
      return RECURSE (expr_last (TREE_OPERAND (t, 1)));

    case SAVE_EXPR:
      return RECURSE (TREE_OPERAND (t, 0));

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
    }
}
#undef RECURSE
#undef tree_expr_nonnegative_warnv_p

/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
{
  enum tree_code code;

  if (t == error_mark_node)
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p, depth);

    case tcc_unary:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p, depth);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p, depth);

    case TRUTH_NOT_EXPR:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p, depth);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);

    default:
      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
    }
}
/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */

bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-negative"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
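/* Typical use: a pass that wants to rewrite abs (x) into x can guard the
   transformation with

     if (tree_expr_nonnegative_p (x))
       ...

   and any "assuming signed overflow does not occur" warning implied by
   the answer is emitted here rather than at every call site.  */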
/* Return true when (CODE OP0) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
			    bool *strict_overflow_p)
{
  switch (code)
    {
    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
					strict_overflow_p);

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (op0);
	tree outer_type = type;

	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
		&& tree_expr_nonzero_warnv_p (op0,
					      strict_overflow_p));
      }

    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
					strict_overflow_p);

    default:
      break;
    }

  return false;
}
/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonzero_warnv_p (enum tree_code code,
			     tree type,
			     tree op0,
			     tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* With the presence of negative values it is hard
	     to say something.  */
	  sub_strict_overflow_p = false;
	  if (!tree_expr_nonnegative_warnv_p (op0,
					      &sub_strict_overflow_p)
	      || !tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	    return false;
	  /* One of operands must be positive and the other non-negative.  */
	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
	     overflows, on a twos-complement machine the sum of two
	     nonnegative numbers can never be zero.  */
	  return (tree_expr_nonzero_warnv_p (op0,
					     strict_overflow_p)
		  || tree_expr_nonzero_warnv_p (op1,
						strict_overflow_p));
	}
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  if (tree_expr_nonzero_warnv_p (op0,
					 strict_overflow_p)
	      && tree_expr_nonzero_warnv_p (op1,
					    strict_overflow_p))
	    {
	      *strict_overflow_p = true;
	      return true;
	    }
	}
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (op1,
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;

	  /* When both operands are nonzero, then MAX must be too.  */
	  if (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p))
	    return true;

	  /* MAX where operand 0 is positive is positive.  */
	  return tree_expr_nonnegative_warnv_p (op0,
						strict_overflow_p);
	}
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1,
					  &sub_strict_overflow_p)
	       && tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p)
	      || tree_expr_nonzero_warnv_p (op0,
					    strict_overflow_p));

    default:
      break;
    }

  return false;
}
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
	tree base = TREE_OPERAND (t, 0);

	if (!DECL_P (base))
	  base = get_base_address (base);

	if (base && TREE_CODE (base) == TARGET_EXPR)
	  base = TARGET_EXPR_SLOT (base);

	if (!base)
	  return false;

	/* For objects in symbol table check if we know they are non-zero.
	   Don't do anything for variables and functions before symtab is built;
	   it is quite possible that they will be declared weak later.  */
	int nonzero_addr = maybe_nonzero_address (base);
	if (nonzero_addr >= 0)
	  return nonzero_addr;

	/* Function local objects are never NULL.  */
	if (DECL_P (base)
	    && (DECL_CONTEXT (base)
		&& TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
		&& auto_var_in_fn_p (base, DECL_CONTEXT (base))))
	  return true;

	/* Constants are never weak.  */
	if (CONSTANT_CLASS_P (base))
	  return true;

	return false;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    default:
      break;
    }
  return false;
}
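/* For instance, the address of a local "int x" declared in the current
   function is nonzero via the DECL_CONTEXT check above, and the address
   of a string literal is nonzero because constants are never weak.  */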
#define integer_valued_real_p(X) \
  _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0

#define RECURSE(X) \
  ((integer_valued_real_p) (X, depth + 1))

/* Return true if the floating point result of (CODE OP0) has an
   integer value.  We also allow +Inf, -Inf and NaN to be considered
   integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

bool
integer_valued_real_unary_p (tree_code code, tree op0, int depth)
{
  switch (code)
    {
    case FLOAT_EXPR:
      return true;

    case ABS_EXPR:
      return RECURSE (op0);

    CASE_CONVERT:
      {
	tree type = TREE_TYPE (op0);
	if (TREE_CODE (type) == INTEGER_TYPE)
	  return true;
	if (TREE_CODE (type) == REAL_TYPE)
	  return RECURSE (op0);
	break;
      }

    default:
      break;
    }
  return false;
}

/* Return true if the floating point result of (CODE OP0 OP1) has an
   integer value.  We also allow +Inf, -Inf and NaN to be considered
   integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

bool
integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
{
  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      return RECURSE (op0) && RECURSE (op1);

    default:
      break;
    }
  return false;
}

/* Return true if the floating point result of calling FNDECL with arguments
   ARG0 and ARG1 has an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.  If FNDECL
   takes fewer than 2 arguments, the remaining ARGn are null.

   DEPTH is the current nesting depth of the query.  */

bool
integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
{
  switch (fn)
    {
    CASE_CFN_CEIL:
    CASE_CFN_FLOOR:
    CASE_CFN_NEARBYINT:
    CASE_CFN_RINT:
    CASE_CFN_ROUND:
    CASE_CFN_TRUNC:
      return true;

    CASE_CFN_FMIN:
    CASE_CFN_FMAX:
      return RECURSE (arg0) && RECURSE (arg1);

    default:
      break;
    }
  return false;
}

/* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
   has an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

bool
integer_valued_real_single_p (tree t, int depth)
{
  switch (TREE_CODE (t))
    {
    case REAL_CST:
      return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));

    case COND_EXPR:
      return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));

    case SSA_NAME:
      /* Limit the depth of recursion to avoid quadratic behavior.
	 This is expected to catch almost all occurrences in practice.
	 If this code misses important cases that unbounded recursion
	 would not, passes that need this information could be revised
	 to provide it through dataflow propagation.  */
      return (!name_registered_for_update_p (t)
	      && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
	      && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
						    depth));

    default:
      break;
    }
  return false;
}

/* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
   has an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
integer_valued_real_invalid_p (tree t, int depth)
{
  switch (TREE_CODE (t))
    {
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return RECURSE (TREE_OPERAND (t, 1));

    case SAVE_EXPR:
      return RECURSE (TREE_OPERAND (t, 0));

    default:
      break;
    }
  return false;
}

#undef RECURSE
#undef integer_valued_real_p

/* Return true if the floating point expression T has an integer value.
   We also allow +Inf, -Inf and NaN to be considered integer values.
   Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

bool
integer_valued_real_p (tree t, int depth)
{
  if (t == error_mark_node)
    return false;

  tree_code code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
					   TREE_OPERAND (t, 1), depth);

    case tcc_unary:
      return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return integer_valued_real_single_p (t, depth);

    default:
      break;
    }

  switch (code)
    {
    case COND_EXPR:
    case SSA_NAME:
      return integer_valued_real_single_p (t, depth);

    case CALL_EXPR:
      {
	tree arg0 = (call_expr_nargs (t) > 0
		     ? CALL_EXPR_ARG (t, 0)
		     : NULL_TREE);
	tree arg1 = (call_expr_nargs (t) > 1
		     ? CALL_EXPR_ARG (t, 1)
		     : NULL_TREE);
	return integer_valued_real_call_p (get_call_combined_fn (t),
					   arg0, arg1, depth);
      }

    default:
      return integer_valued_real_invalid_p (t, depth);
    }
}
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;
      location_t loc = EXPR_LOCATION (exp);

      if (TREE_CODE (exp) == INDIRECT_REF)
	string = string_constant (exp1, &index);
      else
	{
	  tree low_bound = array_ref_low_bound (exp);
	  index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));

	  /* Optimize the special-case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion, (ARRAY
	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
	  if (! integer_zerop (low_bound))
	    index = size_diffop_loc (loc, index,
				     fold_convert_loc (loc, sizetype,
						       low_bound));

	  string = exp1;
	}

      if (string
	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
	  && TREE_CODE (string) == STRING_CST
	  && TREE_CODE (index) == INTEGER_CST
	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
	      == MODE_INT)
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
	return build_int_cst_type (TREE_TYPE (exp),
				   (TREE_STRING_POINTER (string)
				    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}
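/* E.g. a read of "abc"[1], i.e. an ARRAY_REF of the STRING_CST "abc"
   with constant index 1, folds to the character constant 'b', provided
   the element type is a single-byte integer mode as checked above.  */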
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	bool overflow;
	wide_int val = wi::neg (arg0, &overflow);
	t = force_fit_type (type, val, 1,
			    (overflow | TREE_OVERFLOW (arg0))
			    && !TYPE_UNSIGNED (type));
	break;
      }

    case REAL_CST:
      t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
	FIXED_VALUE_TYPE f;
	bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
					    &(TREE_FIXED_CST (arg0)), NULL,
					    TYPE_SATURATING (type));
	t = build_fixed (type, f);
	/* Propagate overflow flags.  */
	if (overflow_p | TREE_OVERFLOW (arg0))
	  TREE_OVERFLOW (t) = 1;
	break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

static tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	/* If the value is unsigned or non-negative, then the absolute value
	   is the same as the ordinary value.  */
	if (!wi::neg_p (arg0, TYPE_SIGN (type)))
	  t = arg0;

	/* If the value is negative, then the absolute value is
	   its negation.  */
	else
	  {
	    bool overflow;
	    wide_int val = wi::neg (arg0, &overflow);
	    t = force_fit_type (type, val, -1,
				overflow | TREE_OVERFLOW (arg0));
	  }
      }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
	t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      else
	t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (const_tree arg0, tree type)
{
  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
}
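/* As examples of the three helpers above: fold_negate_const of INT_MIN
   in a signed type yields INT_MIN with TREE_OVERFLOW set, fold_abs_const
   of -5 yields 5, and fold_not_const of 0 in a 32-bit unsigned type
   yields 0xffffffff.  */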
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case ORDERED_EXPR:
	      result = 0;
	      break;

	    case NE_EXPR:
	    case UNORDERED_EXPR:
	    case UNLT_EXPR:
	    case UNLE_EXPR:
	    case UNGT_EXPR:
	    case UNGE_EXPR:
	    case UNEQ_EXPR:
	      result = 1;
	      break;

	    case LT_EXPR:
	    case LE_EXPR:
	    case GT_EXPR:
	    case GE_EXPR:
	    case LTGT_EXPR:
	      if (flag_trapping_math)
		return NULL_TREE;
	      result = 0;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  return constant_boolean_node (result, type);
	}

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
					  TREE_REALPART (op0),
					  TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
					  TREE_IMAGPART (op0),
					  TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
	return NULL_TREE;
    }

  if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
    {
      if (!VECTOR_TYPE_P (type))
	{
	  /* Have vector comparison with scalar boolean result.  */
	  bool result = true;
	  gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
		      && VECTOR_CST_NELTS (op0) == VECTOR_CST_NELTS (op1));
	  for (unsigned i = 0; i < VECTOR_CST_NELTS (op0); i++)
	    {
	      tree elem0 = VECTOR_CST_ELT (op0, i);
	      tree elem1 = VECTOR_CST_ELT (op1, i);
	      tree tmp = fold_relational_const (code, type, elem0, elem1);
	      result &= integer_onep (tmp);
	    }
	  if (code == NE_EXPR)
	    result = !result;
	  return constant_boolean_node (result, type);
	}
      unsigned count = VECTOR_CST_NELTS (op0);
      tree *elts = XALLOCAVEC (tree, count);
      gcc_assert (VECTOR_CST_NELTS (op1) == count
		  && TYPE_VECTOR_SUBPARTS (type) == count);

      for (unsigned i = 0; i < count; i++)
	{
	  tree elem_type = TREE_TYPE (type);
	  tree elem0 = VECTOR_CST_ELT (op0, i);
	  tree elem1 = VECTOR_CST_ELT (op1, i);

	  tree tem = fold_relational_const (code, elem_type,
					    elem0, elem1);

	  if (tem == NULL_TREE)
	    return NULL_TREE;

	  elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
	}

      return build_vector (type, elts);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      std::swap (op0, op1);
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     Otherwise return T.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	result = tree_int_cst_equal (op0, op1);
      else
	result = tree_int_cst_lt (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
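/* For instance, the complex comparison (1 + 2i) == (1 + 3i) folds
   componentwise to 1 && 0, i.e. false, and LE/GT/GE/NE on integer
   constants are all reduced to the LT and EQ kernels by the swap and
   invert steps above.  */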
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check the expression inside the return,
     or the right hand side of the modify expression inside the return.  If
     either has no side effects, we don't need to wrap the expression in a
     cleanup point expression.  Note we don't check the left hand side of
     the modify because it should always be a return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
	return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
	return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
        return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
        {
          tree fop = fold_read_from_constant_string (op);
          if (fop)
            return fop;
          else
            return op;
        }
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype)
               && (!in_gimple_form
                   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          if (in_gimple_form
              && TREE_CODE (min_val) != INTEGER_CST)
            return NULL_TREE;
          return build4_loc (loc, ARRAY_REF, type, op, min_val,
                             NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && type == TREE_TYPE (optype))
        return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && type == TREE_TYPE (optype))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3_loc (loc, BIT_FIELD_REF, type, op,
                                  part_width, index);
        }
    }

  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
        {
          tree op00type;
          op00 = TREE_OPERAND (op00, 0);
          op00type = TREE_TYPE (op00);

          /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
          if (TREE_CODE (op00type) == VECTOR_TYPE
              && type == TREE_TYPE (op00type))
            {
              tree part_width = TYPE_SIZE (type);
              unsigned HOST_WIDE_INT max_offset
                = (tree_to_uhwi (part_width) / BITS_PER_UNIT
                   * TYPE_VECTOR_SUBPARTS (op00type));
              if (tree_int_cst_sign_bit (op01) == 0
                  && compare_tree_int (op01, max_offset) == -1)
                {
                  unsigned HOST_WIDE_INT offset = tree_to_uhwi (op01);
                  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
                  tree index = bitsize_int (indexi);
                  return fold_build3_loc (loc,
                                          BIT_FIELD_REF, type, op00,
                                          part_width, index);
                }
            }
          /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
          else if (TREE_CODE (op00type) == COMPLEX_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree size = TYPE_SIZE_UNIT (type);
              if (tree_int_cst_equal (size, op01))
                return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
            }
          /* ((foo *)&fooarray)[1] => fooarray[1] */
          else if (TREE_CODE (op00type) == ARRAY_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree type_domain = TYPE_DOMAIN (op00type);
              tree min_val = size_zero_node;
              if (type_domain && TYPE_MIN_VALUE (type_domain))
                min_val = TYPE_MIN_VALUE (type_domain);
              op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
                                     TYPE_SIZE_UNIT (type));
              op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
              return build4_loc (loc, ARRAY_REF, type, op00, op01,
                                 NULL_TREE, NULL_TREE);
            }
        }
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
          || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
          && TREE_CODE (min_val) != INTEGER_CST)
        return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
                         NULL_TREE);
    }

  return NULL_TREE;
}
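
/* Illustrative source-level view of the cases above (hypothetical
   declarations, not from the sources): given `double d[8];' and
   `_Complex float c;',

     *(double *)&d                => d[0]         (ARRAY_REF)
     *(float *)&c                 => __real__ c   (REALPART_EXPR)
     *(float *)((char *)&c + 4)   => __imag__ c   (IMAGPART_EXPR)

   assuming 4-byte floats, so the offset 4 equals TYPE_SIZE_UNIT of
   the imaginary part's type.  */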
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}
/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
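
/* Usage sketch (editorial): the two wrappers differ only in their
   starting point.  Given PTR of pointer type,

     tree deref = build_fold_indirect_ref_loc (loc, ptr);

   returns either a simplified tree from fold_indirect_ref_1 or a fresh
   INDIRECT_REF, while fold_indirect_ref_loc starts from an existing
   INDIRECT_REF and returns it unchanged when nothing simplifies.  */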
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
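
/* Worked example (editorial): for the ignored expression `x + f ()',
   where only the call has TREE_SIDE_EFFECTS set, the tcc_binary case
   drops the side-effect-free operand `x' and the loop reduces the
   whole expression to the bare call `f ()'.  */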
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.  */

tree
round_up_loc (location_t loc, tree value, unsigned int divisor)
{
  tree div = NULL_TREE;

  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
        {
          wide_int val = value;
          bool overflow_p;

          if ((val & (divisor - 1)) == 0)
            return value;

          overflow_p = TREE_OVERFLOW (value);
          val += divisor - 1;
          val &= - (int) divisor;

          return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
        }
      else
        {
          tree t;

          t = build_int_cst (TREE_TYPE (value), divisor - 1);
          value = size_binop_loc (loc, PLUS_EXPR, value, t);
          t = build_int_cst (TREE_TYPE (value), - (int) divisor);
          value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
        }
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &reversep,
                                  &volatilep, false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
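
/* Illustrative decomposition (editorial, hypothetical struct): for
   `&s.f', where field `f' starts 4 bytes into `s', the returned core
   is `&s', *PBITPOS is 32 and *POFFSET is NULL_TREE; any expression
   that is not an ADDR_EXPR comes back unchanged with a zero offset.  */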
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
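
/* Worked example (editorial): for `&a[3]' and `&a[1]' with 4-byte
   elements, both cores are `&a', the bit positions are 96 and 32,
   and *DIFF becomes (96 - 32) / BITS_PER_UNIT = 8.  */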
/* Return OFF converted to a pointer offset type suitable as offset for
   POINTER_PLUS_EXPR.  Use location LOC for this conversion.  */

tree
convert_to_ptrofftype_loc (location_t loc, tree off)
{
  return fold_convert_loc (loc, sizetype, off);
}

/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */

tree
fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
{
  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
                          ptr, convert_to_ptrofftype_loc (loc, off));
}

/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */

tree
fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
{
  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
                          ptr, size_int (off));
}
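
/* Usage sketch (editorial): to advance a pointer tree PTR by four
   bytes one would write

     tree p = fold_build_pointer_plus_hwi_loc (loc, ptr, 4);

   which folds to POINTER_PLUS_EXPR <ptr, 4> with the offset already
   in sizetype.  */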
/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!tree_fits_uhwi_p (offset_node)
           || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
}
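
/* Worked example (editorial): for the tree form of "hello" + 2,
   c_getstr returns a pointer into the string constant, i.e. "llo";
   a non-constant or out-of-range offset yields a null pointer.  */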