1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2023 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et. al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
28 /* The entry points in this file are fold, size_int_wide and size_binop.
30 fold takes a tree as argument and returns a simplified tree.
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
45 #include "coretypes.h"
54 #include "tree-ssa-operands.h"
55 #include "optabs-query.h"
57 #include "diagnostic-core.h"
60 #include "fold-const.h"
61 #include "fold-const-call.h"
62 #include "stor-layout.h"
64 #include "tree-iterator.h"
67 #include "langhooks.h"
72 #include "generic-match.h"
73 #include "gimple-iterator.h"
74 #include "gimple-fold.h"
75 #include "tree-into-ssa.h"
77 #include "case-cfn-macros.h"
78 #include "stringpool.h"
80 #include "tree-ssanames.h"
82 #include "stringpool.h"
84 #include "tree-vector-builder.h"
85 #include "vec-perm-indices.h"
87 #include "gimple-range.h"
/* Nonzero if we are folding constants inside an initializer or a C++
   manifestly-constant-evaluated context; zero otherwise.
   Should be used when folding in initializer enables additional
   optimizations.  */
int folding_initializer = 0;
/* Nonzero if we are folding C++ manifestly-constant-evaluated context; zero
   otherwise.
   Should be used when certain constructs shouldn't be optimized
   during folding in that context.  */
bool folding_cxx_constexpr = false;
101 /* The following constants represent a bit based encoding of GCC's
102 comparison operators. This encoding simplifies transformations
103 on relational comparison operators, such as AND and OR. */
104 enum comparison_code
{
123 static bool negate_expr_p (tree
);
124 static tree
negate_expr (tree
);
125 static tree
associate_trees (location_t
, tree
, tree
, enum tree_code
, tree
);
126 static enum comparison_code
comparison_to_compcode (enum tree_code
);
127 static enum tree_code
compcode_to_comparison (enum comparison_code
);
128 static bool twoval_comparison_p (tree
, tree
*, tree
*);
129 static tree
eval_subst (location_t
, tree
, tree
, tree
, tree
, tree
);
130 static tree
optimize_bit_field_compare (location_t
, enum tree_code
,
132 static bool simple_operand_p (const_tree
);
133 static tree
range_binop (enum tree_code
, tree
, tree
, int, tree
, int);
134 static tree
range_predecessor (tree
);
135 static tree
range_successor (tree
);
136 static tree
fold_range_test (location_t
, enum tree_code
, tree
, tree
, tree
);
137 static tree
fold_cond_expr_with_comparison (location_t
, tree
, enum tree_code
,
138 tree
, tree
, tree
, tree
);
139 static tree
unextend (tree
, int, int, tree
);
140 static tree
extract_muldiv (tree
, tree
, enum tree_code
, tree
, bool *);
141 static tree
extract_muldiv_1 (tree
, tree
, enum tree_code
, tree
, bool *);
142 static tree
fold_binary_op_with_conditional_arg (location_t
,
143 enum tree_code
, tree
,
146 static tree
fold_negate_const (tree
, tree
);
147 static tree
fold_not_const (const_tree
, tree
);
148 static tree
fold_relational_const (enum tree_code
, tree
, tree
, tree
);
149 static tree
fold_convert_const (enum tree_code
, tree
, tree
);
150 static tree
fold_view_convert_expr (tree
, tree
);
151 static tree
fold_negate_expr (location_t
, tree
);
153 /* This is a helper function to detect min/max for some operands of COND_EXPR.
154 The form is "(EXP0 CMP EXP1) ? EXP2 : EXP3". */
156 minmax_from_comparison (tree_code cmp
, tree exp0
, tree exp1
, tree exp2
, tree exp3
)
158 enum tree_code code
= ERROR_MARK
;
160 if (HONOR_NANS (exp0
) || HONOR_SIGNED_ZEROS (exp0
))
163 if (!operand_equal_p (exp0
, exp2
))
166 if (TREE_CODE (exp3
) == INTEGER_CST
&& TREE_CODE (exp1
) == INTEGER_CST
)
168 if (wi::to_widest (exp1
) == (wi::to_widest (exp3
) - 1))
170 /* X <= Y - 1 equals to X < Y. */
173 /* X > Y - 1 equals to X >= Y. */
176 /* a != MIN_RANGE<a> ? a : MIN_RANGE<a>+1 -> MAX_EXPR<MIN_RANGE<a>+1, a> */
177 if (cmp
== NE_EXPR
&& TREE_CODE (exp0
) == SSA_NAME
)
180 get_range_query (cfun
)->range_of_expr (r
, exp0
);
181 if (r
.undefined_p ())
182 r
.set_varying (TREE_TYPE (exp0
));
184 widest_int min
= widest_int::from (r
.lower_bound (),
185 TYPE_SIGN (TREE_TYPE (exp0
)));
186 if (min
== wi::to_widest (exp1
))
190 if (wi::to_widest (exp1
) == (wi::to_widest (exp3
) + 1))
192 /* X < Y + 1 equals to X <= Y. */
195 /* X >= Y + 1 equals to X > Y. */
198 /* a != MAX_RANGE<a> ? a : MAX_RANGE<a>-1 -> MIN_EXPR<MIN_RANGE<a>-1, a> */
199 if (cmp
== NE_EXPR
&& TREE_CODE (exp0
) == SSA_NAME
)
202 get_range_query (cfun
)->range_of_expr (r
, exp0
);
203 if (r
.undefined_p ())
204 r
.set_varying (TREE_TYPE (exp0
));
206 widest_int max
= widest_int::from (r
.upper_bound (),
207 TYPE_SIGN (TREE_TYPE (exp0
)));
208 if (max
== wi::to_widest (exp1
))
213 if (code
!= ERROR_MARK
214 || operand_equal_p (exp1
, exp3
))
216 if (cmp
== LT_EXPR
|| cmp
== LE_EXPR
)
218 if (cmp
== GT_EXPR
|| cmp
== GE_EXPR
)
224 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
225 Otherwise, return LOC. */
228 expr_location_or (tree t
, location_t loc
)
230 location_t tloc
= EXPR_LOCATION (t
);
231 return tloc
== UNKNOWN_LOCATION
? loc
: tloc
;
234 /* Similar to protected_set_expr_location, but never modify x in place,
235 if location can and needs to be set, unshare it. */
238 protected_set_expr_location_unshare (tree x
, location_t loc
)
240 if (CAN_HAVE_LOCATION_P (x
)
241 && EXPR_LOCATION (x
) != loc
242 && !(TREE_CODE (x
) == SAVE_EXPR
243 || TREE_CODE (x
) == TARGET_EXPR
244 || TREE_CODE (x
) == BIND_EXPR
))
247 SET_EXPR_LOCATION (x
, loc
);
252 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
253 division and returns the quotient. Otherwise returns
257 div_if_zero_remainder (const_tree arg1
, const_tree arg2
)
261 if (wi::multiple_of_p (wi::to_widest (arg1
), wi::to_widest (arg2
),
263 return wide_int_to_tree (TREE_TYPE (arg1
), quo
);
268 /* This is nonzero if we should defer warnings about undefined
269 overflow. This facility exists because these warnings are a
270 special case. The code to estimate loop iterations does not want
271 to issue any warnings, since it works with expressions which do not
272 occur in user code. Various bits of cleanup code call fold(), but
273 only use the result if it has certain characteristics (e.g., is a
274 constant); that code only wants to issue a warning if the result is
277 static int fold_deferring_overflow_warnings
;
279 /* If a warning about undefined overflow is deferred, this is the
280 warning. Note that this may cause us to turn two warnings into
281 one, but that is fine since it is sufficient to only give one
282 warning per expression. */
284 static const char* fold_deferred_overflow_warning
;
286 /* If a warning about undefined overflow is deferred, this is the
287 level at which the warning should be emitted. */
289 static enum warn_strict_overflow_code fold_deferred_overflow_code
;
291 /* Start deferring overflow warnings. We could use a stack here to
292 permit nested calls, but at present it is not necessary. */
295 fold_defer_overflow_warnings (void)
297 ++fold_deferring_overflow_warnings
;
300 /* Stop deferring overflow warnings. If there is a pending warning,
301 and ISSUE is true, then issue the warning if appropriate. STMT is
302 the statement with which the warning should be associated (used for
303 location information); STMT may be NULL. CODE is the level of the
304 warning--a warn_strict_overflow_code value. This function will use
305 the smaller of CODE and the deferred code when deciding whether to
306 issue the warning. CODE may be zero to mean to always use the
310 fold_undefer_overflow_warnings (bool issue
, const gimple
*stmt
, int code
)
315 gcc_assert (fold_deferring_overflow_warnings
> 0);
316 --fold_deferring_overflow_warnings
;
317 if (fold_deferring_overflow_warnings
> 0)
319 if (fold_deferred_overflow_warning
!= NULL
321 && code
< (int) fold_deferred_overflow_code
)
322 fold_deferred_overflow_code
= (enum warn_strict_overflow_code
) code
;
326 warnmsg
= fold_deferred_overflow_warning
;
327 fold_deferred_overflow_warning
= NULL
;
329 if (!issue
|| warnmsg
== NULL
)
332 if (warning_suppressed_p (stmt
, OPT_Wstrict_overflow
))
335 /* Use the smallest code level when deciding to issue the
337 if (code
== 0 || code
> (int) fold_deferred_overflow_code
)
338 code
= fold_deferred_overflow_code
;
340 if (!issue_strict_overflow_warning (code
))
344 locus
= input_location
;
346 locus
= gimple_location (stmt
);
347 warning_at (locus
, OPT_Wstrict_overflow
, "%s", warnmsg
);
350 /* Stop deferring overflow warnings, ignoring any deferred
354 fold_undefer_and_ignore_overflow_warnings (void)
356 fold_undefer_overflow_warnings (false, NULL
, 0);
359 /* Whether we are deferring overflow warnings. */
362 fold_deferring_overflow_warnings_p (void)
364 return fold_deferring_overflow_warnings
> 0;
367 /* This is called when we fold something based on the fact that signed
368 overflow is undefined. */
371 fold_overflow_warning (const char* gmsgid
, enum warn_strict_overflow_code wc
)
373 if (fold_deferring_overflow_warnings
> 0)
375 if (fold_deferred_overflow_warning
== NULL
376 || wc
< fold_deferred_overflow_code
)
378 fold_deferred_overflow_warning
= gmsgid
;
379 fold_deferred_overflow_code
= wc
;
382 else if (issue_strict_overflow_warning (wc
))
383 warning (OPT_Wstrict_overflow
, gmsgid
);
386 /* Return true if the built-in mathematical function specified by CODE
387 is odd, i.e. -f(x) == f(-x). */
390 negate_mathfn_p (combined_fn fn
)
430 CASE_CFN_ROUNDEVEN_FN
:
448 CASE_CFN_NEARBYINT_FN
:
451 return !flag_rounding_math
;
459 /* Check whether we may negate an integer constant T without causing
463 may_negate_without_overflow_p (const_tree t
)
467 gcc_assert (TREE_CODE (t
) == INTEGER_CST
);
469 type
= TREE_TYPE (t
);
470 if (TYPE_UNSIGNED (type
))
473 return !wi::only_sign_bit_p (wi::to_wide (t
));
476 /* Determine whether an expression T can be cheaply negated using
477 the function negate_expr without introducing undefined overflow. */
480 negate_expr_p (tree t
)
487 type
= TREE_TYPE (t
);
490 switch (TREE_CODE (t
))
493 if (INTEGRAL_TYPE_P (type
) && TYPE_UNSIGNED (type
))
496 /* Check that -CST will not overflow type. */
497 return may_negate_without_overflow_p (t
);
499 return (INTEGRAL_TYPE_P (type
)
500 && TYPE_OVERFLOW_WRAPS (type
));
506 return !TYPE_OVERFLOW_SANITIZED (type
);
509 /* We want to canonicalize to positive real constants. Pretend
510 that only negative ones can be easily negated. */
511 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t
));
514 return negate_expr_p (TREE_REALPART (t
))
515 && negate_expr_p (TREE_IMAGPART (t
));
519 if (FLOAT_TYPE_P (TREE_TYPE (type
)) || TYPE_OVERFLOW_WRAPS (type
))
522 /* Steps don't prevent negation. */
523 unsigned int count
= vector_cst_encoded_nelts (t
);
524 for (unsigned int i
= 0; i
< count
; ++i
)
525 if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t
, i
)))
532 return negate_expr_p (TREE_OPERAND (t
, 0))
533 && negate_expr_p (TREE_OPERAND (t
, 1));
536 return negate_expr_p (TREE_OPERAND (t
, 0));
539 if (HONOR_SIGN_DEPENDENT_ROUNDING (type
)
540 || HONOR_SIGNED_ZEROS (type
)
541 || (ANY_INTEGRAL_TYPE_P (type
)
542 && ! TYPE_OVERFLOW_WRAPS (type
)))
544 /* -(A + B) -> (-B) - A. */
545 if (negate_expr_p (TREE_OPERAND (t
, 1)))
547 /* -(A + B) -> (-A) - B. */
548 return negate_expr_p (TREE_OPERAND (t
, 0));
551 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
552 return !HONOR_SIGN_DEPENDENT_ROUNDING (type
)
553 && !HONOR_SIGNED_ZEROS (type
)
554 && (! ANY_INTEGRAL_TYPE_P (type
)
555 || TYPE_OVERFLOW_WRAPS (type
));
558 if (TYPE_UNSIGNED (type
))
560 /* INT_MIN/n * n doesn't overflow while negating one operand it does
561 if n is a (negative) power of two. */
562 if (INTEGRAL_TYPE_P (TREE_TYPE (t
))
563 && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t
))
564 && ! ((TREE_CODE (TREE_OPERAND (t
, 0)) == INTEGER_CST
566 (wi::abs (wi::to_wide (TREE_OPERAND (t
, 0))))) != 1)
567 || (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
569 (wi::abs (wi::to_wide (TREE_OPERAND (t
, 1))))) != 1)))
575 if (! HONOR_SIGN_DEPENDENT_ROUNDING (t
))
576 return negate_expr_p (TREE_OPERAND (t
, 1))
577 || negate_expr_p (TREE_OPERAND (t
, 0));
583 if (TYPE_UNSIGNED (type
))
585 /* In general we can't negate A in A / B, because if A is INT_MIN and
586 B is not 1 we change the sign of the result. */
587 if (TREE_CODE (TREE_OPERAND (t
, 0)) == INTEGER_CST
588 && negate_expr_p (TREE_OPERAND (t
, 0)))
590 /* In general we can't negate B in A / B, because if A is INT_MIN and
591 B is 1, we may turn this into INT_MIN / -1 which is undefined
592 and actually traps on some architectures. */
593 if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t
))
594 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t
))
595 || (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
596 && ! integer_onep (TREE_OPERAND (t
, 1))))
597 return negate_expr_p (TREE_OPERAND (t
, 1));
601 /* Negate -((double)float) as (double)(-float). */
602 if (SCALAR_FLOAT_TYPE_P (type
))
604 tree tem
= strip_float_extensions (t
);
606 return negate_expr_p (tem
);
611 /* Negate -f(x) as f(-x). */
612 if (negate_mathfn_p (get_call_combined_fn (t
)))
613 return negate_expr_p (CALL_EXPR_ARG (t
, 0));
617 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
618 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
620 tree op1
= TREE_OPERAND (t
, 1);
621 if (wi::to_wide (op1
) == element_precision (type
) - 1)
632 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
633 simplification is possible.
634 If negate_expr_p would return true for T, NULL_TREE will never be
638 fold_negate_expr_1 (location_t loc
, tree t
)
640 tree type
= TREE_TYPE (t
);
643 switch (TREE_CODE (t
))
645 /* Convert - (~A) to A + 1. */
647 if (INTEGRAL_TYPE_P (type
))
648 return fold_build2_loc (loc
, PLUS_EXPR
, type
, TREE_OPERAND (t
, 0),
649 build_one_cst (type
));
653 tem
= fold_negate_const (t
, type
);
654 if (TREE_OVERFLOW (tem
) == TREE_OVERFLOW (t
)
655 || (ANY_INTEGRAL_TYPE_P (type
)
656 && !TYPE_OVERFLOW_TRAPS (type
)
657 && TYPE_OVERFLOW_WRAPS (type
))
658 || (flag_sanitize
& SANITIZE_SI_OVERFLOW
) == 0)
665 tem
= fold_negate_const (t
, type
);
670 tree rpart
= fold_negate_expr (loc
, TREE_REALPART (t
));
671 tree ipart
= fold_negate_expr (loc
, TREE_IMAGPART (t
));
673 return build_complex (type
, rpart
, ipart
);
679 tree_vector_builder elts
;
680 elts
.new_unary_operation (type
, t
, true);
681 unsigned int count
= elts
.encoded_nelts ();
682 for (unsigned int i
= 0; i
< count
; ++i
)
684 tree elt
= fold_negate_expr (loc
, VECTOR_CST_ELT (t
, i
));
685 if (elt
== NULL_TREE
)
687 elts
.quick_push (elt
);
690 return elts
.build ();
694 if (negate_expr_p (t
))
695 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
696 fold_negate_expr (loc
, TREE_OPERAND (t
, 0)),
697 fold_negate_expr (loc
, TREE_OPERAND (t
, 1)));
701 if (negate_expr_p (t
))
702 return fold_build1_loc (loc
, CONJ_EXPR
, type
,
703 fold_negate_expr (loc
, TREE_OPERAND (t
, 0)));
707 if (!TYPE_OVERFLOW_SANITIZED (type
))
708 return TREE_OPERAND (t
, 0);
712 if (!HONOR_SIGN_DEPENDENT_ROUNDING (type
)
713 && !HONOR_SIGNED_ZEROS (type
))
715 /* -(A + B) -> (-B) - A. */
716 if (negate_expr_p (TREE_OPERAND (t
, 1)))
718 tem
= negate_expr (TREE_OPERAND (t
, 1));
719 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
720 tem
, TREE_OPERAND (t
, 0));
723 /* -(A + B) -> (-A) - B. */
724 if (negate_expr_p (TREE_OPERAND (t
, 0)))
726 tem
= negate_expr (TREE_OPERAND (t
, 0));
727 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
728 tem
, TREE_OPERAND (t
, 1));
734 /* - (A - B) -> B - A */
735 if (!HONOR_SIGN_DEPENDENT_ROUNDING (type
)
736 && !HONOR_SIGNED_ZEROS (type
))
737 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
738 TREE_OPERAND (t
, 1), TREE_OPERAND (t
, 0));
742 if (TYPE_UNSIGNED (type
))
748 if (! HONOR_SIGN_DEPENDENT_ROUNDING (type
))
750 tem
= TREE_OPERAND (t
, 1);
751 if (negate_expr_p (tem
))
752 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
753 TREE_OPERAND (t
, 0), negate_expr (tem
));
754 tem
= TREE_OPERAND (t
, 0);
755 if (negate_expr_p (tem
))
756 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
757 negate_expr (tem
), TREE_OPERAND (t
, 1));
764 if (TYPE_UNSIGNED (type
))
766 /* In general we can't negate A in A / B, because if A is INT_MIN and
767 B is not 1 we change the sign of the result. */
768 if (TREE_CODE (TREE_OPERAND (t
, 0)) == INTEGER_CST
769 && negate_expr_p (TREE_OPERAND (t
, 0)))
770 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
771 negate_expr (TREE_OPERAND (t
, 0)),
772 TREE_OPERAND (t
, 1));
773 /* In general we can't negate B in A / B, because if A is INT_MIN and
774 B is 1, we may turn this into INT_MIN / -1 which is undefined
775 and actually traps on some architectures. */
776 if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t
))
777 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t
))
778 || (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
779 && ! integer_onep (TREE_OPERAND (t
, 1))))
780 && negate_expr_p (TREE_OPERAND (t
, 1)))
781 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
783 negate_expr (TREE_OPERAND (t
, 1)));
787 /* Convert -((double)float) into (double)(-float). */
788 if (SCALAR_FLOAT_TYPE_P (type
))
790 tem
= strip_float_extensions (t
);
791 if (tem
!= t
&& negate_expr_p (tem
))
792 return fold_convert_loc (loc
, type
, negate_expr (tem
));
797 /* Negate -f(x) as f(-x). */
798 if (negate_mathfn_p (get_call_combined_fn (t
))
799 && negate_expr_p (CALL_EXPR_ARG (t
, 0)))
803 fndecl
= get_callee_fndecl (t
);
804 arg
= negate_expr (CALL_EXPR_ARG (t
, 0));
805 return build_call_expr_loc (loc
, fndecl
, 1, arg
);
810 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
811 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
813 tree op1
= TREE_OPERAND (t
, 1);
814 if (wi::to_wide (op1
) == element_precision (type
) - 1)
816 tree ntype
= TYPE_UNSIGNED (type
)
817 ? signed_type_for (type
)
818 : unsigned_type_for (type
);
819 tree temp
= fold_convert_loc (loc
, ntype
, TREE_OPERAND (t
, 0));
820 temp
= fold_build2_loc (loc
, RSHIFT_EXPR
, ntype
, temp
, op1
);
821 return fold_convert_loc (loc
, type
, temp
);
833 /* A wrapper for fold_negate_expr_1. */
836 fold_negate_expr (location_t loc
, tree t
)
838 tree type
= TREE_TYPE (t
);
840 tree tem
= fold_negate_expr_1 (loc
, t
);
841 if (tem
== NULL_TREE
)
843 return fold_convert_loc (loc
, type
, tem
);
846 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
847 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
859 loc
= EXPR_LOCATION (t
);
860 type
= TREE_TYPE (t
);
863 tem
= fold_negate_expr (loc
, t
);
865 tem
= build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (t
), t
);
866 return fold_convert_loc (loc
, type
, tem
);
869 /* Split a tree IN into a constant, literal and variable parts that could be
870 combined with CODE to make IN. "constant" means an expression with
871 TREE_CONSTANT but that isn't an actual constant. CODE must be a
872 commutative arithmetic operation. Store the constant part into *CONP,
873 the literal in *LITP and return the variable part. If a part isn't
874 present, set it to null. If the tree does not decompose in this way,
875 return the entire tree as the variable part and the other parts as null.
877 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
878 case, we negate an operand that was subtracted. Except if it is a
879 literal for which we use *MINUS_LITP instead.
881 If NEGATE_P is true, we are negating all of IN, again except a literal
882 for which we use *MINUS_LITP instead. If a variable part is of pointer
883 type, it is negated after converting to TYPE. This prevents us from
884 generating illegal MINUS pointer expression. LOC is the location of
885 the converted variable part.
887 If IN is itself a literal or constant, return it as appropriate.
889 Note that we do not guarantee that any of the three values will be the
890 same type as IN, but they will have the same signedness and mode. */
893 split_tree (tree in
, tree type
, enum tree_code code
,
894 tree
*minus_varp
, tree
*conp
, tree
*minus_conp
,
895 tree
*litp
, tree
*minus_litp
, int negate_p
)
904 /* Strip any conversions that don't change the machine mode or signedness. */
905 STRIP_SIGN_NOPS (in
);
907 if (TREE_CODE (in
) == INTEGER_CST
|| TREE_CODE (in
) == REAL_CST
908 || TREE_CODE (in
) == FIXED_CST
)
910 else if (TREE_CODE (in
) == code
911 || ((! FLOAT_TYPE_P (TREE_TYPE (in
)) || flag_associative_math
)
912 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in
))
913 /* We can associate addition and subtraction together (even
914 though the C standard doesn't say so) for integers because
915 the value is not affected. For reals, the value might be
916 affected, so we can't. */
917 && ((code
== PLUS_EXPR
&& TREE_CODE (in
) == POINTER_PLUS_EXPR
)
918 || (code
== PLUS_EXPR
&& TREE_CODE (in
) == MINUS_EXPR
)
919 || (code
== MINUS_EXPR
920 && (TREE_CODE (in
) == PLUS_EXPR
921 || TREE_CODE (in
) == POINTER_PLUS_EXPR
)))))
923 tree op0
= TREE_OPERAND (in
, 0);
924 tree op1
= TREE_OPERAND (in
, 1);
925 bool neg1_p
= TREE_CODE (in
) == MINUS_EXPR
;
926 bool neg_litp_p
= false, neg_conp_p
= false, neg_var_p
= false;
928 /* First see if either of the operands is a literal, then a constant. */
929 if (TREE_CODE (op0
) == INTEGER_CST
|| TREE_CODE (op0
) == REAL_CST
930 || TREE_CODE (op0
) == FIXED_CST
)
931 *litp
= op0
, op0
= 0;
932 else if (TREE_CODE (op1
) == INTEGER_CST
|| TREE_CODE (op1
) == REAL_CST
933 || TREE_CODE (op1
) == FIXED_CST
)
934 *litp
= op1
, neg_litp_p
= neg1_p
, op1
= 0;
936 if (op0
!= 0 && TREE_CONSTANT (op0
))
937 *conp
= op0
, op0
= 0;
938 else if (op1
!= 0 && TREE_CONSTANT (op1
))
939 *conp
= op1
, neg_conp_p
= neg1_p
, op1
= 0;
941 /* If we haven't dealt with either operand, this is not a case we can
942 decompose. Otherwise, VAR is either of the ones remaining, if any. */
943 if (op0
!= 0 && op1
!= 0)
948 var
= op1
, neg_var_p
= neg1_p
;
950 /* Now do any needed negations. */
952 *minus_litp
= *litp
, *litp
= 0;
953 if (neg_conp_p
&& *conp
)
954 *minus_conp
= *conp
, *conp
= 0;
955 if (neg_var_p
&& var
)
956 *minus_varp
= var
, var
= 0;
958 else if (TREE_CONSTANT (in
))
960 else if (TREE_CODE (in
) == BIT_NOT_EXPR
961 && code
== PLUS_EXPR
)
963 /* -1 - X is folded to ~X, undo that here. Do _not_ do this
964 when IN is constant. */
965 *litp
= build_minus_one_cst (type
);
966 *minus_varp
= TREE_OPERAND (in
, 0);
974 *minus_litp
= *litp
, *litp
= 0;
975 else if (*minus_litp
)
976 *litp
= *minus_litp
, *minus_litp
= 0;
978 *minus_conp
= *conp
, *conp
= 0;
979 else if (*minus_conp
)
980 *conp
= *minus_conp
, *minus_conp
= 0;
982 *minus_varp
= var
, var
= 0;
983 else if (*minus_varp
)
984 var
= *minus_varp
, *minus_varp
= 0;
988 && TREE_OVERFLOW_P (*litp
))
989 *litp
= drop_tree_overflow (*litp
);
991 && TREE_OVERFLOW_P (*minus_litp
))
992 *minus_litp
= drop_tree_overflow (*minus_litp
);
997 /* Re-associate trees split by the above function. T1 and T2 are
998 either expressions to associate or null. Return the new
999 expression, if any. LOC is the location of the new expression. If
1000 we build an operation, do it in TYPE and with CODE. */
1003 associate_trees (location_t loc
, tree t1
, tree t2
, enum tree_code code
, tree type
)
1007 gcc_assert (t2
== 0 || code
!= MINUS_EXPR
);
1013 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1014 try to fold this since we will have infinite recursion. But do
1015 deal with any NEGATE_EXPRs. */
1016 if (TREE_CODE (t1
) == code
|| TREE_CODE (t2
) == code
1017 || TREE_CODE (t1
) == PLUS_EXPR
|| TREE_CODE (t2
) == PLUS_EXPR
1018 || TREE_CODE (t1
) == MINUS_EXPR
|| TREE_CODE (t2
) == MINUS_EXPR
)
1020 if (code
== PLUS_EXPR
)
1022 if (TREE_CODE (t1
) == NEGATE_EXPR
)
1023 return build2_loc (loc
, MINUS_EXPR
, type
,
1024 fold_convert_loc (loc
, type
, t2
),
1025 fold_convert_loc (loc
, type
,
1026 TREE_OPERAND (t1
, 0)));
1027 else if (TREE_CODE (t2
) == NEGATE_EXPR
)
1028 return build2_loc (loc
, MINUS_EXPR
, type
,
1029 fold_convert_loc (loc
, type
, t1
),
1030 fold_convert_loc (loc
, type
,
1031 TREE_OPERAND (t2
, 0)));
1032 else if (integer_zerop (t2
))
1033 return fold_convert_loc (loc
, type
, t1
);
1035 else if (code
== MINUS_EXPR
)
1037 if (integer_zerop (t2
))
1038 return fold_convert_loc (loc
, type
, t1
);
1041 return build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, t1
),
1042 fold_convert_loc (loc
, type
, t2
));
1045 return fold_build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, t1
),
1046 fold_convert_loc (loc
, type
, t2
));
1049 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1050 for use in int_const_binop, size_binop and size_diffop. */
1053 int_binop_types_match_p (enum tree_code code
, const_tree type1
, const_tree type2
)
1055 if (!INTEGRAL_TYPE_P (type1
) && !POINTER_TYPE_P (type1
))
1057 if (!INTEGRAL_TYPE_P (type2
) && !POINTER_TYPE_P (type2
))
1072 return TYPE_UNSIGNED (type1
) == TYPE_UNSIGNED (type2
)
1073 && TYPE_PRECISION (type1
) == TYPE_PRECISION (type2
)
1074 && TYPE_MODE (type1
) == TYPE_MODE (type2
);
1077 /* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
1078 a new constant in RES. Return FALSE if we don't know how to
1079 evaluate CODE at compile-time. */
1082 wide_int_binop (wide_int
&res
,
1083 enum tree_code code
, const wide_int
&arg1
, const wide_int
&arg2
,
1084 signop sign
, wi::overflow_type
*overflow
)
1087 *overflow
= wi::OVF_NONE
;
1091 res
= wi::bit_or (arg1
, arg2
);
1095 res
= wi::bit_xor (arg1
, arg2
);
1099 res
= wi::bit_and (arg1
, arg2
);
1103 if (wi::neg_p (arg2
))
1105 res
= wi::lshift (arg1
, arg2
);
1109 if (wi::neg_p (arg2
))
1111 /* It's unclear from the C standard whether shifts can overflow.
1112 The following code ignores overflow; perhaps a C standard
1113 interpretation ruling is needed. */
1114 res
= wi::rshift (arg1
, arg2
, sign
);
1119 if (wi::neg_p (arg2
))
1122 if (code
== RROTATE_EXPR
)
1123 code
= LROTATE_EXPR
;
1125 code
= RROTATE_EXPR
;
1130 if (code
== RROTATE_EXPR
)
1131 res
= wi::rrotate (arg1
, tmp
);
1133 res
= wi::lrotate (arg1
, tmp
);
1137 res
= wi::add (arg1
, arg2
, sign
, overflow
);
1141 res
= wi::sub (arg1
, arg2
, sign
, overflow
);
1145 res
= wi::mul (arg1
, arg2
, sign
, overflow
);
1148 case MULT_HIGHPART_EXPR
:
1149 res
= wi::mul_high (arg1
, arg2
, sign
);
1152 case TRUNC_DIV_EXPR
:
1153 case EXACT_DIV_EXPR
:
1156 res
= wi::div_trunc (arg1
, arg2
, sign
, overflow
);
1159 case FLOOR_DIV_EXPR
:
1162 res
= wi::div_floor (arg1
, arg2
, sign
, overflow
);
1168 res
= wi::div_ceil (arg1
, arg2
, sign
, overflow
);
1171 case ROUND_DIV_EXPR
:
1174 res
= wi::div_round (arg1
, arg2
, sign
, overflow
);
1177 case TRUNC_MOD_EXPR
:
1180 res
= wi::mod_trunc (arg1
, arg2
, sign
, overflow
);
1183 case FLOOR_MOD_EXPR
:
1186 res
= wi::mod_floor (arg1
, arg2
, sign
, overflow
);
1192 res
= wi::mod_ceil (arg1
, arg2
, sign
, overflow
);
1195 case ROUND_MOD_EXPR
:
1198 res
= wi::mod_round (arg1
, arg2
, sign
, overflow
);
1202 res
= wi::min (arg1
, arg2
, sign
);
1206 res
= wi::max (arg1
, arg2
, sign
);
1215 /* Combine two poly int's ARG1 and ARG2 under operation CODE to
1216 produce a new constant in RES. Return FALSE if we don't know how
1217 to evaluate CODE at compile-time. */
1220 poly_int_binop (poly_wide_int
&res
, enum tree_code code
,
1221 const_tree arg1
, const_tree arg2
,
1222 signop sign
, wi::overflow_type
*overflow
)
1224 gcc_assert (NUM_POLY_INT_COEFFS
!= 1);
1225 gcc_assert (poly_int_tree_p (arg1
) && poly_int_tree_p (arg2
));
1229 res
= wi::add (wi::to_poly_wide (arg1
),
1230 wi::to_poly_wide (arg2
), sign
, overflow
);
1234 res
= wi::sub (wi::to_poly_wide (arg1
),
1235 wi::to_poly_wide (arg2
), sign
, overflow
);
1239 if (TREE_CODE (arg2
) == INTEGER_CST
)
1240 res
= wi::mul (wi::to_poly_wide (arg1
),
1241 wi::to_wide (arg2
), sign
, overflow
);
1242 else if (TREE_CODE (arg1
) == INTEGER_CST
)
1243 res
= wi::mul (wi::to_poly_wide (arg2
),
1244 wi::to_wide (arg1
), sign
, overflow
);
1250 if (TREE_CODE (arg2
) == INTEGER_CST
)
1251 res
= wi::to_poly_wide (arg1
) << wi::to_wide (arg2
);
1257 if (TREE_CODE (arg2
) != INTEGER_CST
1258 || !can_ior_p (wi::to_poly_wide (arg1
), wi::to_wide (arg2
),
1269 /* Combine two integer constants ARG1 and ARG2 under operation CODE to
1270 produce a new constant. Return NULL_TREE if we don't know how to
1271 evaluate CODE at compile-time. */
1274 int_const_binop (enum tree_code code
, const_tree arg1
, const_tree arg2
,
1277 poly_wide_int poly_res
;
1278 tree type
= TREE_TYPE (arg1
);
1279 signop sign
= TYPE_SIGN (type
);
1280 wi::overflow_type overflow
= wi::OVF_NONE
;
1282 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg2
) == INTEGER_CST
)
1284 wide_int warg1
= wi::to_wide (arg1
), res
;
1285 wide_int warg2
= wi::to_wide (arg2
, TYPE_PRECISION (type
));
1286 if (!wide_int_binop (res
, code
, warg1
, warg2
, sign
, &overflow
))
1290 else if (!poly_int_tree_p (arg1
)
1291 || !poly_int_tree_p (arg2
)
1292 || !poly_int_binop (poly_res
, code
, arg1
, arg2
, sign
, &overflow
))
1294 return force_fit_type (type
, poly_res
, overflowable
,
1295 (((sign
== SIGNED
|| overflowable
== -1)
1297 | TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
)));
1300 /* Return true if binary operation OP distributes over addition in operand
1301 OPNO, with the other operand being held constant. OPNO counts from 1. */
1304 distributes_over_addition_p (tree_code op
, int opno
)
1321 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1322 constant. We assume ARG1 and ARG2 have the same data type, or at least
1323 are the same kind of constant and the same machine mode. Return zero if
1324 combining the constants is not allowed in the current operating mode. */
1327 const_binop (enum tree_code code
, tree arg1
, tree arg2
)
1329 /* Sanity check for the recursive cases. */
1336 if (poly_int_tree_p (arg1
) && poly_int_tree_p (arg2
))
1338 if (code
== POINTER_PLUS_EXPR
)
1339 return int_const_binop (PLUS_EXPR
,
1340 arg1
, fold_convert (TREE_TYPE (arg1
), arg2
));
1342 return int_const_binop (code
, arg1
, arg2
);
1345 if (TREE_CODE (arg1
) == REAL_CST
&& TREE_CODE (arg2
) == REAL_CST
)
1350 REAL_VALUE_TYPE value
;
1351 REAL_VALUE_TYPE result
;
1355 /* The following codes are handled by real_arithmetic. */
1370 d1
= TREE_REAL_CST (arg1
);
1371 d2
= TREE_REAL_CST (arg2
);
1373 type
= TREE_TYPE (arg1
);
1374 mode
= TYPE_MODE (type
);
1376 /* Don't perform operation if we honor signaling NaNs and
1377 either operand is a signaling NaN. */
1378 if (HONOR_SNANS (mode
)
1379 && (REAL_VALUE_ISSIGNALING_NAN (d1
)
1380 || REAL_VALUE_ISSIGNALING_NAN (d2
)))
1383 /* Don't perform operation if it would raise a division
1384 by zero exception. */
1385 if (code
== RDIV_EXPR
1386 && real_equal (&d2
, &dconst0
)
1387 && (flag_trapping_math
|| ! MODE_HAS_INFINITIES (mode
)))
1390 /* If either operand is a NaN, just return it. Otherwise, set up
1391 for floating-point trap; we return an overflow. */
1392 if (REAL_VALUE_ISNAN (d1
))
1394 /* Make resulting NaN value to be qNaN when flag_signaling_nans
1397 t
= build_real (type
, d1
);
1400 else if (REAL_VALUE_ISNAN (d2
))
1402 /* Make resulting NaN value to be qNaN when flag_signaling_nans
1405 t
= build_real (type
, d2
);
1409 inexact
= real_arithmetic (&value
, code
, &d1
, &d2
);
1410 real_convert (&result
, mode
, &value
);
1412 /* Don't constant fold this floating point operation if
1413 both operands are not NaN but the result is NaN, and
1414 flag_trapping_math. Such operations should raise an
1415 invalid operation exception. */
1416 if (flag_trapping_math
1417 && MODE_HAS_NANS (mode
)
1418 && REAL_VALUE_ISNAN (result
)
1419 && !REAL_VALUE_ISNAN (d1
)
1420 && !REAL_VALUE_ISNAN (d2
))
1423 /* Don't constant fold this floating point operation if
1424 the result has overflowed and flag_trapping_math. */
1425 if (flag_trapping_math
1426 && MODE_HAS_INFINITIES (mode
)
1427 && REAL_VALUE_ISINF (result
)
1428 && !REAL_VALUE_ISINF (d1
)
1429 && !REAL_VALUE_ISINF (d2
))
1432 /* Don't constant fold this floating point operation if the
1433 result may dependent upon the run-time rounding mode and
1434 flag_rounding_math is set, or if GCC's software emulation
1435 is unable to accurately represent the result. */
1436 if ((flag_rounding_math
1437 || (MODE_COMPOSITE_P (mode
) && !flag_unsafe_math_optimizations
))
1438 && (inexact
|| !real_identical (&result
, &value
)))
1441 t
= build_real (type
, result
);
1443 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
);
1447 if (TREE_CODE (arg1
) == FIXED_CST
)
1449 FIXED_VALUE_TYPE f1
;
1450 FIXED_VALUE_TYPE f2
;
1451 FIXED_VALUE_TYPE result
;
1456 /* The following codes are handled by fixed_arithmetic. */
1462 case TRUNC_DIV_EXPR
:
1463 if (TREE_CODE (arg2
) != FIXED_CST
)
1465 f2
= TREE_FIXED_CST (arg2
);
1471 if (TREE_CODE (arg2
) != INTEGER_CST
)
1473 wi::tree_to_wide_ref w2
= wi::to_wide (arg2
);
1474 f2
.data
.high
= w2
.elt (1);
1475 f2
.data
.low
= w2
.ulow ();
1484 f1
= TREE_FIXED_CST (arg1
);
1485 type
= TREE_TYPE (arg1
);
1486 sat_p
= TYPE_SATURATING (type
);
1487 overflow_p
= fixed_arithmetic (&result
, code
, &f1
, &f2
, sat_p
);
1488 t
= build_fixed (type
, result
);
1489 /* Propagate overflow flags. */
1490 if (overflow_p
| TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
))
1491 TREE_OVERFLOW (t
) = 1;
1495 if (TREE_CODE (arg1
) == COMPLEX_CST
&& TREE_CODE (arg2
) == COMPLEX_CST
)
1497 tree type
= TREE_TYPE (arg1
);
1498 tree r1
= TREE_REALPART (arg1
);
1499 tree i1
= TREE_IMAGPART (arg1
);
1500 tree r2
= TREE_REALPART (arg2
);
1501 tree i2
= TREE_IMAGPART (arg2
);
1508 real
= const_binop (code
, r1
, r2
);
1509 imag
= const_binop (code
, i1
, i2
);
1513 if (COMPLEX_FLOAT_TYPE_P (type
))
1514 return do_mpc_arg2 (arg1
, arg2
, type
,
1515 /* do_nonfinite= */ folding_initializer
,
1518 real
= const_binop (MINUS_EXPR
,
1519 const_binop (MULT_EXPR
, r1
, r2
),
1520 const_binop (MULT_EXPR
, i1
, i2
));
1521 imag
= const_binop (PLUS_EXPR
,
1522 const_binop (MULT_EXPR
, r1
, i2
),
1523 const_binop (MULT_EXPR
, i1
, r2
));
1527 if (COMPLEX_FLOAT_TYPE_P (type
))
1528 return do_mpc_arg2 (arg1
, arg2
, type
,
1529 /* do_nonfinite= */ folding_initializer
,
1532 case TRUNC_DIV_EXPR
:
1534 case FLOOR_DIV_EXPR
:
1535 case ROUND_DIV_EXPR
:
1536 if (flag_complex_method
== 0)
1538 /* Keep this algorithm in sync with
1539 tree-complex.cc:expand_complex_div_straight().
1541 Expand complex division to scalars, straightforward algorithm.
1542 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1546 = const_binop (PLUS_EXPR
,
1547 const_binop (MULT_EXPR
, r2
, r2
),
1548 const_binop (MULT_EXPR
, i2
, i2
));
1550 = const_binop (PLUS_EXPR
,
1551 const_binop (MULT_EXPR
, r1
, r2
),
1552 const_binop (MULT_EXPR
, i1
, i2
));
1554 = const_binop (MINUS_EXPR
,
1555 const_binop (MULT_EXPR
, i1
, r2
),
1556 const_binop (MULT_EXPR
, r1
, i2
));
1558 real
= const_binop (code
, t1
, magsquared
);
1559 imag
= const_binop (code
, t2
, magsquared
);
1563 /* Keep this algorithm in sync with
1564 tree-complex.cc:expand_complex_div_wide().
1566 Expand complex division to scalars, modified algorithm to minimize
1567 overflow with wide input ranges. */
1568 tree compare
= fold_build2 (LT_EXPR
, boolean_type_node
,
1569 fold_abs_const (r2
, TREE_TYPE (type
)),
1570 fold_abs_const (i2
, TREE_TYPE (type
)));
1572 if (integer_nonzerop (compare
))
1574 /* In the TRUE branch, we compute
1576 div = (br * ratio) + bi;
1577 tr = (ar * ratio) + ai;
1578 ti = (ai * ratio) - ar;
1581 tree ratio
= const_binop (code
, r2
, i2
);
1582 tree div
= const_binop (PLUS_EXPR
, i2
,
1583 const_binop (MULT_EXPR
, r2
, ratio
));
1584 real
= const_binop (MULT_EXPR
, r1
, ratio
);
1585 real
= const_binop (PLUS_EXPR
, real
, i1
);
1586 real
= const_binop (code
, real
, div
);
1588 imag
= const_binop (MULT_EXPR
, i1
, ratio
);
1589 imag
= const_binop (MINUS_EXPR
, imag
, r1
);
1590 imag
= const_binop (code
, imag
, div
);
1594 /* In the FALSE branch, we compute
1596 divisor = (d * ratio) + c;
1597 tr = (b * ratio) + a;
1598 ti = b - (a * ratio);
1601 tree ratio
= const_binop (code
, i2
, r2
);
1602 tree div
= const_binop (PLUS_EXPR
, r2
,
1603 const_binop (MULT_EXPR
, i2
, ratio
));
1605 real
= const_binop (MULT_EXPR
, i1
, ratio
);
1606 real
= const_binop (PLUS_EXPR
, real
, r1
);
1607 real
= const_binop (code
, real
, div
);
1609 imag
= const_binop (MULT_EXPR
, r1
, ratio
);
1610 imag
= const_binop (MINUS_EXPR
, i1
, imag
);
1611 imag
= const_binop (code
, imag
, div
);
1621 return build_complex (type
, real
, imag
);
1624 if (TREE_CODE (arg1
) == VECTOR_CST
1625 && TREE_CODE (arg2
) == VECTOR_CST
1626 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)),
1627 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2
))))
1629 tree type
= TREE_TYPE (arg1
);
1631 if (VECTOR_CST_STEPPED_P (arg1
)
1632 && VECTOR_CST_STEPPED_P (arg2
))
1633 /* We can operate directly on the encoding if:
1635 a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
1637 (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)
1639 Addition and subtraction are the supported operators
1640 for which this is true. */
1641 step_ok_p
= (code
== PLUS_EXPR
|| code
== MINUS_EXPR
);
1642 else if (VECTOR_CST_STEPPED_P (arg1
))
1643 /* We can operate directly on stepped encodings if:
1647 (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)
1649 which is true if (x -> x op c) distributes over addition. */
1650 step_ok_p
= distributes_over_addition_p (code
, 1);
1652 /* Similarly in reverse. */
1653 step_ok_p
= distributes_over_addition_p (code
, 2);
1654 tree_vector_builder elts
;
1655 if (!elts
.new_binary_operation (type
, arg1
, arg2
, step_ok_p
))
1657 unsigned int count
= elts
.encoded_nelts ();
1658 for (unsigned int i
= 0; i
< count
; ++i
)
1660 tree elem1
= VECTOR_CST_ELT (arg1
, i
);
1661 tree elem2
= VECTOR_CST_ELT (arg2
, i
);
1663 tree elt
= const_binop (code
, elem1
, elem2
);
1665 /* It is possible that const_binop cannot handle the given
1666 code and return NULL_TREE */
1667 if (elt
== NULL_TREE
)
1669 elts
.quick_push (elt
);
1672 return elts
.build ();
1675 /* Shifts allow a scalar offset for a vector. */
1676 if (TREE_CODE (arg1
) == VECTOR_CST
1677 && TREE_CODE (arg2
) == INTEGER_CST
)
1679 tree type
= TREE_TYPE (arg1
);
1680 bool step_ok_p
= distributes_over_addition_p (code
, 1);
1681 tree_vector_builder elts
;
1682 if (!elts
.new_unary_operation (type
, arg1
, step_ok_p
))
1684 unsigned int count
= elts
.encoded_nelts ();
1685 for (unsigned int i
= 0; i
< count
; ++i
)
1687 tree elem1
= VECTOR_CST_ELT (arg1
, i
);
1689 tree elt
= const_binop (code
, elem1
, arg2
);
1691 /* It is possible that const_binop cannot handle the given
1692 code and return NULL_TREE. */
1693 if (elt
== NULL_TREE
)
1695 elts
.quick_push (elt
);
1698 return elts
.build ();
1703 /* Overload that adds a TYPE parameter to be able to dispatch
1704 to fold_relational_const. */
1707 const_binop (enum tree_code code
, tree type
, tree arg1
, tree arg2
)
1709 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
1710 return fold_relational_const (code
, type
, arg1
, arg2
);
1712 /* ??? Until we make the const_binop worker take the type of the
1713 result as argument put those cases that need it here. */
1716 case VEC_SERIES_EXPR
:
1717 if (CONSTANT_CLASS_P (arg1
)
1718 && CONSTANT_CLASS_P (arg2
))
1719 return build_vec_series (type
, arg1
, arg2
);
1723 if ((TREE_CODE (arg1
) == REAL_CST
1724 && TREE_CODE (arg2
) == REAL_CST
)
1725 || (TREE_CODE (arg1
) == INTEGER_CST
1726 && TREE_CODE (arg2
) == INTEGER_CST
))
1727 return build_complex (type
, arg1
, arg2
);
1730 case POINTER_DIFF_EXPR
:
1731 if (poly_int_tree_p (arg1
) && poly_int_tree_p (arg2
))
1733 poly_offset_int res
= (wi::to_poly_offset (arg1
)
1734 - wi::to_poly_offset (arg2
));
1735 return force_fit_type (type
, res
, 1,
1736 TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
));
1740 case VEC_PACK_TRUNC_EXPR
:
1741 case VEC_PACK_FIX_TRUNC_EXPR
:
1742 case VEC_PACK_FLOAT_EXPR
:
1744 unsigned int HOST_WIDE_INT out_nelts
, in_nelts
, i
;
1746 if (TREE_CODE (arg1
) != VECTOR_CST
1747 || TREE_CODE (arg2
) != VECTOR_CST
)
1750 if (!VECTOR_CST_NELTS (arg1
).is_constant (&in_nelts
))
1753 out_nelts
= in_nelts
* 2;
1754 gcc_assert (known_eq (in_nelts
, VECTOR_CST_NELTS (arg2
))
1755 && known_eq (out_nelts
, TYPE_VECTOR_SUBPARTS (type
)));
1757 tree_vector_builder
elts (type
, out_nelts
, 1);
1758 for (i
= 0; i
< out_nelts
; i
++)
1760 tree elt
= (i
< in_nelts
1761 ? VECTOR_CST_ELT (arg1
, i
)
1762 : VECTOR_CST_ELT (arg2
, i
- in_nelts
));
1763 elt
= fold_convert_const (code
== VEC_PACK_TRUNC_EXPR
1765 : code
== VEC_PACK_FLOAT_EXPR
1766 ? FLOAT_EXPR
: FIX_TRUNC_EXPR
,
1767 TREE_TYPE (type
), elt
);
1768 if (elt
== NULL_TREE
|| !CONSTANT_CLASS_P (elt
))
1770 elts
.quick_push (elt
);
1773 return elts
.build ();
1776 case VEC_WIDEN_MULT_LO_EXPR
:
1777 case VEC_WIDEN_MULT_HI_EXPR
:
1778 case VEC_WIDEN_MULT_EVEN_EXPR
:
1779 case VEC_WIDEN_MULT_ODD_EXPR
:
1781 unsigned HOST_WIDE_INT out_nelts
, in_nelts
, out
, ofs
, scale
;
1783 if (TREE_CODE (arg1
) != VECTOR_CST
|| TREE_CODE (arg2
) != VECTOR_CST
)
1786 if (!VECTOR_CST_NELTS (arg1
).is_constant (&in_nelts
))
1788 out_nelts
= in_nelts
/ 2;
1789 gcc_assert (known_eq (in_nelts
, VECTOR_CST_NELTS (arg2
))
1790 && known_eq (out_nelts
, TYPE_VECTOR_SUBPARTS (type
)));
1792 if (code
== VEC_WIDEN_MULT_LO_EXPR
)
1793 scale
= 0, ofs
= BYTES_BIG_ENDIAN
? out_nelts
: 0;
1794 else if (code
== VEC_WIDEN_MULT_HI_EXPR
)
1795 scale
= 0, ofs
= BYTES_BIG_ENDIAN
? 0 : out_nelts
;
1796 else if (code
== VEC_WIDEN_MULT_EVEN_EXPR
)
1798 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1801 tree_vector_builder
elts (type
, out_nelts
, 1);
1802 for (out
= 0; out
< out_nelts
; out
++)
1804 unsigned int in
= (out
<< scale
) + ofs
;
1805 tree t1
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
),
1806 VECTOR_CST_ELT (arg1
, in
));
1807 tree t2
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
),
1808 VECTOR_CST_ELT (arg2
, in
));
1810 if (t1
== NULL_TREE
|| t2
== NULL_TREE
)
1812 tree elt
= const_binop (MULT_EXPR
, t1
, t2
);
1813 if (elt
== NULL_TREE
|| !CONSTANT_CLASS_P (elt
))
1815 elts
.quick_push (elt
);
1818 return elts
.build ();
1824 if (TREE_CODE_CLASS (code
) != tcc_binary
)
1827 /* Make sure type and arg0 have the same saturating flag. */
1828 gcc_checking_assert (TYPE_SATURATING (type
)
1829 == TYPE_SATURATING (TREE_TYPE (arg1
)));
1831 return const_binop (code
, arg1
, arg2
);
1834 /* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
1835 Return zero if computing the constants is not possible. */
1838 const_unop (enum tree_code code
, tree type
, tree arg0
)
1840 /* Don't perform the operation, other than NEGATE and ABS, if
1841 flag_signaling_nans is on and the operand is a signaling NaN. */
1842 if (TREE_CODE (arg0
) == REAL_CST
1843 && HONOR_SNANS (arg0
)
1844 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0
))
1845 && code
!= NEGATE_EXPR
1847 && code
!= ABSU_EXPR
)
1854 case FIX_TRUNC_EXPR
:
1855 case FIXED_CONVERT_EXPR
:
1856 return fold_convert_const (code
, type
, arg0
);
1858 case ADDR_SPACE_CONVERT_EXPR
:
1859 /* If the source address is 0, and the source address space
1860 cannot have a valid object at 0, fold to dest type null. */
1861 if (integer_zerop (arg0
)
1862 && !(targetm
.addr_space
.zero_address_valid
1863 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0
))))))
1864 return fold_convert_const (code
, type
, arg0
);
1867 case VIEW_CONVERT_EXPR
:
1868 return fold_view_convert_expr (type
, arg0
);
1872 /* Can't call fold_negate_const directly here as that doesn't
1873 handle all cases and we might not be able to negate some
1875 tree tem
= fold_negate_expr (UNKNOWN_LOCATION
, arg0
);
1876 if (tem
&& CONSTANT_CLASS_P (tem
))
1883 if (TREE_CODE (arg0
) == INTEGER_CST
|| TREE_CODE (arg0
) == REAL_CST
)
1884 return fold_abs_const (arg0
, type
);
1888 if (TREE_CODE (arg0
) == COMPLEX_CST
)
1890 tree ipart
= fold_negate_const (TREE_IMAGPART (arg0
),
1892 return build_complex (type
, TREE_REALPART (arg0
), ipart
);
1897 if (TREE_CODE (arg0
) == INTEGER_CST
)
1898 return fold_not_const (arg0
, type
);
1899 else if (POLY_INT_CST_P (arg0
))
1900 return wide_int_to_tree (type
, -poly_int_cst_value (arg0
));
1901 /* Perform BIT_NOT_EXPR on each element individually. */
1902 else if (TREE_CODE (arg0
) == VECTOR_CST
)
1906 /* This can cope with stepped encodings because ~x == -1 - x. */
1907 tree_vector_builder elements
;
1908 elements
.new_unary_operation (type
, arg0
, true);
1909 unsigned int i
, count
= elements
.encoded_nelts ();
1910 for (i
= 0; i
< count
; ++i
)
1912 elem
= VECTOR_CST_ELT (arg0
, i
);
1913 elem
= const_unop (BIT_NOT_EXPR
, TREE_TYPE (type
), elem
);
1914 if (elem
== NULL_TREE
)
1916 elements
.quick_push (elem
);
1919 return elements
.build ();
1923 case TRUTH_NOT_EXPR
:
1924 if (TREE_CODE (arg0
) == INTEGER_CST
)
1925 return constant_boolean_node (integer_zerop (arg0
), type
);
1929 if (TREE_CODE (arg0
) == COMPLEX_CST
)
1930 return fold_convert (type
, TREE_REALPART (arg0
));
1934 if (TREE_CODE (arg0
) == COMPLEX_CST
)
1935 return fold_convert (type
, TREE_IMAGPART (arg0
));
1938 case VEC_UNPACK_LO_EXPR
:
1939 case VEC_UNPACK_HI_EXPR
:
1940 case VEC_UNPACK_FLOAT_LO_EXPR
:
1941 case VEC_UNPACK_FLOAT_HI_EXPR
:
1942 case VEC_UNPACK_FIX_TRUNC_LO_EXPR
:
1943 case VEC_UNPACK_FIX_TRUNC_HI_EXPR
:
1945 unsigned HOST_WIDE_INT out_nelts
, in_nelts
, i
;
1946 enum tree_code subcode
;
1948 if (TREE_CODE (arg0
) != VECTOR_CST
)
1951 if (!VECTOR_CST_NELTS (arg0
).is_constant (&in_nelts
))
1953 out_nelts
= in_nelts
/ 2;
1954 gcc_assert (known_eq (out_nelts
, TYPE_VECTOR_SUBPARTS (type
)));
1956 unsigned int offset
= 0;
1957 if ((!BYTES_BIG_ENDIAN
) ^ (code
== VEC_UNPACK_LO_EXPR
1958 || code
== VEC_UNPACK_FLOAT_LO_EXPR
1959 || code
== VEC_UNPACK_FIX_TRUNC_LO_EXPR
))
1962 if (code
== VEC_UNPACK_LO_EXPR
|| code
== VEC_UNPACK_HI_EXPR
)
1964 else if (code
== VEC_UNPACK_FLOAT_LO_EXPR
1965 || code
== VEC_UNPACK_FLOAT_HI_EXPR
)
1966 subcode
= FLOAT_EXPR
;
1968 subcode
= FIX_TRUNC_EXPR
;
1970 tree_vector_builder
elts (type
, out_nelts
, 1);
1971 for (i
= 0; i
< out_nelts
; i
++)
1973 tree elt
= fold_convert_const (subcode
, TREE_TYPE (type
),
1974 VECTOR_CST_ELT (arg0
, i
+ offset
));
1975 if (elt
== NULL_TREE
|| !CONSTANT_CLASS_P (elt
))
1977 elts
.quick_push (elt
);
1980 return elts
.build ();
1983 case VEC_DUPLICATE_EXPR
:
1984 if (CONSTANT_CLASS_P (arg0
))
1985 return build_vector_from_val (type
, arg0
);
1995 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1996 indicates which particular sizetype to create. */
1999 size_int_kind (poly_int64 number
, enum size_type_kind kind
)
2001 return build_int_cst (sizetype_tab
[(int) kind
], number
);
2004 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
2005 is a tree code. The type of the result is taken from the operands.
2006 Both must be equivalent integer types, ala int_binop_types_match_p.
2007 If the operands are constant, so is the result. */
2010 size_binop_loc (location_t loc
, enum tree_code code
, tree arg0
, tree arg1
)
2012 tree type
= TREE_TYPE (arg0
);
2014 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
2015 return error_mark_node
;
2017 gcc_assert (int_binop_types_match_p (code
, TREE_TYPE (arg0
),
2020 /* Handle the special case of two poly_int constants faster. */
2021 if (poly_int_tree_p (arg0
) && poly_int_tree_p (arg1
))
2023 /* And some specific cases even faster than that. */
2024 if (code
== PLUS_EXPR
)
2026 if (integer_zerop (arg0
)
2027 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0
)))
2029 if (integer_zerop (arg1
)
2030 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1
)))
2033 else if (code
== MINUS_EXPR
)
2035 if (integer_zerop (arg1
)
2036 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1
)))
2039 else if (code
== MULT_EXPR
)
2041 if (integer_onep (arg0
)
2042 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0
)))
2046 /* Handle general case of two integer constants. For sizetype
2047 constant calculations we always want to know about overflow,
2048 even in the unsigned case. */
2049 tree res
= int_const_binop (code
, arg0
, arg1
, -1);
2050 if (res
!= NULL_TREE
)
2054 return fold_build2_loc (loc
, code
, type
, arg0
, arg1
);
2057 /* Given two values, either both of sizetype or both of bitsizetype,
2058 compute the difference between the two values. Return the value
2059 in signed type corresponding to the type of the operands. */
2062 size_diffop_loc (location_t loc
, tree arg0
, tree arg1
)
2064 tree type
= TREE_TYPE (arg0
);
2067 gcc_assert (int_binop_types_match_p (MINUS_EXPR
, TREE_TYPE (arg0
),
2070 /* If the type is already signed, just do the simple thing. */
2071 if (!TYPE_UNSIGNED (type
))
2072 return size_binop_loc (loc
, MINUS_EXPR
, arg0
, arg1
);
2074 if (type
== sizetype
)
2076 else if (type
== bitsizetype
)
2077 ctype
= sbitsizetype
;
2079 ctype
= signed_type_for (type
);
2081 /* If either operand is not a constant, do the conversions to the signed
2082 type and subtract. The hardware will do the right thing with any
2083 overflow in the subtraction. */
2084 if (TREE_CODE (arg0
) != INTEGER_CST
|| TREE_CODE (arg1
) != INTEGER_CST
)
2085 return size_binop_loc (loc
, MINUS_EXPR
,
2086 fold_convert_loc (loc
, ctype
, arg0
),
2087 fold_convert_loc (loc
, ctype
, arg1
));
2089 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2090 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2091 overflow) and negate (which can't either). Special-case a result
2092 of zero while we're here. */
2093 if (tree_int_cst_equal (arg0
, arg1
))
2094 return build_int_cst (ctype
, 0);
2095 else if (tree_int_cst_lt (arg1
, arg0
))
2096 return fold_convert_loc (loc
, ctype
,
2097 size_binop_loc (loc
, MINUS_EXPR
, arg0
, arg1
));
2099 return size_binop_loc (loc
, MINUS_EXPR
, build_int_cst (ctype
, 0),
2100 fold_convert_loc (loc
, ctype
,
2101 size_binop_loc (loc
,
2106 /* A subroutine of fold_convert_const handling conversions of an
2107 INTEGER_CST to another integer type. */
2110 fold_convert_const_int_from_int (tree type
, const_tree arg1
)
2112 /* Given an integer constant, make new constant with new type,
2113 appropriately sign-extended or truncated. Use widest_int
2114 so that any extension is done according ARG1's type. */
2115 return force_fit_type (type
, wi::to_widest (arg1
),
2116 !POINTER_TYPE_P (TREE_TYPE (arg1
)),
2117 TREE_OVERFLOW (arg1
));
2120 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2121 to an integer type. */
2124 fold_convert_const_int_from_real (enum tree_code code
, tree type
, const_tree arg1
)
2126 bool overflow
= false;
2129 /* The following code implements the floating point to integer
2130 conversion rules required by the Java Language Specification,
2131 that IEEE NaNs are mapped to zero and values that overflow
2132 the target precision saturate, i.e. values greater than
2133 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2134 are mapped to INT_MIN. These semantics are allowed by the
2135 C and C++ standards that simply state that the behavior of
2136 FP-to-integer conversion is unspecified upon overflow. */
2140 REAL_VALUE_TYPE x
= TREE_REAL_CST (arg1
);
2144 case FIX_TRUNC_EXPR
:
2145 real_trunc (&r
, VOIDmode
, &x
);
2152 /* If R is NaN, return zero and show we have an overflow. */
2153 if (REAL_VALUE_ISNAN (r
))
2156 val
= wi::zero (TYPE_PRECISION (type
));
2159 /* See if R is less than the lower bound or greater than the
2164 tree lt
= TYPE_MIN_VALUE (type
);
2165 REAL_VALUE_TYPE l
= real_value_from_int_cst (NULL_TREE
, lt
);
2166 if (real_less (&r
, &l
))
2169 val
= wi::to_wide (lt
);
2175 tree ut
= TYPE_MAX_VALUE (type
);
2178 REAL_VALUE_TYPE u
= real_value_from_int_cst (NULL_TREE
, ut
);
2179 if (real_less (&u
, &r
))
2182 val
= wi::to_wide (ut
);
2188 val
= real_to_integer (&r
, &overflow
, TYPE_PRECISION (type
));
2190 t
= force_fit_type (type
, val
, -1, overflow
| TREE_OVERFLOW (arg1
));
2194 /* A subroutine of fold_convert_const handling conversions of a
2195 FIXED_CST to an integer type. */
2198 fold_convert_const_int_from_fixed (tree type
, const_tree arg1
)
2201 double_int temp
, temp_trunc
;
2204 /* Right shift FIXED_CST to temp by fbit. */
2205 temp
= TREE_FIXED_CST (arg1
).data
;
2206 mode
= TREE_FIXED_CST (arg1
).mode
;
2207 if (GET_MODE_FBIT (mode
) < HOST_BITS_PER_DOUBLE_INT
)
2209 temp
= temp
.rshift (GET_MODE_FBIT (mode
),
2210 HOST_BITS_PER_DOUBLE_INT
,
2211 SIGNED_FIXED_POINT_MODE_P (mode
));
2213 /* Left shift temp to temp_trunc by fbit. */
2214 temp_trunc
= temp
.lshift (GET_MODE_FBIT (mode
),
2215 HOST_BITS_PER_DOUBLE_INT
,
2216 SIGNED_FIXED_POINT_MODE_P (mode
));
2220 temp
= double_int_zero
;
2221 temp_trunc
= double_int_zero
;
2224 /* If FIXED_CST is negative, we need to round the value toward 0.
2225 By checking if the fractional bits are not zero to add 1 to temp. */
2226 if (SIGNED_FIXED_POINT_MODE_P (mode
)
2227 && temp_trunc
.is_negative ()
2228 && TREE_FIXED_CST (arg1
).data
!= temp_trunc
)
2229 temp
+= double_int_one
;
2231 /* Given a fixed-point constant, make new constant with new type,
2232 appropriately sign-extended or truncated. */
2233 t
= force_fit_type (type
, temp
, -1,
2234 (temp
.is_negative ()
2235 && (TYPE_UNSIGNED (type
)
2236 < TYPE_UNSIGNED (TREE_TYPE (arg1
))))
2237 | TREE_OVERFLOW (arg1
));
2242 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2243 to another floating point type. */
2246 fold_convert_const_real_from_real (tree type
, const_tree arg1
)
2248 REAL_VALUE_TYPE value
;
2251 /* If the underlying modes are the same, simply treat it as
2252 copy and rebuild with TREE_REAL_CST information and the
2254 if (TYPE_MODE (type
) == TYPE_MODE (TREE_TYPE (arg1
)))
2256 t
= build_real (type
, TREE_REAL_CST (arg1
));
2260 /* Don't perform the operation if flag_signaling_nans is on
2261 and the operand is a signaling NaN. */
2262 if (HONOR_SNANS (arg1
)
2263 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1
)))
2266 /* With flag_rounding_math we should respect the current rounding mode
2267 unless the conversion is exact. */
2268 if (HONOR_SIGN_DEPENDENT_ROUNDING (arg1
)
2269 && !exact_real_truncate (TYPE_MODE (type
), &TREE_REAL_CST (arg1
)))
2272 real_convert (&value
, TYPE_MODE (type
), &TREE_REAL_CST (arg1
));
2273 t
= build_real (type
, value
);
2275 /* If converting an infinity or NAN to a representation that doesn't
2276 have one, set the overflow bit so that we can produce some kind of
2277 error message at the appropriate point if necessary. It's not the
2278 most user-friendly message, but it's better than nothing. */
2279 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1
))
2280 && !MODE_HAS_INFINITIES (TYPE_MODE (type
)))
2281 TREE_OVERFLOW (t
) = 1;
2282 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1
))
2283 && !MODE_HAS_NANS (TYPE_MODE (type
)))
2284 TREE_OVERFLOW (t
) = 1;
2285 /* Regular overflow, conversion produced an infinity in a mode that
2286 can't represent them. */
2287 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type
))
2288 && REAL_VALUE_ISINF (value
)
2289 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1
)))
2290 TREE_OVERFLOW (t
) = 1;
2292 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
);
2296 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2297 to a floating point type. */
2300 fold_convert_const_real_from_fixed (tree type
, const_tree arg1
)
2302 REAL_VALUE_TYPE value
;
2305 real_convert_from_fixed (&value
, SCALAR_FLOAT_TYPE_MODE (type
),
2306 &TREE_FIXED_CST (arg1
));
2307 t
= build_real (type
, value
);
2309 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
);
2313 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2314 to another fixed-point type. */
2317 fold_convert_const_fixed_from_fixed (tree type
, const_tree arg1
)
2319 FIXED_VALUE_TYPE value
;
2323 overflow_p
= fixed_convert (&value
, SCALAR_TYPE_MODE (type
),
2324 &TREE_FIXED_CST (arg1
), TYPE_SATURATING (type
));
2325 t
= build_fixed (type
, value
);
2327 /* Propagate overflow flags. */
2328 if (overflow_p
| TREE_OVERFLOW (arg1
))
2329 TREE_OVERFLOW (t
) = 1;
2333 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2334 to a fixed-point type. */
2337 fold_convert_const_fixed_from_int (tree type
, const_tree arg1
)
2339 FIXED_VALUE_TYPE value
;
2344 gcc_assert (TREE_INT_CST_NUNITS (arg1
) <= 2);
2346 di
.low
= TREE_INT_CST_ELT (arg1
, 0);
2347 if (TREE_INT_CST_NUNITS (arg1
) == 1)
2348 di
.high
= (HOST_WIDE_INT
) di
.low
< 0 ? HOST_WIDE_INT_M1
: 0;
2350 di
.high
= TREE_INT_CST_ELT (arg1
, 1);
2352 overflow_p
= fixed_convert_from_int (&value
, SCALAR_TYPE_MODE (type
), di
,
2353 TYPE_UNSIGNED (TREE_TYPE (arg1
)),
2354 TYPE_SATURATING (type
));
2355 t
= build_fixed (type
, value
);
2357 /* Propagate overflow flags. */
2358 if (overflow_p
| TREE_OVERFLOW (arg1
))
2359 TREE_OVERFLOW (t
) = 1;
2363 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2364 to a fixed-point type. */
2367 fold_convert_const_fixed_from_real (tree type
, const_tree arg1
)
2369 FIXED_VALUE_TYPE value
;
2373 overflow_p
= fixed_convert_from_real (&value
, SCALAR_TYPE_MODE (type
),
2374 &TREE_REAL_CST (arg1
),
2375 TYPE_SATURATING (type
));
2376 t
= build_fixed (type
, value
);
2378 /* Propagate overflow flags. */
2379 if (overflow_p
| TREE_OVERFLOW (arg1
))
2380 TREE_OVERFLOW (t
) = 1;
2384 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2385 type TYPE. If no simplification can be done return NULL_TREE. */
2388 fold_convert_const (enum tree_code code
, tree type
, tree arg1
)
2390 tree arg_type
= TREE_TYPE (arg1
);
2391 if (arg_type
== type
)
2394 /* We can't widen types, since the runtime value could overflow the
2395 original type before being extended to the new type. */
2396 if (POLY_INT_CST_P (arg1
)
2397 && (POINTER_TYPE_P (type
) || INTEGRAL_TYPE_P (type
))
2398 && TYPE_PRECISION (type
) <= TYPE_PRECISION (arg_type
))
2399 return build_poly_int_cst (type
,
2400 poly_wide_int::from (poly_int_cst_value (arg1
),
2401 TYPE_PRECISION (type
),
2402 TYPE_SIGN (arg_type
)));
2404 if (POINTER_TYPE_P (type
) || INTEGRAL_TYPE_P (type
)
2405 || TREE_CODE (type
) == OFFSET_TYPE
)
2407 if (TREE_CODE (arg1
) == INTEGER_CST
)
2408 return fold_convert_const_int_from_int (type
, arg1
);
2409 else if (TREE_CODE (arg1
) == REAL_CST
)
2410 return fold_convert_const_int_from_real (code
, type
, arg1
);
2411 else if (TREE_CODE (arg1
) == FIXED_CST
)
2412 return fold_convert_const_int_from_fixed (type
, arg1
);
2414 else if (SCALAR_FLOAT_TYPE_P (type
))
2416 if (TREE_CODE (arg1
) == INTEGER_CST
)
2418 tree res
= build_real_from_int_cst (type
, arg1
);
2419 /* Avoid the folding if flag_rounding_math is on and the
2420 conversion is not exact. */
2421 if (HONOR_SIGN_DEPENDENT_ROUNDING (type
))
2424 wide_int w
= real_to_integer (&TREE_REAL_CST (res
), &fail
,
2425 TYPE_PRECISION (TREE_TYPE (arg1
)));
2426 if (fail
|| wi::ne_p (w
, wi::to_wide (arg1
)))
2431 else if (TREE_CODE (arg1
) == REAL_CST
)
2432 return fold_convert_const_real_from_real (type
, arg1
);
2433 else if (TREE_CODE (arg1
) == FIXED_CST
)
2434 return fold_convert_const_real_from_fixed (type
, arg1
);
2436 else if (FIXED_POINT_TYPE_P (type
))
2438 if (TREE_CODE (arg1
) == FIXED_CST
)
2439 return fold_convert_const_fixed_from_fixed (type
, arg1
);
2440 else if (TREE_CODE (arg1
) == INTEGER_CST
)
2441 return fold_convert_const_fixed_from_int (type
, arg1
);
2442 else if (TREE_CODE (arg1
) == REAL_CST
)
2443 return fold_convert_const_fixed_from_real (type
, arg1
);
2445 else if (VECTOR_TYPE_P (type
))
2447 if (TREE_CODE (arg1
) == VECTOR_CST
2448 && known_eq (TYPE_VECTOR_SUBPARTS (type
), VECTOR_CST_NELTS (arg1
)))
2450 tree elttype
= TREE_TYPE (type
);
2451 tree arg1_elttype
= TREE_TYPE (TREE_TYPE (arg1
));
2452 /* We can't handle steps directly when extending, since the
2453 values need to wrap at the original precision first. */
2455 = (INTEGRAL_TYPE_P (elttype
)
2456 && INTEGRAL_TYPE_P (arg1_elttype
)
2457 && TYPE_PRECISION (elttype
) <= TYPE_PRECISION (arg1_elttype
));
2458 tree_vector_builder v
;
2459 if (!v
.new_unary_operation (type
, arg1
, step_ok_p
))
2461 unsigned int len
= v
.encoded_nelts ();
2462 for (unsigned int i
= 0; i
< len
; ++i
)
2464 tree elt
= VECTOR_CST_ELT (arg1
, i
);
2465 tree cvt
= fold_convert_const (code
, elttype
, elt
);
2466 if (cvt
== NULL_TREE
)
2476 /* Construct a vector of zero elements of vector type TYPE. */
2479 build_zero_vector (tree type
)
2483 t
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), integer_zero_node
);
2484 return build_vector_from_val (type
, t
);
2487 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2490 fold_convertible_p (const_tree type
, const_tree arg
)
2492 const_tree orig
= TREE_TYPE (arg
);
2497 if (TREE_CODE (arg
) == ERROR_MARK
2498 || TREE_CODE (type
) == ERROR_MARK
2499 || TREE_CODE (orig
) == ERROR_MARK
)
2502 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig
))
2505 switch (TREE_CODE (type
))
2507 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
2508 case POINTER_TYPE
: case REFERENCE_TYPE
:
2510 return (INTEGRAL_TYPE_P (orig
)
2511 || (POINTER_TYPE_P (orig
)
2512 && TYPE_PRECISION (type
) <= TYPE_PRECISION (orig
))
2513 || TREE_CODE (orig
) == OFFSET_TYPE
);
2516 case FIXED_POINT_TYPE
:
2518 return TREE_CODE (type
) == TREE_CODE (orig
);
2521 return (VECTOR_TYPE_P (orig
)
2522 && known_eq (TYPE_VECTOR_SUBPARTS (type
),
2523 TYPE_VECTOR_SUBPARTS (orig
))
2524 && tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
2531 /* Convert expression ARG to type TYPE. Used by the middle-end for
2532 simple conversions in preference to calling the front-end's convert. */
2535 fold_convert_loc (location_t loc
, tree type
, tree arg
)
2537 tree orig
= TREE_TYPE (arg
);
2543 if (TREE_CODE (arg
) == ERROR_MARK
2544 || TREE_CODE (type
) == ERROR_MARK
2545 || TREE_CODE (orig
) == ERROR_MARK
)
2546 return error_mark_node
;
2548 switch (TREE_CODE (type
))
2551 case REFERENCE_TYPE
:
2552 /* Handle conversions between pointers to different address spaces. */
2553 if (POINTER_TYPE_P (orig
)
2554 && (TYPE_ADDR_SPACE (TREE_TYPE (type
))
2555 != TYPE_ADDR_SPACE (TREE_TYPE (orig
))))
2556 return fold_build1_loc (loc
, ADDR_SPACE_CONVERT_EXPR
, type
, arg
);
2559 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
2561 if (TREE_CODE (arg
) == INTEGER_CST
)
2563 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
2564 if (tem
!= NULL_TREE
)
2567 if (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2568 || TREE_CODE (orig
) == OFFSET_TYPE
)
2569 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2570 if (TREE_CODE (orig
) == COMPLEX_TYPE
)
2571 return fold_convert_loc (loc
, type
,
2572 fold_build1_loc (loc
, REALPART_EXPR
,
2573 TREE_TYPE (orig
), arg
));
2574 gcc_assert (VECTOR_TYPE_P (orig
)
2575 && tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
2576 return fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, type
, arg
);
2579 if (TREE_CODE (arg
) == INTEGER_CST
)
2581 tem
= fold_convert_const (FLOAT_EXPR
, type
, arg
);
2582 if (tem
!= NULL_TREE
)
2585 else if (TREE_CODE (arg
) == REAL_CST
)
2587 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
2588 if (tem
!= NULL_TREE
)
2591 else if (TREE_CODE (arg
) == FIXED_CST
)
2593 tem
= fold_convert_const (FIXED_CONVERT_EXPR
, type
, arg
);
2594 if (tem
!= NULL_TREE
)
2598 switch (TREE_CODE (orig
))
2601 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
2602 case POINTER_TYPE
: case REFERENCE_TYPE
:
2603 return fold_build1_loc (loc
, FLOAT_EXPR
, type
, arg
);
2606 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2608 case FIXED_POINT_TYPE
:
2609 return fold_build1_loc (loc
, FIXED_CONVERT_EXPR
, type
, arg
);
2612 tem
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2613 return fold_convert_loc (loc
, type
, tem
);
2619 case FIXED_POINT_TYPE
:
2620 if (TREE_CODE (arg
) == FIXED_CST
|| TREE_CODE (arg
) == INTEGER_CST
2621 || TREE_CODE (arg
) == REAL_CST
)
2623 tem
= fold_convert_const (FIXED_CONVERT_EXPR
, type
, arg
);
2624 if (tem
!= NULL_TREE
)
2625 goto fold_convert_exit
;
2628 switch (TREE_CODE (orig
))
2630 case FIXED_POINT_TYPE
:
2635 return fold_build1_loc (loc
, FIXED_CONVERT_EXPR
, type
, arg
);
2638 tem
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2639 return fold_convert_loc (loc
, type
, tem
);
2646 switch (TREE_CODE (orig
))
2649 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
2650 case POINTER_TYPE
: case REFERENCE_TYPE
:
2652 case FIXED_POINT_TYPE
:
2653 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
2654 fold_convert_loc (loc
, TREE_TYPE (type
), arg
),
2655 fold_convert_loc (loc
, TREE_TYPE (type
),
2656 integer_zero_node
));
2661 if (TREE_CODE (arg
) == COMPLEX_EXPR
)
2663 rpart
= fold_convert_loc (loc
, TREE_TYPE (type
),
2664 TREE_OPERAND (arg
, 0));
2665 ipart
= fold_convert_loc (loc
, TREE_TYPE (type
),
2666 TREE_OPERAND (arg
, 1));
2667 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
, ipart
);
2670 arg
= save_expr (arg
);
2671 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2672 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, TREE_TYPE (orig
), arg
);
2673 rpart
= fold_convert_loc (loc
, TREE_TYPE (type
), rpart
);
2674 ipart
= fold_convert_loc (loc
, TREE_TYPE (type
), ipart
);
2675 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
, ipart
);
2683 if (integer_zerop (arg
))
2684 return build_zero_vector (type
);
2685 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
2686 gcc_assert (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2687 || VECTOR_TYPE_P (orig
));
2688 return fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, type
, arg
);
2691 tem
= fold_ignored_result (arg
);
2692 return fold_build1_loc (loc
, NOP_EXPR
, type
, tem
);
2695 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig
))
2696 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2700 tem
= protected_set_expr_location_unshare (tem
, loc
);
2704 /* Return false if expr can be assumed not to be an lvalue, true
2708 maybe_lvalue_p (const_tree x
)
2710 /* We only need to wrap lvalue tree codes. */
2711 switch (TREE_CODE (x
))
2719 case COMPOUND_LITERAL_EXPR
:
2725 case ARRAY_RANGE_REF
:
2731 case PREINCREMENT_EXPR
:
2732 case PREDECREMENT_EXPR
:
2734 case TRY_CATCH_EXPR
:
2735 case WITH_CLEANUP_EXPR
:
2741 case VIEW_CONVERT_EXPR
:
2745 /* Assume the worst for front-end tree codes. */
2746 if ((int)TREE_CODE (x
) >= NUM_TREE_CODES
)
2754 /* Return an expr equal to X but certainly not valid as an lvalue. */
2757 non_lvalue_loc (location_t loc
, tree x
)
2759 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2764 if (! maybe_lvalue_p (x
))
2766 return build1_loc (loc
, NON_LVALUE_EXPR
, TREE_TYPE (x
), x
);
2769 /* Given a tree comparison code, return the code that is the logical inverse.
2770 It is generally not safe to do this for floating-point comparisons, except
2771 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2772 ERROR_MARK in this case. */
2775 invert_tree_comparison (enum tree_code code
, bool honor_nans
)
2777 if (honor_nans
&& flag_trapping_math
&& code
!= EQ_EXPR
&& code
!= NE_EXPR
2778 && code
!= ORDERED_EXPR
&& code
!= UNORDERED_EXPR
)
2788 return honor_nans
? UNLE_EXPR
: LE_EXPR
;
2790 return honor_nans
? UNLT_EXPR
: LT_EXPR
;
2792 return honor_nans
? UNGE_EXPR
: GE_EXPR
;
2794 return honor_nans
? UNGT_EXPR
: GT_EXPR
;
2808 return UNORDERED_EXPR
;
2809 case UNORDERED_EXPR
:
2810 return ORDERED_EXPR
;
2816 /* Similar, but return the comparison that results if the operands are
2817 swapped. This is safe for floating-point. */
2820 swap_tree_comparison (enum tree_code code
)
2827 case UNORDERED_EXPR
:
2853 /* Convert a comparison tree code from an enum tree_code representation
2854 into a compcode bit-based encoding. This function is the inverse of
2855 compcode_to_comparison. */
2857 static enum comparison_code
2858 comparison_to_compcode (enum tree_code code
)
2875 return COMPCODE_ORD
;
2876 case UNORDERED_EXPR
:
2877 return COMPCODE_UNORD
;
2879 return COMPCODE_UNLT
;
2881 return COMPCODE_UNEQ
;
2883 return COMPCODE_UNLE
;
2885 return COMPCODE_UNGT
;
2887 return COMPCODE_LTGT
;
2889 return COMPCODE_UNGE
;
2895 /* Convert a compcode bit-based encoding of a comparison operator back
2896 to GCC's enum tree_code representation. This function is the
2897 inverse of comparison_to_compcode. */
2899 static enum tree_code
2900 compcode_to_comparison (enum comparison_code code
)
2917 return ORDERED_EXPR
;
2918 case COMPCODE_UNORD
:
2919 return UNORDERED_EXPR
;
2937 /* Return true if COND1 tests the opposite condition of COND2. */
2940 inverse_conditions_p (const_tree cond1
, const_tree cond2
)
2942 return (COMPARISON_CLASS_P (cond1
)
2943 && COMPARISON_CLASS_P (cond2
)
2944 && (invert_tree_comparison
2946 HONOR_NANS (TREE_OPERAND (cond1
, 0))) == TREE_CODE (cond2
))
2947 && operand_equal_p (TREE_OPERAND (cond1
, 0),
2948 TREE_OPERAND (cond2
, 0), 0)
2949 && operand_equal_p (TREE_OPERAND (cond1
, 1),
2950 TREE_OPERAND (cond2
, 1), 0));
2953 /* Return a tree for the comparison which is the combination of
2954 doing the AND or OR (depending on CODE) of the two operations LCODE
2955 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2956 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2957 if this makes the transformation invalid. */
2960 combine_comparisons (location_t loc
,
2961 enum tree_code code
, enum tree_code lcode
,
2962 enum tree_code rcode
, tree truth_type
,
2963 tree ll_arg
, tree lr_arg
)
2965 bool honor_nans
= HONOR_NANS (ll_arg
);
2966 enum comparison_code lcompcode
= comparison_to_compcode (lcode
);
2967 enum comparison_code rcompcode
= comparison_to_compcode (rcode
);
2972 case TRUTH_AND_EXPR
: case TRUTH_ANDIF_EXPR
:
2973 compcode
= lcompcode
& rcompcode
;
2976 case TRUTH_OR_EXPR
: case TRUTH_ORIF_EXPR
:
2977 compcode
= lcompcode
| rcompcode
;
2986 /* Eliminate unordered comparisons, as well as LTGT and ORD
2987 which are not used unless the mode has NaNs. */
2988 compcode
&= ~COMPCODE_UNORD
;
2989 if (compcode
== COMPCODE_LTGT
)
2990 compcode
= COMPCODE_NE
;
2991 else if (compcode
== COMPCODE_ORD
)
2992 compcode
= COMPCODE_TRUE
;
2994 else if (flag_trapping_math
)
2996 /* Check that the original operation and the optimized ones will trap
2997 under the same condition. */
2998 bool ltrap
= (lcompcode
& COMPCODE_UNORD
) == 0
2999 && (lcompcode
!= COMPCODE_EQ
)
3000 && (lcompcode
!= COMPCODE_ORD
);
3001 bool rtrap
= (rcompcode
& COMPCODE_UNORD
) == 0
3002 && (rcompcode
!= COMPCODE_EQ
)
3003 && (rcompcode
!= COMPCODE_ORD
);
3004 bool trap
= (compcode
& COMPCODE_UNORD
) == 0
3005 && (compcode
!= COMPCODE_EQ
)
3006 && (compcode
!= COMPCODE_ORD
);
3008 /* In a short-circuited boolean expression the LHS might be
3009 such that the RHS, if evaluated, will never trap. For
3010 example, in ORD (x, y) && (x < y), we evaluate the RHS only
3011 if neither x nor y is NaN. (This is a mixed blessing: for
3012 example, the expression above will never trap, hence
3013 optimizing it to x < y would be invalid). */
3014 if ((code
== TRUTH_ORIF_EXPR
&& (lcompcode
& COMPCODE_UNORD
))
3015 || (code
== TRUTH_ANDIF_EXPR
&& !(lcompcode
& COMPCODE_UNORD
)))
3018 /* If the comparison was short-circuited, and only the RHS
3019 trapped, we may now generate a spurious trap. */
3021 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
3024 /* If we changed the conditions that cause a trap, we lose. */
3025 if ((ltrap
|| rtrap
) != trap
)
3029 if (compcode
== COMPCODE_TRUE
)
3030 return constant_boolean_node (true, truth_type
);
3031 else if (compcode
== COMPCODE_FALSE
)
3032 return constant_boolean_node (false, truth_type
);
3035 enum tree_code tcode
;
3037 tcode
= compcode_to_comparison ((enum comparison_code
) compcode
);
3038 return fold_build2_loc (loc
, tcode
, truth_type
, ll_arg
, lr_arg
);
3042 /* Return nonzero if two operands (typically of the same tree node)
3043 are necessarily equal. FLAGS modifies behavior as follows:
3045 If OEP_ONLY_CONST is set, only return nonzero for constants.
3046 This function tests whether the operands are indistinguishable;
3047 it does not test whether they are equal using C's == operation.
3048 The distinction is important for IEEE floating point, because
3049 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3050 (2) two NaNs may be indistinguishable, but NaN!=NaN.
3052 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3053 even though it may hold multiple values during a function.
3054 This is because a GCC tree node guarantees that nothing else is
3055 executed between the evaluation of its "operands" (which may often
3056 be evaluated in arbitrary order). Hence if the operands themselves
3057 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3058 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3059 unset means assuming isochronic (or instantaneous) tree equivalence.
3060 Unless comparing arbitrary expression trees, such as from different
3061 statements, this flag can usually be left unset.
3063 If OEP_PURE_SAME is set, then pure functions with identical arguments
3064 are considered the same. It is used when the caller has other ways
3065 to ensure that global memory is unchanged in between.
3067 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
3068 not values of expressions.
3070 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
3071 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
3073 If OEP_BITWISE is set, then require the values to be bitwise identical
3074 rather than simply numerically equal. Do not take advantage of things
3075 like math-related flags or undefined behavior; only return true for
3076 values that are provably bitwise identical in all circumstances.
3078 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
3079 any operand with side effect. This is unnecessarily conservative in the
3080 case we know that arg0 and arg1 are in disjoint code paths (such as in
3081 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
3082 addresses with TREE_CONSTANT flag set so we know that &var == &var
3083 even if var is volatile. */
/* NOTE(review): this chunk is a lossy extraction of
   operand_compare::operand_equal_p from fold-const.cc -- statements are
   split across lines and many original lines are missing (the source
   line numbers embedded below jump).  The code is left byte-identical;
   only comments were added/fixed.  Restore from upstream before
   compiling.  */
3086 operand_compare::operand_equal_p (const_tree arg0
, const_tree arg1
,
/* Fast path: a cached hash mismatch proves inequality up front.  */
3090 if (verify_hash_value (arg0
, arg1
, flags
, &r
))
/* Location wrappers are transparent for equality purposes.  */
3093 STRIP_ANY_LOCATION_WRAPPER (arg0
);
3094 STRIP_ANY_LOCATION_WRAPPER (arg1
);
3096 /* If either is ERROR_MARK, they aren't equal. */
3097 if (TREE_CODE (arg0
) == ERROR_MARK
|| TREE_CODE (arg1
) == ERROR_MARK
3098 || TREE_TYPE (arg0
) == error_mark_node
3099 || TREE_TYPE (arg1
) == error_mark_node
)
3102 /* Similar, if either does not have a type (like a template id),
3103 they aren't equal. */
3104 if (!TREE_TYPE (arg0
) || !TREE_TYPE (arg1
))
3107 /* Bitwise identity makes no sense if the values have different layouts. */
3108 if ((flags
& OEP_BITWISE
)
3109 && !tree_nop_conversion_p (TREE_TYPE (arg0
), TREE_TYPE (arg1
)))
3112 /* We cannot consider pointers to different address space equal. */
3113 if (POINTER_TYPE_P (TREE_TYPE (arg0
))
3114 && POINTER_TYPE_P (TREE_TYPE (arg1
))
3115 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0
)))
3116 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1
)))))
3119 /* Check equality of integer constants before bailing out due to
3120 precision differences. */
3121 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
3123 /* Address of INTEGER_CST is not defined; check that we did not forget
3124 to drop the OEP_ADDRESS_OF flags. */
3125 gcc_checking_assert (!(flags
& OEP_ADDRESS_OF
));
3126 return tree_int_cst_equal (arg0
, arg1
);
3129 if (!(flags
& OEP_ADDRESS_OF
))
3131 /* If both types don't have the same signedness, then we can't consider
3132 them equal. We must check this before the STRIP_NOPS calls
3133 because they may change the signedness of the arguments. As pointers
3134 strictly don't have a signedness, require either two pointers or
3135 two non-pointers as well. */
3136 if (TYPE_UNSIGNED (TREE_TYPE (arg0
)) != TYPE_UNSIGNED (TREE_TYPE (arg1
))
3137 || POINTER_TYPE_P (TREE_TYPE (arg0
))
3138 != POINTER_TYPE_P (TREE_TYPE (arg1
)))
3141 /* If both types don't have the same precision, then it is not safe
3143 if (element_precision (TREE_TYPE (arg0
))
3144 != element_precision (TREE_TYPE (arg1
)))
3151 /* FIXME: Fortran FE currently produce ADDR_EXPR of NOP_EXPR. Enable the
3152 sanity check once the issue is solved. */
3154 /* Addresses of conversions and SSA_NAMEs (and many other things)
3155 are not defined. Check that we did not forget to drop the
3156 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
3157 gcc_checking_assert (!CONVERT_EXPR_P (arg0
) && !CONVERT_EXPR_P (arg1
)
3158 && TREE_CODE (arg0
) != SSA_NAME
);
3161 /* In case both args are comparisons but with different comparison
3162 code, try to swap the comparison operands of one arg to produce
3163 a match and compare that variant. */
3164 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
3165 && COMPARISON_CLASS_P (arg0
)
3166 && COMPARISON_CLASS_P (arg1
))
3168 enum tree_code swap_code
= swap_tree_comparison (TREE_CODE (arg1
));
3170 if (TREE_CODE (arg0
) == swap_code
)
3171 return operand_equal_p (TREE_OPERAND (arg0
, 0),
3172 TREE_OPERAND (arg1
, 1), flags
)
3173 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3174 TREE_OPERAND (arg1
, 0), flags
);
3177 if (TREE_CODE (arg0
) != TREE_CODE (arg1
))
3179 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
3180 if (CONVERT_EXPR_P (arg0
) && CONVERT_EXPR_P (arg1
))
3182 else if (flags
& OEP_ADDRESS_OF
)
3184 /* If we are interested in comparing addresses ignore
3185 MEM_REF wrappings of the base that can appear just for
3187 if (TREE_CODE (arg0
) == MEM_REF
3189 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == ADDR_EXPR
3190 && TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0) == arg1
3191 && integer_zerop (TREE_OPERAND (arg0
, 1)))
3193 else if (TREE_CODE (arg1
) == MEM_REF
3195 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == ADDR_EXPR
3196 && TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0) == arg0
3197 && integer_zerop (TREE_OPERAND (arg1
, 1)))
3205 /* When not checking addresses, this is needed for conversions and for
3206 COMPONENT_REF. Might as well play it safe and always test this. */
3207 if (TREE_CODE (TREE_TYPE (arg0
)) == ERROR_MARK
3208 || TREE_CODE (TREE_TYPE (arg1
)) == ERROR_MARK
3209 || (TYPE_MODE (TREE_TYPE (arg0
)) != TYPE_MODE (TREE_TYPE (arg1
))
3210 && !(flags
& OEP_ADDRESS_OF
)))
3213 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3214 We don't care about side effects in that case because the SAVE_EXPR
3215 takes care of that for us. In all other cases, two expressions are
3216 equal if they have no side effects. If we have two identical
3217 expressions with side effects that should be treated the same due
3218 to the only side effects being identical SAVE_EXPR's, that will
3219 be detected in the recursive calls below.
3220 If we are taking an invariant address of two identical objects
3221 they are necessarily equal as well. */
3222 if (arg0
== arg1
&& ! (flags
& OEP_ONLY_CONST
)
3223 && (TREE_CODE (arg0
) == SAVE_EXPR
3224 || (flags
& OEP_MATCH_SIDE_EFFECTS
)
3225 || (! TREE_SIDE_EFFECTS (arg0
) && ! TREE_SIDE_EFFECTS (arg1
))))
3228 /* Next handle constant cases, those for which we can return 1 even
3229 if ONLY_CONST is set. */
3230 if (TREE_CONSTANT (arg0
) && TREE_CONSTANT (arg1
))
3231 switch (TREE_CODE (arg0
))
3234 return tree_int_cst_equal (arg0
, arg1
);
3237 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0
),
3238 TREE_FIXED_CST (arg1
));
3241 if (real_identical (&TREE_REAL_CST (arg0
), &TREE_REAL_CST (arg1
)))
3244 if (!(flags
& OEP_BITWISE
) && !HONOR_SIGNED_ZEROS (arg0
))
3246 /* If we do not distinguish between signed and unsigned zero,
3247 consider them equal. */
3248 if (real_zerop (arg0
) && real_zerop (arg1
))
/* VECTOR_CST: the pattern-based encodings must agree element-wise.  */
3255 if (VECTOR_CST_LOG2_NPATTERNS (arg0
)
3256 != VECTOR_CST_LOG2_NPATTERNS (arg1
))
3259 if (VECTOR_CST_NELTS_PER_PATTERN (arg0
)
3260 != VECTOR_CST_NELTS_PER_PATTERN (arg1
))
3263 unsigned int count
= vector_cst_encoded_nelts (arg0
);
3264 for (unsigned int i
= 0; i
< count
; ++i
)
3265 if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0
, i
),
3266 VECTOR_CST_ENCODED_ELT (arg1
, i
), flags
))
3272 return (operand_equal_p (TREE_REALPART (arg0
), TREE_REALPART (arg1
),
3274 && operand_equal_p (TREE_IMAGPART (arg0
), TREE_IMAGPART (arg1
),
3278 return (TREE_STRING_LENGTH (arg0
) == TREE_STRING_LENGTH (arg1
)
3279 && ! memcmp (TREE_STRING_POINTER (arg0
),
3280 TREE_STRING_POINTER (arg1
),
3281 TREE_STRING_LENGTH (arg0
)));
3284 gcc_checking_assert (!(flags
& OEP_ADDRESS_OF
));
3285 return operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0),
3286 flags
| OEP_ADDRESS_OF
3287 | OEP_MATCH_SIDE_EFFECTS
);
3289 /* In GIMPLE empty constructors are allowed in initializers of
3291 return !CONSTRUCTOR_NELTS (arg0
) && !CONSTRUCTOR_NELTS (arg1
);
3296 /* Don't handle more cases for OEP_BITWISE, since we can't guarantee that
3297 two instances of undefined behavior will give identical results. */
3298 if (flags
& (OEP_ONLY_CONST
| OEP_BITWISE
))
3301 /* Define macros to test an operand from arg0 and arg1 for equality and a
3302 variant that allows null and views null as being different from any
3303 non-null value. In the latter case, if either is null, the both
3304 must be; otherwise, do the normal comparison. */
3305 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3306 TREE_OPERAND (arg1, N), flags)
3308 #define OP_SAME_WITH_NULL(N) \
3309 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3310 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3312 switch (TREE_CODE_CLASS (TREE_CODE (arg0
)))
3315 /* Two conversions are equal only if signedness and modes match. */
3316 switch (TREE_CODE (arg0
))
3319 case FIX_TRUNC_EXPR
:
3320 if (TYPE_UNSIGNED (TREE_TYPE (arg0
))
3321 != TYPE_UNSIGNED (TREE_TYPE (arg1
)))
3331 case tcc_comparison
:
3333 if (OP_SAME (0) && OP_SAME (1))
3336 /* For commutative ops, allow the other order. */
3337 return (commutative_tree_code (TREE_CODE (arg0
))
3338 && operand_equal_p (TREE_OPERAND (arg0
, 0),
3339 TREE_OPERAND (arg1
, 1), flags
)
3340 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3341 TREE_OPERAND (arg1
, 0), flags
));
3344 /* If either of the pointer (or reference) expressions we are
3345 dereferencing contain a side effect, these cannot be equal,
3346 but their addresses can be. */
3347 if ((flags
& OEP_MATCH_SIDE_EFFECTS
) == 0
3348 && (TREE_SIDE_EFFECTS (arg0
)
3349 || TREE_SIDE_EFFECTS (arg1
)))
3352 switch (TREE_CODE (arg0
))
3355 if (!(flags
& OEP_ADDRESS_OF
))
3357 if (TYPE_ALIGN (TREE_TYPE (arg0
))
3358 != TYPE_ALIGN (TREE_TYPE (arg1
)))
3360 /* Verify that the access types are compatible. */
3361 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg0
))
3362 != TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)))
3365 flags
&= ~OEP_ADDRESS_OF
;
3369 /* Require the same offset. */
3370 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0
)),
3371 TYPE_SIZE (TREE_TYPE (arg1
)),
3372 flags
& ~OEP_ADDRESS_OF
))
3377 case VIEW_CONVERT_EXPR
:
3380 case TARGET_MEM_REF
:
3382 if (!(flags
& OEP_ADDRESS_OF
))
3384 /* Require equal access sizes */
3385 if (TYPE_SIZE (TREE_TYPE (arg0
)) != TYPE_SIZE (TREE_TYPE (arg1
))
3386 && (!TYPE_SIZE (TREE_TYPE (arg0
))
3387 || !TYPE_SIZE (TREE_TYPE (arg1
))
3388 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0
)),
3389 TYPE_SIZE (TREE_TYPE (arg1
)),
3392 /* Verify that access happens in similar types. */
3393 if (!types_compatible_p (TREE_TYPE (arg0
), TREE_TYPE (arg1
)))
3395 /* Verify that accesses are TBAA compatible. */
3396 if (!alias_ptr_types_compatible_p
3397 (TREE_TYPE (TREE_OPERAND (arg0
, 1)),
3398 TREE_TYPE (TREE_OPERAND (arg1
, 1)))
3399 || (MR_DEPENDENCE_CLIQUE (arg0
)
3400 != MR_DEPENDENCE_CLIQUE (arg1
))
3401 || (MR_DEPENDENCE_BASE (arg0
)
3402 != MR_DEPENDENCE_BASE (arg1
)))
3404 /* Verify that alignment is compatible. */
3405 if (TYPE_ALIGN (TREE_TYPE (arg0
))
3406 != TYPE_ALIGN (TREE_TYPE (arg1
)))
3409 flags
&= ~OEP_ADDRESS_OF
;
3410 return (OP_SAME (0) && OP_SAME (1)
3411 /* TARGET_MEM_REF require equal extra operands. */
3412 && (TREE_CODE (arg0
) != TARGET_MEM_REF
3413 || (OP_SAME_WITH_NULL (2)
3414 && OP_SAME_WITH_NULL (3)
3415 && OP_SAME_WITH_NULL (4))));
3418 case ARRAY_RANGE_REF
:
3421 flags
&= ~OEP_ADDRESS_OF
;
3422 /* Compare the array index by value if it is constant first as we
3423 may have different types but same value here. */
3424 return ((tree_int_cst_equal (TREE_OPERAND (arg0
, 1),
3425 TREE_OPERAND (arg1
, 1))
3427 && OP_SAME_WITH_NULL (2)
3428 && OP_SAME_WITH_NULL (3)
3429 /* Compare low bound and element size as with OEP_ADDRESS_OF
3430 we have to account for the offset of the ref. */
3431 && (TREE_TYPE (TREE_OPERAND (arg0
, 0))
3432 == TREE_TYPE (TREE_OPERAND (arg1
, 0))
3433 || (operand_equal_p (array_ref_low_bound
3434 (CONST_CAST_TREE (arg0
)),
3436 (CONST_CAST_TREE (arg1
)), flags
)
3437 && operand_equal_p (array_ref_element_size
3438 (CONST_CAST_TREE (arg0
)),
3439 array_ref_element_size
3440 (CONST_CAST_TREE (arg1
)),
3444 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3445 may be NULL when we're called to compare MEM_EXPRs. */
3446 if (!OP_SAME_WITH_NULL (0))
3449 bool compare_address
= flags
& OEP_ADDRESS_OF
;
3451 /* Most of time we only need to compare FIELD_DECLs for equality.
3452 However when determining address look into actual offsets.
3453 These may match for unions and unshared record types. */
3454 flags
&= ~OEP_ADDRESS_OF
;
3458 && (flags
& OEP_ADDRESS_OF_SAME_FIELD
) == 0)
3460 tree field0
= TREE_OPERAND (arg0
, 1);
3461 tree field1
= TREE_OPERAND (arg1
, 1);
3463 /* Non-FIELD_DECL operands can appear in C++ templates. */
3464 if (TREE_CODE (field0
) != FIELD_DECL
3465 || TREE_CODE (field1
) != FIELD_DECL
3466 || !operand_equal_p (DECL_FIELD_OFFSET (field0
),
3467 DECL_FIELD_OFFSET (field1
), flags
)
3468 || !operand_equal_p (DECL_FIELD_BIT_OFFSET (field0
),
3469 DECL_FIELD_BIT_OFFSET (field1
),
3477 return OP_SAME_WITH_NULL (2);
3482 flags
&= ~OEP_ADDRESS_OF
;
3483 return OP_SAME (1) && OP_SAME (2);
3489 case tcc_expression
:
3490 switch (TREE_CODE (arg0
))
3493 /* Be sure we pass right ADDRESS_OF flag. */
3494 gcc_checking_assert (!(flags
& OEP_ADDRESS_OF
));
3495 return operand_equal_p (TREE_OPERAND (arg0
, 0),
3496 TREE_OPERAND (arg1
, 0),
3497 flags
| OEP_ADDRESS_OF
);
3499 case TRUTH_NOT_EXPR
:
3502 case TRUTH_ANDIF_EXPR
:
3503 case TRUTH_ORIF_EXPR
:
3504 return OP_SAME (0) && OP_SAME (1);
3506 case WIDEN_MULT_PLUS_EXPR
:
3507 case WIDEN_MULT_MINUS_EXPR
:
3510 /* The multiplication operands are commutative. */
3513 case TRUTH_AND_EXPR
:
3515 case TRUTH_XOR_EXPR
:
3516 if (OP_SAME (0) && OP_SAME (1))
3519 /* Otherwise take into account this is a commutative operation. */
3520 return (operand_equal_p (TREE_OPERAND (arg0
, 0),
3521 TREE_OPERAND (arg1
, 1), flags
)
3522 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3523 TREE_OPERAND (arg1
, 0), flags
));
3526 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3528 flags
&= ~OEP_ADDRESS_OF
;
3531 case BIT_INSERT_EXPR
:
3532 /* BIT_INSERT_EXPR has an implicit operand as the type precision
3533 of op1. Need to check to make sure they are the same. */
3534 if (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
3535 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
3536 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0
, 1)))
3537 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1
, 1))))
3543 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
/* Side-effecting expressions only compare under OEP_LEXICOGRAPHIC.  */
3548 case PREDECREMENT_EXPR
:
3549 case PREINCREMENT_EXPR
:
3550 case POSTDECREMENT_EXPR
:
3551 case POSTINCREMENT_EXPR
:
3552 if (flags
& OEP_LEXICOGRAPHIC
)
3553 return OP_SAME (0) && OP_SAME (1);
3556 case CLEANUP_POINT_EXPR
:
3559 if (flags
& OEP_LEXICOGRAPHIC
)
3564 /* Virtual table reference. */
3565 if (!operand_equal_p (OBJ_TYPE_REF_EXPR (arg0
),
3566 OBJ_TYPE_REF_EXPR (arg1
), flags
))
3568 flags
&= ~OEP_ADDRESS_OF
;
3569 if (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg0
))
3570 != tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg1
)))
3572 if (!operand_equal_p (OBJ_TYPE_REF_OBJECT (arg0
),
3573 OBJ_TYPE_REF_OBJECT (arg1
), flags
))
3575 if (virtual_method_call_p (arg0
))
3577 if (!virtual_method_call_p (arg1
))
3579 return types_same_for_odr (obj_type_ref_class (arg0
),
3580 obj_type_ref_class (arg1
));
3589 switch (TREE_CODE (arg0
))
3592 if ((CALL_EXPR_FN (arg0
) == NULL_TREE
)
3593 != (CALL_EXPR_FN (arg1
) == NULL_TREE
))
3594 /* If not both CALL_EXPRs are either internal or normal
3595 functions, then they are not equal. */
3597 else if (CALL_EXPR_FN (arg0
) == NULL_TREE
)
3599 /* If the CALL_EXPRs call different internal functions, then they
3601 if (CALL_EXPR_IFN (arg0
) != CALL_EXPR_IFN (arg1
))
3606 /* If the CALL_EXPRs call different functions, then they are not
3608 if (! operand_equal_p (CALL_EXPR_FN (arg0
), CALL_EXPR_FN (arg1
),
3613 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3615 unsigned int cef
= call_expr_flags (arg0
);
3616 if (flags
& OEP_PURE_SAME
)
3617 cef
&= ECF_CONST
| ECF_PURE
;
3620 if (!cef
&& !(flags
& OEP_LEXICOGRAPHIC
))
3624 /* Now see if all the arguments are the same. */
3626 const_call_expr_arg_iterator iter0
, iter1
;
3628 for (a0
= first_const_call_expr_arg (arg0
, &iter0
),
3629 a1
= first_const_call_expr_arg (arg1
, &iter1
);
3631 a0
= next_const_call_expr_arg (&iter0
),
3632 a1
= next_const_call_expr_arg (&iter1
))
3633 if (! operand_equal_p (a0
, a1
, flags
))
3636 /* If we get here and both argument lists are exhausted
3637 then the CALL_EXPRs are equal. */
3638 return ! (a0
|| a1
);
3644 case tcc_declaration
:
3645 /* Consider __builtin_sqrt equal to sqrt. */
3646 if (TREE_CODE (arg0
) == FUNCTION_DECL
)
3647 return (fndecl_built_in_p (arg0
) && fndecl_built_in_p (arg1
)
3648 && DECL_BUILT_IN_CLASS (arg0
) == DECL_BUILT_IN_CLASS (arg1
)
3649 && (DECL_UNCHECKED_FUNCTION_CODE (arg0
)
3650 == DECL_UNCHECKED_FUNCTION_CODE (arg1
)));
3653 && (flags
& OEP_DECL_NAME
)
3654 && (flags
& OEP_LEXICOGRAPHIC
))
3656 /* Consider decls with the same name equal. The caller needs
3657 to make sure they refer to the same entity (such as a function
3658 formal parameter). */
3659 tree a0name
= DECL_NAME (arg0
);
3660 tree a1name
= DECL_NAME (arg1
);
3661 const char *a0ns
= a0name
? IDENTIFIER_POINTER (a0name
) : NULL
;
3662 const char *a1ns
= a1name
? IDENTIFIER_POINTER (a1name
) : NULL
;
3663 return a0ns
&& a1ns
&& strcmp (a0ns
, a1ns
) == 0;
3667 case tcc_exceptional
:
3668 if (TREE_CODE (arg0
) == CONSTRUCTOR
)
3670 if (CONSTRUCTOR_NO_CLEARING (arg0
) != CONSTRUCTOR_NO_CLEARING (arg1
))
3673 /* In GIMPLE constructors are used only to build vectors from
3674 elements. Individual elements in the constructor must be
3675 indexed in increasing order and form an initial sequence.
3677 We make no effort to compare constructors in generic.
3678 (see sem_variable::equals in ipa-icf which can do so for
3680 if (!VECTOR_TYPE_P (TREE_TYPE (arg0
))
3681 || !VECTOR_TYPE_P (TREE_TYPE (arg1
)))
3684 /* Be sure that vectors constructed have the same representation.
3685 We only tested element precision and modes to match.
3686 Vectors may be BLKmode and thus also check that the number of
3688 if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)),
3689 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
))))
3692 vec
<constructor_elt
, va_gc
> *v0
= CONSTRUCTOR_ELTS (arg0
);
3693 vec
<constructor_elt
, va_gc
> *v1
= CONSTRUCTOR_ELTS (arg1
);
3694 unsigned int len
= vec_safe_length (v0
);
3696 if (len
!= vec_safe_length (v1
))
3699 for (unsigned int i
= 0; i
< len
; i
++)
3701 constructor_elt
*c0
= &(*v0
)[i
];
3702 constructor_elt
*c1
= &(*v1
)[i
];
3704 if (!operand_equal_p (c0
->value
, c1
->value
, flags
)
3705 /* In GIMPLE the indexes can be either NULL or matching i.
3706 Double check this so we won't get false
3707 positives for GENERIC. */
3709 && (TREE_CODE (c0
->index
) != INTEGER_CST
3710 || compare_tree_int (c0
->index
, i
)))
3712 && (TREE_CODE (c1
->index
) != INTEGER_CST
3713 || compare_tree_int (c1
->index
, i
))))
3718 else if (TREE_CODE (arg0
) == STATEMENT_LIST
3719 && (flags
& OEP_LEXICOGRAPHIC
))
3721 /* Compare the STATEMENT_LISTs. */
3722 tree_stmt_iterator tsi1
, tsi2
;
3723 tree body1
= CONST_CAST_TREE (arg0
);
3724 tree body2
= CONST_CAST_TREE (arg1
);
3725 for (tsi1
= tsi_start (body1
), tsi2
= tsi_start (body2
); ;
3726 tsi_next (&tsi1
), tsi_next (&tsi2
))
3728 /* The lists don't have the same number of statements. */
3729 if (tsi_end_p (tsi1
) ^ tsi_end_p (tsi2
))
3731 if (tsi_end_p (tsi1
) && tsi_end_p (tsi2
))
3733 if (!operand_equal_p (tsi_stmt (tsi1
), tsi_stmt (tsi2
),
3734 flags
& (OEP_LEXICOGRAPHIC
3735 | OEP_NO_HASH_CHECK
)))
3742 switch (TREE_CODE (arg0
))
3745 if (flags
& OEP_LEXICOGRAPHIC
)
3746 return OP_SAME_WITH_NULL (0);
3748 case DEBUG_BEGIN_STMT
:
3749 if (flags
& OEP_LEXICOGRAPHIC
)
3761 #undef OP_SAME_WITH_NULL
3764 /* Generate a hash value for an expression. This can be used iteratively
3765 by passing a previous result as the HSTATE argument. */
3768 operand_compare::hash_operand (const_tree t
, inchash::hash
&hstate
,
3772 enum tree_code code
;
3773 enum tree_code_class tclass
;
3775 if (t
== NULL_TREE
|| t
== error_mark_node
)
3777 hstate
.merge_hash (0);
3781 STRIP_ANY_LOCATION_WRAPPER (t
);
3783 if (!(flags
& OEP_ADDRESS_OF
))
3786 code
= TREE_CODE (t
);
3790 /* Alas, constants aren't shared, so we can't rely on pointer
3793 hstate
.merge_hash (0);
3796 gcc_checking_assert (!(flags
& OEP_ADDRESS_OF
));
3797 for (i
= 0; i
< TREE_INT_CST_EXT_NUNITS (t
); i
++)
3798 hstate
.add_hwi (TREE_INT_CST_ELT (t
, i
));
3803 if (!HONOR_SIGNED_ZEROS (t
) && real_zerop (t
))
3806 val2
= real_hash (TREE_REAL_CST_PTR (t
));
3807 hstate
.merge_hash (val2
);
3812 unsigned int val2
= fixed_hash (TREE_FIXED_CST_PTR (t
));
3813 hstate
.merge_hash (val2
);
3817 hstate
.add ((const void *) TREE_STRING_POINTER (t
),
3818 TREE_STRING_LENGTH (t
));
3821 hash_operand (TREE_REALPART (t
), hstate
, flags
);
3822 hash_operand (TREE_IMAGPART (t
), hstate
, flags
);
3826 hstate
.add_int (VECTOR_CST_NPATTERNS (t
));
3827 hstate
.add_int (VECTOR_CST_NELTS_PER_PATTERN (t
));
3828 unsigned int count
= vector_cst_encoded_nelts (t
);
3829 for (unsigned int i
= 0; i
< count
; ++i
)
3830 hash_operand (VECTOR_CST_ENCODED_ELT (t
, i
), hstate
, flags
);
3834 /* We can just compare by pointer. */
3835 hstate
.add_hwi (SSA_NAME_VERSION (t
));
3837 case PLACEHOLDER_EXPR
:
3838 /* The node itself doesn't matter. */
3845 /* A list of expressions, for a CALL_EXPR or as the elements of a
3847 for (; t
; t
= TREE_CHAIN (t
))
3848 hash_operand (TREE_VALUE (t
), hstate
, flags
);
3852 unsigned HOST_WIDE_INT idx
;
3854 flags
&= ~OEP_ADDRESS_OF
;
3855 hstate
.add_int (CONSTRUCTOR_NO_CLEARING (t
));
3856 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t
), idx
, field
, value
)
3858 /* In GIMPLE the indexes can be either NULL or matching i. */
3859 if (field
== NULL_TREE
)
3860 field
= bitsize_int (idx
);
3861 hash_operand (field
, hstate
, flags
);
3862 hash_operand (value
, hstate
, flags
);
3866 case STATEMENT_LIST
:
3868 tree_stmt_iterator i
;
3869 for (i
= tsi_start (CONST_CAST_TREE (t
));
3870 !tsi_end_p (i
); tsi_next (&i
))
3871 hash_operand (tsi_stmt (i
), hstate
, flags
);
3875 for (i
= 0; i
< TREE_VEC_LENGTH (t
); ++i
)
3876 hash_operand (TREE_VEC_ELT (t
, i
), hstate
, flags
);
3878 case IDENTIFIER_NODE
:
3879 hstate
.add_object (IDENTIFIER_HASH_VALUE (t
));
3882 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
3883 Otherwise nodes that compare equal according to operand_equal_p might
3884 get different hash codes. However, don't do this for machine specific
3885 or front end builtins, since the function code is overloaded in those
3887 if (DECL_BUILT_IN_CLASS (t
) == BUILT_IN_NORMAL
3888 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t
)))
3890 t
= builtin_decl_explicit (DECL_FUNCTION_CODE (t
));
3891 code
= TREE_CODE (t
);
3895 if (POLY_INT_CST_P (t
))
3897 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
3898 hstate
.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t
, i
)));
3901 tclass
= TREE_CODE_CLASS (code
);
3903 if (tclass
== tcc_declaration
)
3905 /* DECL's have a unique ID */
3906 hstate
.add_hwi (DECL_UID (t
));
3908 else if (tclass
== tcc_comparison
&& !commutative_tree_code (code
))
3910 /* For comparisons that can be swapped, use the lower
3912 enum tree_code ccode
= swap_tree_comparison (code
);
3915 hstate
.add_object (ccode
);
3916 hash_operand (TREE_OPERAND (t
, ccode
!= code
), hstate
, flags
);
3917 hash_operand (TREE_OPERAND (t
, ccode
== code
), hstate
, flags
);
3919 else if (CONVERT_EXPR_CODE_P (code
))
3921 /* NOP_EXPR and CONVERT_EXPR are considered equal by
3923 enum tree_code ccode
= NOP_EXPR
;
3924 hstate
.add_object (ccode
);
3926 /* Don't hash the type, that can lead to having nodes which
3927 compare equal according to operand_equal_p, but which
3928 have different hash codes. Make sure to include signedness
3929 in the hash computation. */
3930 hstate
.add_int (TYPE_UNSIGNED (TREE_TYPE (t
)));
3931 hash_operand (TREE_OPERAND (t
, 0), hstate
, flags
);
3933 /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
3934 else if (code
== MEM_REF
3935 && (flags
& OEP_ADDRESS_OF
) != 0
3936 && TREE_CODE (TREE_OPERAND (t
, 0)) == ADDR_EXPR
3937 && DECL_P (TREE_OPERAND (TREE_OPERAND (t
, 0), 0))
3938 && integer_zerop (TREE_OPERAND (t
, 1)))
3939 hash_operand (TREE_OPERAND (TREE_OPERAND (t
, 0), 0),
3941 /* Don't ICE on FE specific trees, or their arguments etc.
3942 during operand_equal_p hash verification. */
3943 else if (!IS_EXPR_CODE_CLASS (tclass
))
3944 gcc_assert (flags
& OEP_HASH_CHECK
);
3947 unsigned int sflags
= flags
;
3949 hstate
.add_object (code
);
3954 gcc_checking_assert (!(flags
& OEP_ADDRESS_OF
));
3955 flags
|= OEP_ADDRESS_OF
;
3961 case TARGET_MEM_REF
:
3962 flags
&= ~OEP_ADDRESS_OF
;
3967 if (sflags
& OEP_ADDRESS_OF
)
3969 hash_operand (TREE_OPERAND (t
, 0), hstate
, flags
);
3970 hash_operand (DECL_FIELD_OFFSET (TREE_OPERAND (t
, 1)),
3971 hstate
, flags
& ~OEP_ADDRESS_OF
);
3972 hash_operand (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (t
, 1)),
3973 hstate
, flags
& ~OEP_ADDRESS_OF
);
3978 case ARRAY_RANGE_REF
:
3980 sflags
&= ~OEP_ADDRESS_OF
;
3984 flags
&= ~OEP_ADDRESS_OF
;
3987 case WIDEN_MULT_PLUS_EXPR
:
3988 case WIDEN_MULT_MINUS_EXPR
:
3990 /* The multiplication operands are commutative. */
3991 inchash::hash one
, two
;
3992 hash_operand (TREE_OPERAND (t
, 0), one
, flags
);
3993 hash_operand (TREE_OPERAND (t
, 1), two
, flags
);
3994 hstate
.add_commutative (one
, two
);
3995 hash_operand (TREE_OPERAND (t
, 2), two
, flags
);
4000 if (CALL_EXPR_FN (t
) == NULL_TREE
)
4001 hstate
.add_int (CALL_EXPR_IFN (t
));
4005 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
4006 Usually different TARGET_EXPRs just should use
4007 different temporaries in their slots. */
4008 hash_operand (TARGET_EXPR_SLOT (t
), hstate
, flags
);
4012 /* Virtual table reference. */
4013 inchash::add_expr (OBJ_TYPE_REF_EXPR (t
), hstate
, flags
);
4014 flags
&= ~OEP_ADDRESS_OF
;
4015 inchash::add_expr (OBJ_TYPE_REF_TOKEN (t
), hstate
, flags
);
4016 inchash::add_expr (OBJ_TYPE_REF_OBJECT (t
), hstate
, flags
);
4017 if (!virtual_method_call_p (t
))
4019 if (tree c
= obj_type_ref_class (t
))
4021 c
= TYPE_NAME (TYPE_MAIN_VARIANT (c
));
4022 /* We compute mangled names only when free_lang_data is run.
4023 In that case we can hash precisely. */
4024 if (TREE_CODE (c
) == TYPE_DECL
4025 && DECL_ASSEMBLER_NAME_SET_P (c
))
4027 (IDENTIFIER_HASH_VALUE
4028 (DECL_ASSEMBLER_NAME (c
)));
4035 /* Don't hash the type, that can lead to having nodes which
4036 compare equal according to operand_equal_p, but which
4037 have different hash codes. */
4038 if (code
== NON_LVALUE_EXPR
)
4040 /* Make sure to include signness in the hash computation. */
4041 hstate
.add_int (TYPE_UNSIGNED (TREE_TYPE (t
)));
4042 hash_operand (TREE_OPERAND (t
, 0), hstate
, flags
);
4045 else if (commutative_tree_code (code
))
4047 /* It's a commutative expression. We want to hash it the same
4048 however it appears. We do this by first hashing both operands
4049 and then rehashing based on the order of their independent
4051 inchash::hash one
, two
;
4052 hash_operand (TREE_OPERAND (t
, 0), one
, flags
);
4053 hash_operand (TREE_OPERAND (t
, 1), two
, flags
);
4054 hstate
.add_commutative (one
, two
);
4057 for (i
= TREE_OPERAND_LENGTH (t
) - 1; i
>= 0; --i
)
4058 hash_operand (TREE_OPERAND (t
, i
), hstate
,
4059 i
== 0 ? flags
: sflags
);
4066 operand_compare::verify_hash_value (const_tree arg0
, const_tree arg1
,
4067 unsigned int flags
, bool *ret
)
4069 /* When checking and unless comparing DECL names, verify that if
4070 the outermost operand_equal_p call returns non-zero then ARG0
4071 and ARG1 have the same hash value. */
4072 if (flag_checking
&& !(flags
& OEP_NO_HASH_CHECK
))
4074 if (operand_equal_p (arg0
, arg1
, flags
| OEP_NO_HASH_CHECK
))
4076 if (arg0
!= arg1
&& !(flags
& OEP_DECL_NAME
))
4078 inchash::hash
hstate0 (0), hstate1 (0);
4079 hash_operand (arg0
, hstate0
, flags
| OEP_HASH_CHECK
);
4080 hash_operand (arg1
, hstate1
, flags
| OEP_HASH_CHECK
);
4081 hashval_t h0
= hstate0
.end ();
4082 hashval_t h1
= hstate1
.end ();
4083 gcc_assert (h0
== h1
);
4097 static operand_compare default_compare_instance
;
4099 /* Conveinece wrapper around operand_compare class because usually we do
4100 not need to play with the valueizer. */
4103 operand_equal_p (const_tree arg0
, const_tree arg1
, unsigned int flags
)
4105 return default_compare_instance
.operand_equal_p (arg0
, arg1
, flags
);
4111 /* Generate a hash value for an expression. This can be used iteratively
4112 by passing a previous result as the HSTATE argument.
4114 This function is intended to produce the same hash for expressions which
4115 would compare equal using operand_equal_p. */
4117 add_expr (const_tree t
, inchash::hash
&hstate
, unsigned int flags
)
4119 default_compare_instance
.hash_operand (t
, hstate
, flags
);
4124 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
4125 with a different signedness or a narrower precision. */
4128 operand_equal_for_comparison_p (tree arg0
, tree arg1
)
4130 if (operand_equal_p (arg0
, arg1
, 0))
4133 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
4134 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1
)))
4137 /* Discard any conversions that don't change the modes of ARG0 and ARG1
4138 and see if the inner values are the same. This removes any
4139 signedness comparison, which doesn't matter here. */
4144 if (operand_equal_p (op0
, op1
, 0))
4147 /* Discard a single widening conversion from ARG1 and see if the inner
4148 value is the same as ARG0. */
4149 if (CONVERT_EXPR_P (arg1
)
4150 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1
, 0)))
4151 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1
, 0)))
4152 < TYPE_PRECISION (TREE_TYPE (arg1
))
4153 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
4159 /* See if ARG is an expression that is either a comparison or is performing
4160 arithmetic on comparisons. The comparisons must only be comparing
4161 two different values, which will be stored in *CVAL1 and *CVAL2; if
4162 they are nonzero it means that some operands have already been found.
4163 No variables may be used anywhere else in the expression except in the
4166 If this is true, return 1. Otherwise, return zero. */
4169 twoval_comparison_p (tree arg
, tree
*cval1
, tree
*cval2
)
4171 enum tree_code code
= TREE_CODE (arg
);
4172 enum tree_code_class tclass
= TREE_CODE_CLASS (code
);
4174 /* We can handle some of the tcc_expression cases here. */
4175 if (tclass
== tcc_expression
&& code
== TRUTH_NOT_EXPR
)
4177 else if (tclass
== tcc_expression
4178 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
4179 || code
== COMPOUND_EXPR
))
4180 tclass
= tcc_binary
;
4185 return twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
);
4188 return (twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
)
4189 && twoval_comparison_p (TREE_OPERAND (arg
, 1), cval1
, cval2
));
4194 case tcc_expression
:
4195 if (code
== COND_EXPR
)
4196 return (twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
)
4197 && twoval_comparison_p (TREE_OPERAND (arg
, 1), cval1
, cval2
)
4198 && twoval_comparison_p (TREE_OPERAND (arg
, 2), cval1
, cval2
));
4201 case tcc_comparison
:
4202 /* First see if we can handle the first operand, then the second. For
4203 the second operand, we know *CVAL1 can't be zero. It must be that
4204 one side of the comparison is each of the values; test for the
4205 case where this isn't true by failing if the two operands
4208 if (operand_equal_p (TREE_OPERAND (arg
, 0),
4209 TREE_OPERAND (arg
, 1), 0))
4213 *cval1
= TREE_OPERAND (arg
, 0);
4214 else if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 0), 0))
4216 else if (*cval2
== 0)
4217 *cval2
= TREE_OPERAND (arg
, 0);
4218 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 0), 0))
4223 if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 1), 0))
4225 else if (*cval2
== 0)
4226 *cval2
= TREE_OPERAND (arg
, 1);
4227 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 1), 0))
4239 /* ARG is a tree that is known to contain just arithmetic operations and
4240 comparisons. Evaluate the operations in the tree substituting NEW0 for
4241 any occurrence of OLD0 as an operand of a comparison and likewise for
4245 eval_subst (location_t loc
, tree arg
, tree old0
, tree new0
,
4246 tree old1
, tree new1
)
4248 tree type
= TREE_TYPE (arg
);
4249 enum tree_code code
= TREE_CODE (arg
);
4250 enum tree_code_class tclass
= TREE_CODE_CLASS (code
);
4252 /* We can handle some of the tcc_expression cases here. */
4253 if (tclass
== tcc_expression
&& code
== TRUTH_NOT_EXPR
)
4255 else if (tclass
== tcc_expression
4256 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
4257 tclass
= tcc_binary
;
4262 return fold_build1_loc (loc
, code
, type
,
4263 eval_subst (loc
, TREE_OPERAND (arg
, 0),
4264 old0
, new0
, old1
, new1
));
4267 return fold_build2_loc (loc
, code
, type
,
4268 eval_subst (loc
, TREE_OPERAND (arg
, 0),
4269 old0
, new0
, old1
, new1
),
4270 eval_subst (loc
, TREE_OPERAND (arg
, 1),
4271 old0
, new0
, old1
, new1
));
4273 case tcc_expression
:
4277 return eval_subst (loc
, TREE_OPERAND (arg
, 0), old0
, new0
,
4281 return eval_subst (loc
, TREE_OPERAND (arg
, 1), old0
, new0
,
4285 return fold_build3_loc (loc
, code
, type
,
4286 eval_subst (loc
, TREE_OPERAND (arg
, 0),
4287 old0
, new0
, old1
, new1
),
4288 eval_subst (loc
, TREE_OPERAND (arg
, 1),
4289 old0
, new0
, old1
, new1
),
4290 eval_subst (loc
, TREE_OPERAND (arg
, 2),
4291 old0
, new0
, old1
, new1
));
4295 /* Fall through - ??? */
4297 case tcc_comparison
:
4299 tree arg0
= TREE_OPERAND (arg
, 0);
4300 tree arg1
= TREE_OPERAND (arg
, 1);
4302 /* We need to check both for exact equality and tree equality. The
4303 former will be true if the operand has a side-effect. In that
4304 case, we know the operand occurred exactly once. */
4306 if (arg0
== old0
|| operand_equal_p (arg0
, old0
, 0))
4308 else if (arg0
== old1
|| operand_equal_p (arg0
, old1
, 0))
4311 if (arg1
== old0
|| operand_equal_p (arg1
, old0
, 0))
4313 else if (arg1
== old1
|| operand_equal_p (arg1
, old1
, 0))
4316 return fold_build2_loc (loc
, code
, type
, arg0
, arg1
);
4324 /* Return a tree for the case when the result of an expression is RESULT
4325 converted to TYPE and OMITTED was previously an operand of the expression
4326 but is now not needed (e.g., we folded OMITTED * 0).
4328 If OMITTED has side effects, we must evaluate it. Otherwise, just do
4329 the conversion of RESULT to TYPE. */
4332 omit_one_operand_loc (location_t loc
, tree type
, tree result
, tree omitted
)
4334 tree t
= fold_convert_loc (loc
, type
, result
);
4336 /* If the resulting operand is an empty statement, just return the omitted
4337 statement casted to void. */
4338 if (IS_EMPTY_STMT (t
) && TREE_SIDE_EFFECTS (omitted
))
4339 return build1_loc (loc
, NOP_EXPR
, void_type_node
,
4340 fold_ignored_result (omitted
));
4342 if (TREE_SIDE_EFFECTS (omitted
))
4343 return build2_loc (loc
, COMPOUND_EXPR
, type
,
4344 fold_ignored_result (omitted
), t
);
4346 return non_lvalue_loc (loc
, t
);
4349 /* Return a tree for the case when the result of an expression is RESULT
4350 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
4351 of the expression but are now not needed.
4353 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
4354 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
4355 evaluated before OMITTED2. Otherwise, if neither has side effects,
4356 just do the conversion of RESULT to TYPE. */
4359 omit_two_operands_loc (location_t loc
, tree type
, tree result
,
4360 tree omitted1
, tree omitted2
)
4362 tree t
= fold_convert_loc (loc
, type
, result
);
4364 if (TREE_SIDE_EFFECTS (omitted2
))
4365 t
= build2_loc (loc
, COMPOUND_EXPR
, type
, omitted2
, t
);
4366 if (TREE_SIDE_EFFECTS (omitted1
))
4367 t
= build2_loc (loc
, COMPOUND_EXPR
, type
, omitted1
, t
);
4369 return TREE_CODE (t
) != COMPOUND_EXPR
? non_lvalue_loc (loc
, t
) : t
;
4373 /* Return a simplified tree node for the truth-negation of ARG. This
4374 never alters ARG itself. We assume that ARG is an operation that
4375 returns a truth value (0 or 1).
4377 FIXME: one would think we would fold the result, but it causes
4378 problems with the dominator optimizer. */
4381 fold_truth_not_expr (location_t loc
, tree arg
)
4383 tree type
= TREE_TYPE (arg
);
4384 enum tree_code code
= TREE_CODE (arg
);
4385 location_t loc1
, loc2
;
4387 /* If this is a comparison, we can simply invert it, except for
4388 floating-point non-equality comparisons, in which case we just
4389 enclose a TRUTH_NOT_EXPR around what we have. */
4391 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
4393 tree op_type
= TREE_TYPE (TREE_OPERAND (arg
, 0));
4394 if (FLOAT_TYPE_P (op_type
)
4395 && flag_trapping_math
4396 && code
!= ORDERED_EXPR
&& code
!= UNORDERED_EXPR
4397 && code
!= NE_EXPR
&& code
!= EQ_EXPR
)
4400 code
= invert_tree_comparison (code
, HONOR_NANS (op_type
));
4401 if (code
== ERROR_MARK
)
4404 tree ret
= build2_loc (loc
, code
, type
, TREE_OPERAND (arg
, 0),
4405 TREE_OPERAND (arg
, 1));
4406 copy_warning (ret
, arg
);
4413 return constant_boolean_node (integer_zerop (arg
), type
);
4415 case TRUTH_AND_EXPR
:
4416 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
4417 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
4418 return build2_loc (loc
, TRUTH_OR_EXPR
, type
,
4419 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
4420 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
4423 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
4424 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
4425 return build2_loc (loc
, TRUTH_AND_EXPR
, type
,
4426 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
4427 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
4429 case TRUTH_XOR_EXPR
:
4430 /* Here we can invert either operand. We invert the first operand
4431 unless the second operand is a TRUTH_NOT_EXPR in which case our
4432 result is the XOR of the first operand with the inside of the
4433 negation of the second operand. */
4435 if (TREE_CODE (TREE_OPERAND (arg
, 1)) == TRUTH_NOT_EXPR
)
4436 return build2_loc (loc
, TRUTH_XOR_EXPR
, type
, TREE_OPERAND (arg
, 0),
4437 TREE_OPERAND (TREE_OPERAND (arg
, 1), 0));
4439 return build2_loc (loc
, TRUTH_XOR_EXPR
, type
,
4440 invert_truthvalue_loc (loc
, TREE_OPERAND (arg
, 0)),
4441 TREE_OPERAND (arg
, 1));
4443 case TRUTH_ANDIF_EXPR
:
4444 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
4445 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
4446 return build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
4447 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
4448 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
4450 case TRUTH_ORIF_EXPR
:
4451 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
4452 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
4453 return build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
4454 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
4455 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
4457 case TRUTH_NOT_EXPR
:
4458 return TREE_OPERAND (arg
, 0);
4462 tree arg1
= TREE_OPERAND (arg
, 1);
4463 tree arg2
= TREE_OPERAND (arg
, 2);
4465 loc1
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
4466 loc2
= expr_location_or (TREE_OPERAND (arg
, 2), loc
);
4468 /* A COND_EXPR may have a throw as one operand, which
4469 then has void type. Just leave void operands
4471 return build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg
, 0),
4472 VOID_TYPE_P (TREE_TYPE (arg1
))
4473 ? arg1
: invert_truthvalue_loc (loc1
, arg1
),
4474 VOID_TYPE_P (TREE_TYPE (arg2
))
4475 ? arg2
: invert_truthvalue_loc (loc2
, arg2
));
4479 loc1
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
4480 return build2_loc (loc
, COMPOUND_EXPR
, type
,
4481 TREE_OPERAND (arg
, 0),
4482 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 1)));
4484 case NON_LVALUE_EXPR
:
4485 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
4486 return invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0));
4489 if (TREE_CODE (TREE_TYPE (arg
)) == BOOLEAN_TYPE
)
4490 return build1_loc (loc
, TRUTH_NOT_EXPR
, type
, arg
);
4495 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
4496 return build1_loc (loc
, TREE_CODE (arg
), type
,
4497 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)));
4500 if (!integer_onep (TREE_OPERAND (arg
, 1)))
4502 return build2_loc (loc
, EQ_EXPR
, type
, arg
, build_int_cst (type
, 0));
4505 return build1_loc (loc
, TRUTH_NOT_EXPR
, type
, arg
);
4507 case CLEANUP_POINT_EXPR
:
4508 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
4509 return build1_loc (loc
, CLEANUP_POINT_EXPR
, type
,
4510 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)));
4517 /* Fold the truth-negation of ARG. This never alters ARG itself. We
4518 assume that ARG is an operation that returns a truth value (0 or 1
4519 for scalars, 0 or -1 for vectors). Return the folded expression if
4520 folding is successful. Otherwise, return NULL_TREE. */
4523 fold_invert_truthvalue (location_t loc
, tree arg
)
4525 tree type
= TREE_TYPE (arg
);
4526 return fold_unary_loc (loc
, VECTOR_TYPE_P (type
)
4532 /* Return a simplified tree node for the truth-negation of ARG. This
4533 never alters ARG itself. We assume that ARG is an operation that
4534 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
4537 invert_truthvalue_loc (location_t loc
, tree arg
)
4539 if (TREE_CODE (arg
) == ERROR_MARK
)
4542 tree type
= TREE_TYPE (arg
);
4543 return fold_build1_loc (loc
, VECTOR_TYPE_P (type
)
4549 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4550 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
4551 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
4552 is the original memory reference used to preserve the alias set of
4556 make_bit_field_ref (location_t loc
, tree inner
, tree orig_inner
, tree type
,
4557 HOST_WIDE_INT bitsize
, poly_int64 bitpos
,
4558 int unsignedp
, int reversep
)
4560 tree result
, bftype
;
4562 /* Attempt not to lose the access path if possible. */
4563 if (TREE_CODE (orig_inner
) == COMPONENT_REF
)
4565 tree ninner
= TREE_OPERAND (orig_inner
, 0);
4567 poly_int64 nbitsize
, nbitpos
;
4569 int nunsignedp
, nreversep
, nvolatilep
= 0;
4570 tree base
= get_inner_reference (ninner
, &nbitsize
, &nbitpos
,
4571 &noffset
, &nmode
, &nunsignedp
,
4572 &nreversep
, &nvolatilep
);
4574 && noffset
== NULL_TREE
4575 && known_subrange_p (bitpos
, bitsize
, nbitpos
, nbitsize
)
4585 alias_set_type iset
= get_alias_set (orig_inner
);
4586 if (iset
== 0 && get_alias_set (inner
) != iset
)
4587 inner
= fold_build2 (MEM_REF
, TREE_TYPE (inner
),
4588 build_fold_addr_expr (inner
),
4589 build_int_cst (ptr_type_node
, 0));
4591 if (known_eq (bitpos
, 0) && !reversep
)
4593 tree size
= TYPE_SIZE (TREE_TYPE (inner
));
4594 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner
))
4595 || POINTER_TYPE_P (TREE_TYPE (inner
)))
4596 && tree_fits_shwi_p (size
)
4597 && tree_to_shwi (size
) == bitsize
)
4598 return fold_convert_loc (loc
, type
, inner
);
4602 if (TYPE_PRECISION (bftype
) != bitsize
4603 || TYPE_UNSIGNED (bftype
) == !unsignedp
)
4604 bftype
= build_nonstandard_integer_type (bitsize
, 0);
4606 result
= build3_loc (loc
, BIT_FIELD_REF
, bftype
, inner
,
4607 bitsize_int (bitsize
), bitsize_int (bitpos
));
4608 REF_REVERSE_STORAGE_ORDER (result
) = reversep
;
4611 result
= fold_convert_loc (loc
, type
, result
);
4616 /* Optimize a bit-field compare.
4618 There are two cases: First is a compare against a constant and the
4619 second is a comparison of two items where the fields are at the same
4620 bit position relative to the start of a chunk (byte, halfword, word)
4621 large enough to contain it. In these cases we can avoid the shift
4622 implicit in bitfield extractions.
4624 For constants, we emit a compare of the shifted constant with the
4625 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4626 compared. For two fields at the same position, we do the ANDs with the
4627 similar mask and compare the result of the ANDs.
4629 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4630 COMPARE_TYPE is the type of the comparison, and LHS and RHS
4631 are the left and right operands of the comparison, respectively.
4633 If the optimization described above can be done, we return the resulting
4634 tree. Otherwise we return zero. */
4637 optimize_bit_field_compare (location_t loc
, enum tree_code code
,
4638 tree compare_type
, tree lhs
, tree rhs
)
4640 poly_int64 plbitpos
, plbitsize
, rbitpos
, rbitsize
;
4641 HOST_WIDE_INT lbitpos
, lbitsize
, nbitpos
, nbitsize
;
4642 tree type
= TREE_TYPE (lhs
);
4644 int const_p
= TREE_CODE (rhs
) == INTEGER_CST
;
4645 machine_mode lmode
, rmode
;
4646 scalar_int_mode nmode
;
4647 int lunsignedp
, runsignedp
;
4648 int lreversep
, rreversep
;
4649 int lvolatilep
= 0, rvolatilep
= 0;
4650 tree linner
, rinner
= NULL_TREE
;
4654 /* Get all the information about the extractions being done. If the bit size
4655 is the same as the size of the underlying object, we aren't doing an
4656 extraction at all and so can do nothing. We also don't want to
4657 do anything if the inner expression is a PLACEHOLDER_EXPR since we
4658 then will no longer be able to replace it. */
4659 linner
= get_inner_reference (lhs
, &plbitsize
, &plbitpos
, &offset
, &lmode
,
4660 &lunsignedp
, &lreversep
, &lvolatilep
);
4662 || !known_size_p (plbitsize
)
4663 || !plbitsize
.is_constant (&lbitsize
)
4664 || !plbitpos
.is_constant (&lbitpos
)
4665 || known_eq (lbitsize
, GET_MODE_BITSIZE (lmode
))
4667 || TREE_CODE (linner
) == PLACEHOLDER_EXPR
4672 rreversep
= lreversep
;
4675 /* If this is not a constant, we can only do something if bit positions,
4676 sizes, signedness and storage order are the same. */
4678 = get_inner_reference (rhs
, &rbitsize
, &rbitpos
, &offset
, &rmode
,
4679 &runsignedp
, &rreversep
, &rvolatilep
);
4682 || maybe_ne (lbitpos
, rbitpos
)
4683 || maybe_ne (lbitsize
, rbitsize
)
4684 || lunsignedp
!= runsignedp
4685 || lreversep
!= rreversep
4687 || TREE_CODE (rinner
) == PLACEHOLDER_EXPR
4692 /* Honor the C++ memory model and mimic what RTL expansion does. */
4693 poly_uint64 bitstart
= 0;
4694 poly_uint64 bitend
= 0;
4695 if (TREE_CODE (lhs
) == COMPONENT_REF
)
4697 get_bit_range (&bitstart
, &bitend
, lhs
, &plbitpos
, &offset
);
4698 if (!plbitpos
.is_constant (&lbitpos
) || offset
!= NULL_TREE
)
4702 /* See if we can find a mode to refer to this field. We should be able to,
4703 but fail if we can't. */
4704 if (!get_best_mode (lbitsize
, lbitpos
, bitstart
, bitend
,
4705 const_p
? TYPE_ALIGN (TREE_TYPE (linner
))
4706 : MIN (TYPE_ALIGN (TREE_TYPE (linner
)),
4707 TYPE_ALIGN (TREE_TYPE (rinner
))),
4708 BITS_PER_WORD
, false, &nmode
))
4711 /* Set signed and unsigned types of the precision of this mode for the
4713 unsigned_type
= lang_hooks
.types
.type_for_mode (nmode
, 1);
4715 /* Compute the bit position and size for the new reference and our offset
4716 within it. If the new reference is the same size as the original, we
4717 won't optimize anything, so return zero. */
4718 nbitsize
= GET_MODE_BITSIZE (nmode
);
4719 nbitpos
= lbitpos
& ~ (nbitsize
- 1);
4721 if (nbitsize
== lbitsize
)
4724 if (lreversep
? !BYTES_BIG_ENDIAN
: BYTES_BIG_ENDIAN
)
4725 lbitpos
= nbitsize
- lbitsize
- lbitpos
;
4727 /* Make the mask to be used against the extracted field. */
4728 mask
= build_int_cst_type (unsigned_type
, -1);
4729 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (nbitsize
- lbitsize
));
4730 mask
= const_binop (RSHIFT_EXPR
, mask
,
4731 size_int (nbitsize
- lbitsize
- lbitpos
));
4738 /* If not comparing with constant, just rework the comparison
4740 tree t1
= make_bit_field_ref (loc
, linner
, lhs
, unsigned_type
,
4741 nbitsize
, nbitpos
, 1, lreversep
);
4742 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
, t1
, mask
);
4743 tree t2
= make_bit_field_ref (loc
, rinner
, rhs
, unsigned_type
,
4744 nbitsize
, nbitpos
, 1, rreversep
);
4745 t2
= fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
, t2
, mask
);
4746 return fold_build2_loc (loc
, code
, compare_type
, t1
, t2
);
4749 /* Otherwise, we are handling the constant case. See if the constant is too
4750 big for the field. Warn and return a tree for 0 (false) if so. We do
4751 this not only for its own sake, but to avoid having to test for this
4752 error case below. If we didn't, we might generate wrong code.
4754 For unsigned fields, the constant shifted right by the field length should
4755 be all zero. For signed fields, the high-order bits should agree with
4760 if (wi::lrshift (wi::to_wide (rhs
), lbitsize
) != 0)
4762 warning (0, "comparison is always %d due to width of bit-field",
4764 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
4769 wide_int tem
= wi::arshift (wi::to_wide (rhs
), lbitsize
- 1);
4770 if (tem
!= 0 && tem
!= -1)
4772 warning (0, "comparison is always %d due to width of bit-field",
4774 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
4781 /* Single-bit compares should always be against zero. */
4782 if (lbitsize
== 1 && ! integer_zerop (rhs
))
4784 code
= code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
;
4785 rhs
= build_int_cst (type
, 0);
4788 /* Make a new bitfield reference, shift the constant over the
4789 appropriate number of bits and mask it with the computed mask
4790 (in case this was a signed field). If we changed it, make a new one. */
4791 lhs
= make_bit_field_ref (loc
, linner
, lhs
, unsigned_type
,
4792 nbitsize
, nbitpos
, 1, lreversep
);
4794 rhs
= const_binop (BIT_AND_EXPR
,
4795 const_binop (LSHIFT_EXPR
,
4796 fold_convert_loc (loc
, unsigned_type
, rhs
),
4797 size_int (lbitpos
)),
4800 lhs
= build2_loc (loc
, code
, compare_type
,
4801 build2 (BIT_AND_EXPR
, unsigned_type
, lhs
, mask
), rhs
);
4805 /* Subroutine for fold_truth_andor_1: decode a field reference.
4807 If EXP is a comparison reference, we return the innermost reference.
4809 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4810 set to the starting bit number.
4812 If the innermost field can be completely contained in a mode-sized
4813 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4815 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
4816 otherwise it is not changed.
4818 *PUNSIGNEDP is set to the signedness of the field.
4820 *PREVERSEP is set to the storage order of the field.
4822 *PMASK is set to the mask used. This is either contained in a
4823 BIT_AND_EXPR or derived from the width of the field.
4825 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4827 Return 0 if this is not a component reference or is one that we can't
4828 do anything with. */
4831 decode_field_reference (location_t loc
, tree
*exp_
, HOST_WIDE_INT
*pbitsize
,
4832 HOST_WIDE_INT
*pbitpos
, machine_mode
*pmode
,
4833 int *punsignedp
, int *preversep
, int *pvolatilep
,
4834 tree
*pmask
, tree
*pand_mask
)
4837 tree outer_type
= 0;
4839 tree mask
, inner
, offset
;
4841 unsigned int precision
;
4843 /* All the optimizations using this function assume integer fields.
4844 There are problems with FP fields since the type_for_size call
4845 below can fail for, e.g., XFmode. */
4846 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp
)))
4849 /* We are interested in the bare arrangement of bits, so strip everything
4850 that doesn't affect the machine mode. However, record the type of the
4851 outermost expression if it may matter below. */
4852 if (CONVERT_EXPR_P (exp
)
4853 || TREE_CODE (exp
) == NON_LVALUE_EXPR
)
4854 outer_type
= TREE_TYPE (exp
);
4857 if (TREE_CODE (exp
) == BIT_AND_EXPR
)
4859 and_mask
= TREE_OPERAND (exp
, 1);
4860 exp
= TREE_OPERAND (exp
, 0);
4861 STRIP_NOPS (exp
); STRIP_NOPS (and_mask
);
4862 if (TREE_CODE (and_mask
) != INTEGER_CST
)
4866 poly_int64 poly_bitsize
, poly_bitpos
;
4867 inner
= get_inner_reference (exp
, &poly_bitsize
, &poly_bitpos
, &offset
,
4868 pmode
, punsignedp
, preversep
, pvolatilep
);
4869 if ((inner
== exp
&& and_mask
== 0)
4870 || !poly_bitsize
.is_constant (pbitsize
)
4871 || !poly_bitpos
.is_constant (pbitpos
)
4874 || TREE_CODE (inner
) == PLACEHOLDER_EXPR
4875 /* Reject out-of-bound accesses (PR79731). */
4876 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner
))
4877 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner
)),
4878 *pbitpos
+ *pbitsize
) < 0))
4881 unsigned_type
= lang_hooks
.types
.type_for_size (*pbitsize
, 1);
4882 if (unsigned_type
== NULL_TREE
)
4887 /* If the number of bits in the reference is the same as the bitsize of
4888 the outer type, then the outer type gives the signedness. Otherwise
4889 (in case of a small bitfield) the signedness is unchanged. */
4890 if (outer_type
&& *pbitsize
== TYPE_PRECISION (outer_type
))
4891 *punsignedp
= TYPE_UNSIGNED (outer_type
);
4893 /* Compute the mask to access the bitfield. */
4894 precision
= TYPE_PRECISION (unsigned_type
);
4896 mask
= build_int_cst_type (unsigned_type
, -1);
4898 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
));
4899 mask
= const_binop (RSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
));
4901 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4903 mask
= fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
4904 fold_convert_loc (loc
, unsigned_type
, and_mask
), mask
);
4907 *pand_mask
= and_mask
;
4911 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4912 bit positions and MASK is SIGNED. */
4915 all_ones_mask_p (const_tree mask
, unsigned int size
)
4917 tree type
= TREE_TYPE (mask
);
4918 unsigned int precision
= TYPE_PRECISION (type
);
4920 /* If this function returns true when the type of the mask is
4921 UNSIGNED, then there will be errors. In particular see
4922 gcc.c-torture/execute/990326-1.c. There does not appear to be
4923 any documentation paper trail as to why this is so. But the pre
4924 wide-int worked with that restriction and it has been preserved
4926 if (size
> precision
|| TYPE_SIGN (type
) == UNSIGNED
)
4929 return wi::mask (size
, false, precision
) == wi::to_wide (mask
);
4932 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4933 represents the sign bit of EXP's type. If EXP represents a sign
4934 or zero extension, also test VAL against the unextended type.
4935 The return value is the (sub)expression whose sign bit is VAL,
4936 or NULL_TREE otherwise. */
4939 sign_bit_p (tree exp
, const_tree val
)
4944 /* Tree EXP must have an integral type. */
4945 t
= TREE_TYPE (exp
);
4946 if (! INTEGRAL_TYPE_P (t
))
4949 /* Tree VAL must be an integer constant. */
4950 if (TREE_CODE (val
) != INTEGER_CST
4951 || TREE_OVERFLOW (val
))
4954 width
= TYPE_PRECISION (t
);
4955 if (wi::only_sign_bit_p (wi::to_wide (val
), width
))
4958 /* Handle extension from a narrower type. */
4959 if (TREE_CODE (exp
) == NOP_EXPR
4960 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))) < width
)
4961 return sign_bit_p (TREE_OPERAND (exp
, 0), val
);
4966 /* Subroutine for fold_truth_andor_1 and simple_condition_p: determine if an
4967 operand is simple enough to be evaluated unconditionally. */
4970 simple_operand_p (const_tree exp
)
4972 /* Strip any conversions that don't change the machine mode. */
4975 return (CONSTANT_CLASS_P (exp
)
4976 || TREE_CODE (exp
) == SSA_NAME
4978 && ! TREE_ADDRESSABLE (exp
)
4979 && ! TREE_THIS_VOLATILE (exp
)
4980 && ! DECL_NONLOCAL (exp
)
4981 /* Don't regard global variables as simple. They may be
4982 allocated in ways unknown to the compiler (shared memory,
4983 #pragma weak, etc). */
4984 && ! TREE_PUBLIC (exp
)
4985 && ! DECL_EXTERNAL (exp
)
4986 /* Weakrefs are not safe to be read, since they can be NULL.
4987 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4988 have DECL_WEAK flag set. */
4989 && (! VAR_OR_FUNCTION_DECL_P (exp
) || ! DECL_WEAK (exp
))
4990 /* Loading a static variable is unduly expensive, but global
4991 registers aren't expensive. */
4992 && (! TREE_STATIC (exp
) || DECL_REGISTER (exp
))));
4995 /* Determine if an operand is simple enough to be evaluated unconditionally.
4996 In addition to simple_operand_p, we assume that comparisons, conversions,
4997 and logic-not operations are simple, if their operands are simple, too. */
5000 simple_condition_p (tree exp
)
5002 enum tree_code code
;
5004 if (TREE_SIDE_EFFECTS (exp
) || generic_expr_could_trap_p (exp
))
5007 while (CONVERT_EXPR_P (exp
))
5008 exp
= TREE_OPERAND (exp
, 0);
5010 code
= TREE_CODE (exp
);
5012 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
5013 return (simple_operand_p (TREE_OPERAND (exp
, 0))
5014 && simple_operand_p (TREE_OPERAND (exp
, 1)));
5016 if (code
== TRUTH_NOT_EXPR
)
5017 return simple_condition_p (TREE_OPERAND (exp
, 0));
5019 return simple_operand_p (exp
);
5023 /* The following functions are subroutines to fold_range_test and allow it to
5024 try to change a logical combination of comparisons into a range test.
5027 X == 2 || X == 3 || X == 4 || X == 5
5031 (unsigned) (X - 2) <= 3
5033 We describe each set of comparisons as being either inside or outside
5034 a range, using a variable named like IN_P, and then describe the
5035 range with a lower and upper bound. If one of the bounds is omitted,
5036 it represents either the highest or lowest value of the type.
5038 In the comments below, we represent a range by two numbers in brackets
5039 preceded by a "+" to designate being inside that range, or a "-" to
5040 designate being outside that range, so the condition can be inverted by
5041 flipping the prefix. An omitted bound is represented by a "-". For
5042 example, "- [-, 10]" means being outside the range starting at the lowest
5043 possible value and ending at 10, in other words, being greater than 10.
5044 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
5047 We set up things so that the missing bounds are handled in a consistent
5048 manner so neither a missing bound nor "true" and "false" need to be
5049 handled using a special case. */
5051 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
5052 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
5053 and UPPER1_P are nonzero if the respective argument is an upper bound
5054 and zero for a lower. TYPE, if nonzero, is the type of the result; it
5055 must be specified for a comparison. ARG1 will be converted to ARG0's
5056 type if both are specified. */
5059 range_binop (enum tree_code code
, tree type
, tree arg0
, int upper0_p
,
5060 tree arg1
, int upper1_p
)
5066 /* If neither arg represents infinity, do the normal operation.
5067 Else, if not a comparison, return infinity. Else handle the special
5068 comparison rules. Note that most of the cases below won't occur, but
5069 are handled for consistency. */
5071 if (arg0
!= 0 && arg1
!= 0)
5073 tem
= fold_build2 (code
, type
!= 0 ? type
: TREE_TYPE (arg0
),
5074 arg0
, fold_convert (TREE_TYPE (arg0
), arg1
));
5076 return TREE_CODE (tem
) == INTEGER_CST
? tem
: 0;
5079 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
5082 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
5083 for neither. In real maths, we cannot assume open ended ranges are
5084 the same. But, this is computer arithmetic, where numbers are finite.
5085 We can therefore make the transformation of any unbounded range with
5086 the value Z, Z being greater than any representable number. This permits
5087 us to treat unbounded ranges as equal. */
5088 sgn0
= arg0
!= 0 ? 0 : (upper0_p
? 1 : -1);
5089 sgn1
= arg1
!= 0 ? 0 : (upper1_p
? 1 : -1);
5093 result
= sgn0
== sgn1
;
5096 result
= sgn0
!= sgn1
;
5099 result
= sgn0
< sgn1
;
5102 result
= sgn0
<= sgn1
;
5105 result
= sgn0
> sgn1
;
5108 result
= sgn0
>= sgn1
;
5114 return constant_boolean_node (result
, type
);
5117 /* Helper routine for make_range. Perform one step for it, return
5118 new expression if the loop should continue or NULL_TREE if it should
5122 make_range_step (location_t loc
, enum tree_code code
, tree arg0
, tree arg1
,
5123 tree exp_type
, tree
*p_low
, tree
*p_high
, int *p_in_p
,
5124 bool *strict_overflow_p
)
5126 tree arg0_type
= TREE_TYPE (arg0
);
5127 tree n_low
, n_high
, low
= *p_low
, high
= *p_high
;
5128 int in_p
= *p_in_p
, n_in_p
;
5132 case TRUTH_NOT_EXPR
:
5133 /* We can only do something if the range is testing for zero. */
5134 if (low
== NULL_TREE
|| high
== NULL_TREE
5135 || ! integer_zerop (low
) || ! integer_zerop (high
))
5140 case EQ_EXPR
: case NE_EXPR
:
5141 case LT_EXPR
: case LE_EXPR
: case GE_EXPR
: case GT_EXPR
:
5142 /* We can only do something if the range is testing for zero
5143 and if the second operand is an integer constant. Note that
5144 saying something is "in" the range we make is done by
5145 complementing IN_P since it will set in the initial case of
5146 being not equal to zero; "out" is leaving it alone. */
5147 if (low
== NULL_TREE
|| high
== NULL_TREE
5148 || ! integer_zerop (low
) || ! integer_zerop (high
)
5149 || TREE_CODE (arg1
) != INTEGER_CST
)
5154 case NE_EXPR
: /* - [c, c] */
5157 case EQ_EXPR
: /* + [c, c] */
5158 in_p
= ! in_p
, low
= high
= arg1
;
5160 case GT_EXPR
: /* - [-, c] */
5161 low
= 0, high
= arg1
;
5163 case GE_EXPR
: /* + [c, -] */
5164 in_p
= ! in_p
, low
= arg1
, high
= 0;
5166 case LT_EXPR
: /* - [c, -] */
5167 low
= arg1
, high
= 0;
5169 case LE_EXPR
: /* + [-, c] */
5170 in_p
= ! in_p
, low
= 0, high
= arg1
;
5176 /* If this is an unsigned comparison, we also know that EXP is
5177 greater than or equal to zero. We base the range tests we make
5178 on that fact, so we record it here so we can parse existing
5179 range tests. We test arg0_type since often the return type
5180 of, e.g. EQ_EXPR, is boolean. */
5181 if (TYPE_UNSIGNED (arg0_type
) && (low
== 0 || high
== 0))
5183 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
,
5185 build_int_cst (arg0_type
, 0),
5189 in_p
= n_in_p
, low
= n_low
, high
= n_high
;
5191 /* If the high bound is missing, but we have a nonzero low
5192 bound, reverse the range so it goes from zero to the low bound
5194 if (high
== 0 && low
&& ! integer_zerop (low
))
5197 high
= range_binop (MINUS_EXPR
, NULL_TREE
, low
, 0,
5198 build_int_cst (TREE_TYPE (low
), 1), 0);
5199 low
= build_int_cst (arg0_type
, 0);
5209 /* If flag_wrapv and ARG0_TYPE is signed, make sure
5210 low and high are non-NULL, then normalize will DTRT. */
5211 if (!TYPE_UNSIGNED (arg0_type
)
5212 && !TYPE_OVERFLOW_UNDEFINED (arg0_type
))
5214 if (low
== NULL_TREE
)
5215 low
= TYPE_MIN_VALUE (arg0_type
);
5216 if (high
== NULL_TREE
)
5217 high
= TYPE_MAX_VALUE (arg0_type
);
5220 /* (-x) IN [a,b] -> x in [-b, -a] */
5221 n_low
= range_binop (MINUS_EXPR
, exp_type
,
5222 build_int_cst (exp_type
, 0),
5224 n_high
= range_binop (MINUS_EXPR
, exp_type
,
5225 build_int_cst (exp_type
, 0),
5227 if (n_high
!= 0 && TREE_OVERFLOW (n_high
))
5233 return build2_loc (loc
, MINUS_EXPR
, exp_type
, negate_expr (arg0
),
5234 build_int_cst (exp_type
, 1));
5238 if (TREE_CODE (arg1
) != INTEGER_CST
)
5241 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
5242 move a constant to the other side. */
5243 if (!TYPE_UNSIGNED (arg0_type
)
5244 && !TYPE_OVERFLOW_UNDEFINED (arg0_type
))
5247 /* If EXP is signed, any overflow in the computation is undefined,
5248 so we don't worry about it so long as our computations on
5249 the bounds don't overflow. For unsigned, overflow is defined
5250 and this is exactly the right thing. */
5251 n_low
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
5252 arg0_type
, low
, 0, arg1
, 0);
5253 n_high
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
5254 arg0_type
, high
, 1, arg1
, 0);
5255 if ((n_low
!= 0 && TREE_OVERFLOW (n_low
))
5256 || (n_high
!= 0 && TREE_OVERFLOW (n_high
)))
5259 if (TYPE_OVERFLOW_UNDEFINED (arg0_type
))
5260 *strict_overflow_p
= true;
5263 /* Check for an unsigned range which has wrapped around the maximum
5264 value thus making n_high < n_low, and normalize it. */
5265 if (n_low
&& n_high
&& tree_int_cst_lt (n_high
, n_low
))
5267 low
= range_binop (PLUS_EXPR
, arg0_type
, n_high
, 0,
5268 build_int_cst (TREE_TYPE (n_high
), 1), 0);
5269 high
= range_binop (MINUS_EXPR
, arg0_type
, n_low
, 0,
5270 build_int_cst (TREE_TYPE (n_low
), 1), 0);
5272 /* If the range is of the form +/- [ x+1, x ], we won't
5273 be able to normalize it. But then, it represents the
5274 whole range or the empty set, so make it
5276 if (tree_int_cst_equal (n_low
, low
)
5277 && tree_int_cst_equal (n_high
, high
))
5283 low
= n_low
, high
= n_high
;
5291 case NON_LVALUE_EXPR
:
5292 if (TYPE_PRECISION (arg0_type
) > TYPE_PRECISION (exp_type
))
5295 if (! INTEGRAL_TYPE_P (arg0_type
)
5296 || (low
!= 0 && ! int_fits_type_p (low
, arg0_type
))
5297 || (high
!= 0 && ! int_fits_type_p (high
, arg0_type
)))
5300 n_low
= low
, n_high
= high
;
5303 n_low
= fold_convert_loc (loc
, arg0_type
, n_low
);
5306 n_high
= fold_convert_loc (loc
, arg0_type
, n_high
);
5308 /* If we're converting arg0 from an unsigned type, to exp,
5309 a signed type, we will be doing the comparison as unsigned.
5310 The tests above have already verified that LOW and HIGH
5313 So we have to ensure that we will handle large unsigned
5314 values the same way that the current signed bounds treat
5317 if (!TYPE_UNSIGNED (exp_type
) && TYPE_UNSIGNED (arg0_type
))
5321 /* For fixed-point modes, we need to pass the saturating flag
5322 as the 2nd parameter. */
5323 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type
)))
5325 = lang_hooks
.types
.type_for_mode (TYPE_MODE (arg0_type
),
5326 TYPE_SATURATING (arg0_type
));
5329 = lang_hooks
.types
.type_for_mode (TYPE_MODE (arg0_type
), 1);
5331 /* A range without an upper bound is, naturally, unbounded.
5332 Since convert would have cropped a very large value, use
5333 the max value for the destination type. */
5335 = TYPE_MAX_VALUE (equiv_type
) ? TYPE_MAX_VALUE (equiv_type
)
5336 : TYPE_MAX_VALUE (arg0_type
);
5338 if (TYPE_PRECISION (exp_type
) == TYPE_PRECISION (arg0_type
))
5339 high_positive
= fold_build2_loc (loc
, RSHIFT_EXPR
, arg0_type
,
5340 fold_convert_loc (loc
, arg0_type
,
5342 build_int_cst (arg0_type
, 1));
5344 /* If the low bound is specified, "and" the range with the
5345 range for which the original unsigned value will be
5349 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
, 1, n_low
, n_high
,
5350 1, fold_convert_loc (loc
, arg0_type
,
5355 in_p
= (n_in_p
== in_p
);
5359 /* Otherwise, "or" the range with the range of the input
5360 that will be interpreted as negative. */
5361 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
, 0, n_low
, n_high
,
5362 1, fold_convert_loc (loc
, arg0_type
,
5367 in_p
= (in_p
!= n_in_p
);
5371 /* Otherwise, if we are converting arg0 from signed type, to exp,
5372 an unsigned type, we will do the comparison as signed. If
5373 high is non-NULL, we punt above if it doesn't fit in the signed
5374 type, so if we get through here, +[-, high] or +[low, high] are
5375 equivalent to +[-, n_high] or +[n_low, n_high]. Similarly,
5376 +[-, -] or -[-, -] are equivalent too. But if low is specified and
5377 high is not, the +[low, -] range is equivalent to union of
5378 +[n_low, -] and +[-, -1] ranges, so +[low, -] is equivalent to
5379 -[0, n_low-1] and similarly -[low, -] to +[0, n_low-1], except for
5380 low being 0, which should be treated as [-, -]. */
5381 else if (TYPE_UNSIGNED (exp_type
)
5382 && !TYPE_UNSIGNED (arg0_type
)
5386 if (integer_zerop (low
))
5390 n_high
= fold_build2_loc (loc
, PLUS_EXPR
, arg0_type
,
5391 n_low
, build_int_cst (arg0_type
, -1));
5392 n_low
= build_zero_cst (arg0_type
);
5407 /* Given EXP, a logical expression, set the range it is testing into
5408 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
5409 actually being tested. *PLOW and *PHIGH will be made of the same
5410 type as the returned expression. If EXP is not a comparison, we
5411 will most likely not be returning a useful value and range. Set
5412 *STRICT_OVERFLOW_P to true if the return value is only valid
5413 because signed overflow is undefined; otherwise, do not change
5414 *STRICT_OVERFLOW_P. */
5417 make_range (tree exp
, int *pin_p
, tree
*plow
, tree
*phigh
,
5418 bool *strict_overflow_p
)
5420 enum tree_code code
;
5421 tree arg0
, arg1
= NULL_TREE
;
5422 tree exp_type
, nexp
;
5425 location_t loc
= EXPR_LOCATION (exp
);
5427 /* Start with simply saying "EXP != 0" and then look at the code of EXP
5428 and see if we can refine the range. Some of the cases below may not
5429 happen, but it doesn't seem worth worrying about this. We "continue"
5430 the outer loop when we've changed something; otherwise we "break"
5431 the switch, which will "break" the while. */
5434 low
= high
= build_int_cst (TREE_TYPE (exp
), 0);
5438 code
= TREE_CODE (exp
);
5439 exp_type
= TREE_TYPE (exp
);
5442 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
)))
5444 if (TREE_OPERAND_LENGTH (exp
) > 0)
5445 arg0
= TREE_OPERAND (exp
, 0);
5446 if (TREE_CODE_CLASS (code
) == tcc_binary
5447 || TREE_CODE_CLASS (code
) == tcc_comparison
5448 || (TREE_CODE_CLASS (code
) == tcc_expression
5449 && TREE_OPERAND_LENGTH (exp
) > 1))
5450 arg1
= TREE_OPERAND (exp
, 1);
5452 if (arg0
== NULL_TREE
)
5455 nexp
= make_range_step (loc
, code
, arg0
, arg1
, exp_type
, &low
,
5456 &high
, &in_p
, strict_overflow_p
);
5457 if (nexp
== NULL_TREE
)
5462 /* If EXP is a constant, we can evaluate whether this is true or false. */
5463 if (TREE_CODE (exp
) == INTEGER_CST
)
5465 in_p
= in_p
== (integer_onep (range_binop (GE_EXPR
, integer_type_node
,
5467 && integer_onep (range_binop (LE_EXPR
, integer_type_node
,
5473 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
5477 /* Returns TRUE if [LOW, HIGH] range check can be optimized to
5478 a bitwise check i.e. when
5479 LOW == 0xXX...X00...0
5480 HIGH == 0xXX...X11...1
5481 Return corresponding mask in MASK and stem in VALUE. */
5484 maskable_range_p (const_tree low
, const_tree high
, tree type
, tree
*mask
,
5487 if (TREE_CODE (low
) != INTEGER_CST
5488 || TREE_CODE (high
) != INTEGER_CST
)
5491 unsigned prec
= TYPE_PRECISION (type
);
5492 wide_int lo
= wi::to_wide (low
, prec
);
5493 wide_int hi
= wi::to_wide (high
, prec
);
5495 wide_int end_mask
= lo
^ hi
;
5496 if ((end_mask
& (end_mask
+ 1)) != 0
5497 || (lo
& end_mask
) != 0)
5500 wide_int stem_mask
= ~end_mask
;
5501 wide_int stem
= lo
& stem_mask
;
5502 if (stem
!= (hi
& stem_mask
))
5505 *mask
= wide_int_to_tree (type
, stem_mask
);
5506 *value
= wide_int_to_tree (type
, stem
);
5511 /* Helper routine for build_range_check and match.pd. Return the type to
5512 perform the check or NULL if it shouldn't be optimized. */
5515 range_check_type (tree etype
)
5517 /* First make sure that arithmetics in this type is valid, then make sure
5518 that it wraps around. */
5519 if (TREE_CODE (etype
) == ENUMERAL_TYPE
|| TREE_CODE (etype
) == BOOLEAN_TYPE
)
5520 etype
= lang_hooks
.types
.type_for_size (TYPE_PRECISION (etype
), 1);
5522 if (TREE_CODE (etype
) == INTEGER_TYPE
&& !TYPE_UNSIGNED (etype
))
5524 tree utype
, minv
, maxv
;
5526 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5527 for the type in question, as we rely on this here. */
5528 utype
= unsigned_type_for (etype
);
5529 maxv
= fold_convert (utype
, TYPE_MAX_VALUE (etype
));
5530 maxv
= range_binop (PLUS_EXPR
, NULL_TREE
, maxv
, 1,
5531 build_int_cst (TREE_TYPE (maxv
), 1), 1);
5532 minv
= fold_convert (utype
, TYPE_MIN_VALUE (etype
));
5534 if (integer_zerop (range_binop (NE_EXPR
, integer_type_node
,
5540 else if (POINTER_TYPE_P (etype
) || TREE_CODE (etype
) == OFFSET_TYPE
)
5541 etype
= unsigned_type_for (etype
);
5545 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
5546 type, TYPE, return an expression to test if EXP is in (or out of, depending
5547 on IN_P) the range. Return 0 if the test couldn't be created. */
5550 build_range_check (location_t loc
, tree type
, tree exp
, int in_p
,
5551 tree low
, tree high
)
5553 tree etype
= TREE_TYPE (exp
), mask
, value
;
5555 /* Disable this optimization for function pointer expressions
5556 on targets that require function pointer canonicalization. */
5557 if (targetm
.have_canonicalize_funcptr_for_compare ()
5558 && POINTER_TYPE_P (etype
)
5559 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype
)))
5564 value
= build_range_check (loc
, type
, exp
, 1, low
, high
);
5566 return invert_truthvalue_loc (loc
, value
);
5571 if (low
== 0 && high
== 0)
5572 return omit_one_operand_loc (loc
, type
, build_int_cst (type
, 1), exp
);
5575 return fold_build2_loc (loc
, LE_EXPR
, type
, exp
,
5576 fold_convert_loc (loc
, etype
, high
));
5579 return fold_build2_loc (loc
, GE_EXPR
, type
, exp
,
5580 fold_convert_loc (loc
, etype
, low
));
5582 if (operand_equal_p (low
, high
, 0))
5583 return fold_build2_loc (loc
, EQ_EXPR
, type
, exp
,
5584 fold_convert_loc (loc
, etype
, low
));
5586 if (TREE_CODE (exp
) == BIT_AND_EXPR
5587 && maskable_range_p (low
, high
, etype
, &mask
, &value
))
5588 return fold_build2_loc (loc
, EQ_EXPR
, type
,
5589 fold_build2_loc (loc
, BIT_AND_EXPR
, etype
,
5593 if (integer_zerop (low
))
5595 if (! TYPE_UNSIGNED (etype
))
5597 etype
= unsigned_type_for (etype
);
5598 high
= fold_convert_loc (loc
, etype
, high
);
5599 exp
= fold_convert_loc (loc
, etype
, exp
);
5601 return build_range_check (loc
, type
, exp
, 1, 0, high
);
5604 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
5605 if (integer_onep (low
) && TREE_CODE (high
) == INTEGER_CST
)
5607 int prec
= TYPE_PRECISION (etype
);
5609 if (wi::mask
<widest_int
> (prec
- 1, false) == wi::to_widest (high
))
5611 if (TYPE_UNSIGNED (etype
))
5613 tree signed_etype
= signed_type_for (etype
);
5614 if (TYPE_PRECISION (signed_etype
) != TYPE_PRECISION (etype
))
5616 = build_nonstandard_integer_type (TYPE_PRECISION (etype
), 0);
5618 etype
= signed_etype
;
5619 exp
= fold_convert_loc (loc
, etype
, exp
);
5621 return fold_build2_loc (loc
, GT_EXPR
, type
, exp
,
5622 build_int_cst (etype
, 0));
5626 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5627 This requires wrap-around arithmetics for the type of the expression. */
5628 etype
= range_check_type (etype
);
5629 if (etype
== NULL_TREE
)
5632 high
= fold_convert_loc (loc
, etype
, high
);
5633 low
= fold_convert_loc (loc
, etype
, low
);
5634 exp
= fold_convert_loc (loc
, etype
, exp
);
5636 value
= const_binop (MINUS_EXPR
, high
, low
);
5638 if (value
!= 0 && !TREE_OVERFLOW (value
))
5639 return build_range_check (loc
, type
,
5640 fold_build2_loc (loc
, MINUS_EXPR
, etype
, exp
, low
),
5641 1, build_int_cst (etype
, 0), value
);
5646 /* Return the predecessor of VAL in its type, handling the infinite case. */
5649 range_predecessor (tree val
)
5651 tree type
= TREE_TYPE (val
);
5653 if (INTEGRAL_TYPE_P (type
)
5654 && operand_equal_p (val
, TYPE_MIN_VALUE (type
), 0))
5657 return range_binop (MINUS_EXPR
, NULL_TREE
, val
, 0,
5658 build_int_cst (TREE_TYPE (val
), 1), 0);
5661 /* Return the successor of VAL in its type, handling the infinite case. */
5664 range_successor (tree val
)
5666 tree type
= TREE_TYPE (val
);
5668 if (INTEGRAL_TYPE_P (type
)
5669 && operand_equal_p (val
, TYPE_MAX_VALUE (type
), 0))
5672 return range_binop (PLUS_EXPR
, NULL_TREE
, val
, 0,
5673 build_int_cst (TREE_TYPE (val
), 1), 0);
5676 /* Given two ranges, see if we can merge them into one. Return 1 if we
5677 can, 0 if we can't. Set the output range into the specified parameters. */
5680 merge_ranges (int *pin_p
, tree
*plow
, tree
*phigh
, int in0_p
, tree low0
,
5681 tree high0
, int in1_p
, tree low1
, tree high1
)
5689 int lowequal
= ((low0
== 0 && low1
== 0)
5690 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
5691 low0
, 0, low1
, 0)));
5692 int highequal
= ((high0
== 0 && high1
== 0)
5693 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
5694 high0
, 1, high1
, 1)));
5696 /* Make range 0 be the range that starts first, or ends last if they
5697 start at the same value. Swap them if it isn't. */
5698 if (integer_onep (range_binop (GT_EXPR
, integer_type_node
,
5701 && integer_onep (range_binop (GT_EXPR
, integer_type_node
,
5702 high1
, 1, high0
, 1))))
5704 temp
= in0_p
, in0_p
= in1_p
, in1_p
= temp
;
5705 tem
= low0
, low0
= low1
, low1
= tem
;
5706 tem
= high0
, high0
= high1
, high1
= tem
;
5709 /* If the second range is != high1 where high1 is the type maximum of
5710 the type, try first merging with < high1 range. */
5713 && TREE_CODE (low1
) == INTEGER_CST
5714 && (TREE_CODE (TREE_TYPE (low1
)) == INTEGER_TYPE
5715 || (TREE_CODE (TREE_TYPE (low1
)) == ENUMERAL_TYPE
5716 && known_eq (TYPE_PRECISION (TREE_TYPE (low1
)),
5717 GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1
))))))
5718 && operand_equal_p (low1
, high1
, 0))
5720 if (tree_int_cst_equal (low1
, TYPE_MAX_VALUE (TREE_TYPE (low1
)))
5721 && merge_ranges (pin_p
, plow
, phigh
, in0_p
, low0
, high0
,
5722 !in1_p
, NULL_TREE
, range_predecessor (low1
)))
5724 /* Similarly for the second range != low1 where low1 is the type minimum
5725 of the type, try first merging with > low1 range. */
5726 if (tree_int_cst_equal (low1
, TYPE_MIN_VALUE (TREE_TYPE (low1
)))
5727 && merge_ranges (pin_p
, plow
, phigh
, in0_p
, low0
, high0
,
5728 !in1_p
, range_successor (low1
), NULL_TREE
))
5732 /* Now flag two cases, whether the ranges are disjoint or whether the
5733 second range is totally subsumed in the first. Note that the tests
5734 below are simplified by the ones above. */
5735 no_overlap
= integer_onep (range_binop (LT_EXPR
, integer_type_node
,
5736 high0
, 1, low1
, 0));
5737 subset
= integer_onep (range_binop (LE_EXPR
, integer_type_node
,
5738 high1
, 1, high0
, 1));
5740 /* We now have four cases, depending on whether we are including or
5741 excluding the two ranges. */
5744 /* If they don't overlap, the result is false. If the second range
5745 is a subset it is the result. Otherwise, the range is from the start
5746 of the second to the end of the first. */
5748 in_p
= 0, low
= high
= 0;
5750 in_p
= 1, low
= low1
, high
= high1
;
5752 in_p
= 1, low
= low1
, high
= high0
;
5755 else if (in0_p
&& ! in1_p
)
5757 /* If they don't overlap, the result is the first range. If they are
5758 equal, the result is false. If the second range is a subset of the
5759 first, and the ranges begin at the same place, we go from just after
5760 the end of the second range to the end of the first. If the second
5761 range is not a subset of the first, or if it is a subset and both
5762 ranges end at the same place, the range starts at the start of the
5763 first range and ends just before the second range.
5764 Otherwise, we can't describe this as a single range. */
5766 in_p
= 1, low
= low0
, high
= high0
;
5767 else if (lowequal
&& highequal
)
5768 in_p
= 0, low
= high
= 0;
5769 else if (subset
&& lowequal
)
5771 low
= range_successor (high1
);
5776 /* We are in the weird situation where high0 > high1 but
5777 high1 has no successor. Punt. */
5781 else if (! subset
|| highequal
)
5784 high
= range_predecessor (low1
);
5788 /* low0 < low1 but low1 has no predecessor. Punt. */
5796 else if (! in0_p
&& in1_p
)
5798 /* If they don't overlap, the result is the second range. If the second
5799 is a subset of the first, the result is false. Otherwise,
5800 the range starts just after the first range and ends at the
5801 end of the second. */
5803 in_p
= 1, low
= low1
, high
= high1
;
5804 else if (subset
|| highequal
)
5805 in_p
= 0, low
= high
= 0;
5808 low
= range_successor (high0
);
5813 /* high1 > high0 but high0 has no successor. Punt. */
5821 /* The case where we are excluding both ranges. Here the complex case
5822 is if they don't overlap. In that case, the only time we have a
5823 range is if they are adjacent. If the second is a subset of the
5824 first, the result is the first. Otherwise, the range to exclude
5825 starts at the beginning of the first range and ends at the end of the
5829 if (integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
5830 range_successor (high0
),
5832 in_p
= 0, low
= low0
, high
= high1
;
5835 /* Canonicalize - [min, x] into - [-, x]. */
5836 if (low0
&& TREE_CODE (low0
) == INTEGER_CST
)
5837 switch (TREE_CODE (TREE_TYPE (low0
)))
5840 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0
)),
5842 (TYPE_MODE (TREE_TYPE (low0
)))))
5846 if (tree_int_cst_equal (low0
,
5847 TYPE_MIN_VALUE (TREE_TYPE (low0
))))
5851 if (TYPE_UNSIGNED (TREE_TYPE (low0
))
5852 && integer_zerop (low0
))
5859 /* Canonicalize - [x, max] into - [x, -]. */
5860 if (high1
&& TREE_CODE (high1
) == INTEGER_CST
)
5861 switch (TREE_CODE (TREE_TYPE (high1
)))
5864 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1
)),
5866 (TYPE_MODE (TREE_TYPE (high1
)))))
5870 if (tree_int_cst_equal (high1
,
5871 TYPE_MAX_VALUE (TREE_TYPE (high1
))))
5875 if (TYPE_UNSIGNED (TREE_TYPE (high1
))
5876 && integer_zerop (range_binop (PLUS_EXPR
, NULL_TREE
,
5878 build_int_cst (TREE_TYPE (high1
), 1),
5886 /* The ranges might be also adjacent between the maximum and
5887 minimum values of the given type. For
5888 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5889 return + [x + 1, y - 1]. */
5890 if (low0
== 0 && high1
== 0)
5892 low
= range_successor (high0
);
5893 high
= range_predecessor (low1
);
5894 if (low
== 0 || high
== 0)
5904 in_p
= 0, low
= low0
, high
= high0
;
5906 in_p
= 0, low
= low0
, high
= high1
;
5909 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
5914 /* Subroutine of fold, looking inside expressions of the form
5915 A op B ? A : C, where (ARG00, COMP_CODE, ARG01), ARG1 and ARG2
5916 are the three operands of the COND_EXPR. This function is
5917 being used also to optimize A op B ? C : A, by reversing the
5920 Return a folded expression whose code is not a COND_EXPR
5921 anymore, or NULL_TREE if no folding opportunity is found. */
5924 fold_cond_expr_with_comparison (location_t loc
, tree type
,
5925 enum tree_code comp_code
,
5926 tree arg00
, tree arg01
, tree arg1
, tree arg2
)
5928 tree arg1_type
= TREE_TYPE (arg1
);
5934 /* If we have A op 0 ? A : -A, consider applying the following
5937 A == 0? A : -A same as -A
5938 A != 0? A : -A same as A
5939 A >= 0? A : -A same as abs (A)
5940 A > 0? A : -A same as abs (A)
5941 A <= 0? A : -A same as -abs (A)
5942 A < 0? A : -A same as -abs (A)
5944 None of these transformations work for modes with signed
5945 zeros. If A is +/-0, the first two transformations will
5946 change the sign of the result (from +0 to -0, or vice
5947 versa). The last four will fix the sign of the result,
5948 even though the original expressions could be positive or
5949 negative, depending on the sign of A.
5951 Note that all these transformations are correct if A is
5952 NaN, since the two alternatives (A and -A) are also NaNs. */
5953 if (!HONOR_SIGNED_ZEROS (type
)
5954 && (FLOAT_TYPE_P (TREE_TYPE (arg01
))
5955 ? real_zerop (arg01
)
5956 : integer_zerop (arg01
))
5957 && ((TREE_CODE (arg2
) == NEGATE_EXPR
5958 && operand_equal_p (TREE_OPERAND (arg2
, 0), arg1
, 0))
5959 /* In the case that A is of the form X-Y, '-A' (arg2) may
5960 have already been folded to Y-X, check for that. */
5961 || (TREE_CODE (arg1
) == MINUS_EXPR
5962 && TREE_CODE (arg2
) == MINUS_EXPR
5963 && operand_equal_p (TREE_OPERAND (arg1
, 0),
5964 TREE_OPERAND (arg2
, 1), 0)
5965 && operand_equal_p (TREE_OPERAND (arg1
, 1),
5966 TREE_OPERAND (arg2
, 0), 0))))
5971 tem
= fold_convert_loc (loc
, arg1_type
, arg1
);
5972 return fold_convert_loc (loc
, type
, negate_expr (tem
));
5975 return fold_convert_loc (loc
, type
, arg1
);
5978 if (flag_trapping_math
)
5983 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
5985 tem
= fold_build1_loc (loc
, ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
5986 return fold_convert_loc (loc
, type
, tem
);
5989 if (flag_trapping_math
)
5994 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
5996 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
5997 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1
)))
5999 /* A <= 0 ? A : -A for A INT_MIN is valid, but -abs(INT_MIN)
6000 is not, invokes UB both in abs and in the negation of it.
6001 So, use ABSU_EXPR instead. */
6002 tree utype
= unsigned_type_for (TREE_TYPE (arg1
));
6003 tem
= fold_build1_loc (loc
, ABSU_EXPR
, utype
, arg1
);
6004 tem
= negate_expr (tem
);
6005 return fold_convert_loc (loc
, type
, tem
);
6009 tem
= fold_build1_loc (loc
, ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
6010 return negate_expr (fold_convert_loc (loc
, type
, tem
));
6013 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
6017 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
6018 A == 0 ? A : 0 is always 0 unless A is -0. Note that
6019 both transformations are correct when A is NaN: A != 0
6020 is then true, and A == 0 is false. */
6022 if (!HONOR_SIGNED_ZEROS (type
)
6023 && integer_zerop (arg01
) && integer_zerop (arg2
))
6025 if (comp_code
== NE_EXPR
)
6026 return fold_convert_loc (loc
, type
, arg1
);
6027 else if (comp_code
== EQ_EXPR
)
6028 return build_zero_cst (type
);
6031 /* Try some transformations of A op B ? A : B.
6033 A == B? A : B same as B
6034 A != B? A : B same as A
6035 A >= B? A : B same as max (A, B)
6036 A > B? A : B same as max (B, A)
6037 A <= B? A : B same as min (A, B)
6038 A < B? A : B same as min (B, A)
6040 As above, these transformations don't work in the presence
6041 of signed zeros. For example, if A and B are zeros of
6042 opposite sign, the first two transformations will change
6043 the sign of the result. In the last four, the original
6044 expressions give different results for (A=+0, B=-0) and
6045 (A=-0, B=+0), but the transformed expressions do not.
6047 The first two transformations are correct if either A or B
6048 is a NaN. In the first transformation, the condition will
6049 be false, and B will indeed be chosen. In the case of the
6050 second transformation, the condition A != B will be true,
6051 and A will be chosen.
6053 The conversions to max() and min() are not correct if B is
6054 a number and A is not. The conditions in the original
6055 expressions will be false, so all four give B. The min()
6056 and max() versions would give a NaN instead. */
6057 if (!HONOR_SIGNED_ZEROS (type
)
6058 && operand_equal_for_comparison_p (arg01
, arg2
)
6059 /* Avoid these transformations if the COND_EXPR may be used
6060 as an lvalue in the C++ front-end. PR c++/19199. */
6062 || VECTOR_TYPE_P (type
)
6063 || (! lang_GNU_CXX ()
6064 && strcmp (lang_hooks
.name
, "GNU Objective-C++") != 0)
6065 || ! maybe_lvalue_p (arg1
)
6066 || ! maybe_lvalue_p (arg2
)))
6068 tree comp_op0
= arg00
;
6069 tree comp_op1
= arg01
;
6070 tree comp_type
= TREE_TYPE (comp_op0
);
6075 return fold_convert_loc (loc
, type
, arg2
);
6077 return fold_convert_loc (loc
, type
, arg1
);
6082 /* In C++ a ?: expression can be an lvalue, so put the
6083 operand which will be used if they are equal first
6084 so that we can convert this back to the
6085 corresponding COND_EXPR. */
6086 if (!HONOR_NANS (arg1
))
6088 comp_op0
= fold_convert_loc (loc
, comp_type
, comp_op0
);
6089 comp_op1
= fold_convert_loc (loc
, comp_type
, comp_op1
);
6090 tem
= (comp_code
== LE_EXPR
|| comp_code
== UNLE_EXPR
)
6091 ? fold_build2_loc (loc
, MIN_EXPR
, comp_type
, comp_op0
, comp_op1
)
6092 : fold_build2_loc (loc
, MIN_EXPR
, comp_type
,
6093 comp_op1
, comp_op0
);
6094 return fold_convert_loc (loc
, type
, tem
);
6101 if (!HONOR_NANS (arg1
))
6103 comp_op0
= fold_convert_loc (loc
, comp_type
, comp_op0
);
6104 comp_op1
= fold_convert_loc (loc
, comp_type
, comp_op1
);
6105 tem
= (comp_code
== GE_EXPR
|| comp_code
== UNGE_EXPR
)
6106 ? fold_build2_loc (loc
, MAX_EXPR
, comp_type
, comp_op0
, comp_op1
)
6107 : fold_build2_loc (loc
, MAX_EXPR
, comp_type
,
6108 comp_op1
, comp_op0
);
6109 return fold_convert_loc (loc
, type
, tem
);
6113 if (!HONOR_NANS (arg1
))
6114 return fold_convert_loc (loc
, type
, arg2
);
6117 if (!HONOR_NANS (arg1
))
6118 return fold_convert_loc (loc
, type
, arg1
);
6121 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
6131 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
6132 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
6133 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
6137 /* EXP is some logical combination of boolean tests. See if we can
6138 merge it into some range test. Return the new tree if so. */
6141 fold_range_test (location_t loc
, enum tree_code code
, tree type
,
6144 int or_op
= (code
== TRUTH_ORIF_EXPR
6145 || code
== TRUTH_OR_EXPR
);
6146 int in0_p
, in1_p
, in_p
;
6147 tree low0
, low1
, low
, high0
, high1
, high
;
6148 bool strict_overflow_p
= false;
6150 const char * const warnmsg
= G_("assuming signed overflow does not occur "
6151 "when simplifying range test");
6153 if (!INTEGRAL_TYPE_P (type
))
6156 lhs
= make_range (op0
, &in0_p
, &low0
, &high0
, &strict_overflow_p
);
6157 /* If op0 is known true or false and this is a short-circuiting
6158 operation we must not merge with op1 since that makes side-effects
6159 unconditional. So special-case this. */
6161 && ((code
== TRUTH_ORIF_EXPR
&& in0_p
)
6162 || (code
== TRUTH_ANDIF_EXPR
&& !in0_p
)))
6164 rhs
= make_range (op1
, &in1_p
, &low1
, &high1
, &strict_overflow_p
);
6166 /* If this is an OR operation, invert both sides; we will invert
6167 again at the end. */
6169 in0_p
= ! in0_p
, in1_p
= ! in1_p
;
6171 /* If both expressions are the same, if we can merge the ranges, and we
6172 can build the range test, return it or it inverted. If one of the
6173 ranges is always true or always false, consider it to be the same
6174 expression as the other. */
6175 if ((lhs
== 0 || rhs
== 0 || operand_equal_p (lhs
, rhs
, 0))
6176 && merge_ranges (&in_p
, &low
, &high
, in0_p
, low0
, high0
,
6178 && (tem
= (build_range_check (loc
, type
,
6180 : rhs
!= 0 ? rhs
: integer_zero_node
,
6181 in_p
, low
, high
))) != 0)
6183 if (strict_overflow_p
)
6184 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
6185 return or_op
? invert_truthvalue_loc (loc
, tem
) : tem
;
6188 /* On machines where the branch cost is expensive, if this is a
6189 short-circuited branch and the underlying object on both sides
6190 is the same, make a non-short-circuit operation. */
6191 bool logical_op_non_short_circuit
= LOGICAL_OP_NON_SHORT_CIRCUIT
;
6192 if (param_logical_op_non_short_circuit
!= -1)
6193 logical_op_non_short_circuit
6194 = param_logical_op_non_short_circuit
;
6195 if (logical_op_non_short_circuit
6196 && !sanitize_coverage_p ()
6197 && lhs
!= 0 && rhs
!= 0
6198 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
)
6199 && operand_equal_p (lhs
, rhs
, 0))
6201 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
6202 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
6203 which cases we can't do this. */
6204 if (simple_operand_p (lhs
))
6205 return build2_loc (loc
, code
== TRUTH_ANDIF_EXPR
6206 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
6209 else if (!lang_hooks
.decls
.global_bindings_p ()
6210 && !CONTAINS_PLACEHOLDER_P (lhs
))
6212 tree common
= save_expr (lhs
);
6214 if ((lhs
= build_range_check (loc
, type
, common
,
6215 or_op
? ! in0_p
: in0_p
,
6217 && (rhs
= build_range_check (loc
, type
, common
,
6218 or_op
? ! in1_p
: in1_p
,
6221 if (strict_overflow_p
)
6222 fold_overflow_warning (warnmsg
,
6223 WARN_STRICT_OVERFLOW_COMPARISON
);
6224 return build2_loc (loc
, code
== TRUTH_ANDIF_EXPR
6225 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
6234 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
6235 bit value. Arrange things so the extra bits will be set to zero if and
6236 only if C is signed-extended to its full width. If MASK is nonzero,
6237 it is an INTEGER_CST that should be AND'ed with the extra bits. */
6240 unextend (tree c
, int p
, int unsignedp
, tree mask
)
6242 tree type
= TREE_TYPE (c
);
6243 int modesize
= GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type
));
6246 if (p
== modesize
|| unsignedp
)
6249 /* We work by getting just the sign bit into the low-order bit, then
6250 into the high-order bit, then sign-extend. We then XOR that value
6252 temp
= build_int_cst (TREE_TYPE (c
),
6253 wi::extract_uhwi (wi::to_wide (c
), p
- 1, 1));
6255 /* We must use a signed type in order to get an arithmetic right shift.
6256 However, we must also avoid introducing accidental overflows, so that
6257 a subsequent call to integer_zerop will work. Hence we must
6258 do the type conversion here. At this point, the constant is either
6259 zero or one, and the conversion to a signed type can never overflow.
6260 We could get an overflow if this conversion is done anywhere else. */
6261 if (TYPE_UNSIGNED (type
))
6262 temp
= fold_convert (signed_type_for (type
), temp
);
6264 temp
= const_binop (LSHIFT_EXPR
, temp
, size_int (modesize
- 1));
6265 temp
= const_binop (RSHIFT_EXPR
, temp
, size_int (modesize
- p
- 1));
6267 temp
= const_binop (BIT_AND_EXPR
, temp
,
6268 fold_convert (TREE_TYPE (c
), mask
));
6269 /* If necessary, convert the type back to match the type of C. */
6270 if (TYPE_UNSIGNED (type
))
6271 temp
= fold_convert (type
, temp
);
6273 return fold_convert (type
, const_binop (BIT_XOR_EXPR
, c
, temp
));
6276 /* For an expression that has the form
6280 we can drop one of the inner expressions and simplify to
6284 LOC is the location of the resulting expression. OP is the inner
6285 logical operation; the left-hand side in the examples above, while CMPOP
6286 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
6287 removing a condition that guards another, as in
6288 (A != NULL && A->...) || A == NULL
6289 which we must not transform. If RHS_ONLY is true, only eliminate the
6290 right-most operand of the inner logical operation. */
6293 merge_truthop_with_opposite_arm (location_t loc
, tree op
, tree cmpop
,
6296 tree type
= TREE_TYPE (cmpop
);
6297 enum tree_code code
= TREE_CODE (cmpop
);
6298 enum tree_code truthop_code
= TREE_CODE (op
);
6299 tree lhs
= TREE_OPERAND (op
, 0);
6300 tree rhs
= TREE_OPERAND (op
, 1);
6301 tree orig_lhs
= lhs
, orig_rhs
= rhs
;
6302 enum tree_code rhs_code
= TREE_CODE (rhs
);
6303 enum tree_code lhs_code
= TREE_CODE (lhs
);
6304 enum tree_code inv_code
;
6306 if (TREE_SIDE_EFFECTS (op
) || TREE_SIDE_EFFECTS (cmpop
))
6309 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
6312 if (rhs_code
== truthop_code
)
6314 tree newrhs
= merge_truthop_with_opposite_arm (loc
, rhs
, cmpop
, rhs_only
);
6315 if (newrhs
!= NULL_TREE
)
6318 rhs_code
= TREE_CODE (rhs
);
6321 if (lhs_code
== truthop_code
&& !rhs_only
)
6323 tree newlhs
= merge_truthop_with_opposite_arm (loc
, lhs
, cmpop
, false);
6324 if (newlhs
!= NULL_TREE
)
6327 lhs_code
= TREE_CODE (lhs
);
6331 inv_code
= invert_tree_comparison (code
, HONOR_NANS (type
));
6332 if (inv_code
== rhs_code
6333 && operand_equal_p (TREE_OPERAND (rhs
, 0), TREE_OPERAND (cmpop
, 0), 0)
6334 && operand_equal_p (TREE_OPERAND (rhs
, 1), TREE_OPERAND (cmpop
, 1), 0))
6336 if (!rhs_only
&& inv_code
== lhs_code
6337 && operand_equal_p (TREE_OPERAND (lhs
, 0), TREE_OPERAND (cmpop
, 0), 0)
6338 && operand_equal_p (TREE_OPERAND (lhs
, 1), TREE_OPERAND (cmpop
, 1), 0))
6340 if (rhs
!= orig_rhs
|| lhs
!= orig_lhs
)
6341 return fold_build2_loc (loc
, truthop_code
, TREE_TYPE (cmpop
),
6346 /* Find ways of folding logical expressions of LHS and RHS:
6347 Try to merge two comparisons to the same innermost item.
6348 Look for range tests like "ch >= '0' && ch <= '9'".
6349 Look for combinations of simple terms on machines with expensive branches
6350 and evaluate the RHS unconditionally.
6352 For example, if we have p->a == 2 && p->b == 4 and we can make an
6353 object large enough to span both A and B, we can do this with a comparison
6354 against the object ANDed with the a mask.
6356 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
6357 operations to do this with one comparison.
6359 We check for both normal comparisons and the BIT_AND_EXPRs made this by
6360 function and the one above.
6362 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
6363 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
6365 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
6368 We return the simplified tree or 0 if no optimization is possible. */
6371 fold_truth_andor_1 (location_t loc
, enum tree_code code
, tree truth_type
,
6374 /* If this is the "or" of two comparisons, we can do something if
6375 the comparisons are NE_EXPR. If this is the "and", we can do something
6376 if the comparisons are EQ_EXPR. I.e.,
6377 (a->b == 2 && a->c == 4) can become (a->new == NEW).
6379 WANTED_CODE is this operation code. For single bit fields, we can
6380 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
6381 comparison for one-bit fields. */
6383 enum tree_code wanted_code
;
6384 enum tree_code lcode
, rcode
;
6385 tree ll_arg
, lr_arg
, rl_arg
, rr_arg
;
6386 tree ll_inner
, lr_inner
, rl_inner
, rr_inner
;
6387 HOST_WIDE_INT ll_bitsize
, ll_bitpos
, lr_bitsize
, lr_bitpos
;
6388 HOST_WIDE_INT rl_bitsize
, rl_bitpos
, rr_bitsize
, rr_bitpos
;
6389 HOST_WIDE_INT xll_bitpos
, xlr_bitpos
, xrl_bitpos
, xrr_bitpos
;
6390 HOST_WIDE_INT lnbitsize
, lnbitpos
, rnbitsize
, rnbitpos
;
6391 int ll_unsignedp
, lr_unsignedp
, rl_unsignedp
, rr_unsignedp
;
6392 int ll_reversep
, lr_reversep
, rl_reversep
, rr_reversep
;
6393 machine_mode ll_mode
, lr_mode
, rl_mode
, rr_mode
;
6394 scalar_int_mode lnmode
, rnmode
;
6395 tree ll_mask
, lr_mask
, rl_mask
, rr_mask
;
6396 tree ll_and_mask
, lr_and_mask
, rl_and_mask
, rr_and_mask
;
6397 tree l_const
, r_const
;
6398 tree lntype
, rntype
, result
;
6399 HOST_WIDE_INT first_bit
, end_bit
;
6402 /* Start by getting the comparison codes. Fail if anything is volatile.
6403 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
6404 it were surrounded with a NE_EXPR. */
6406 if (TREE_SIDE_EFFECTS (lhs
) || TREE_SIDE_EFFECTS (rhs
))
6409 lcode
= TREE_CODE (lhs
);
6410 rcode
= TREE_CODE (rhs
);
6412 if (lcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (lhs
, 1)))
6414 lhs
= build2 (NE_EXPR
, truth_type
, lhs
,
6415 build_int_cst (TREE_TYPE (lhs
), 0));
6419 if (rcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (rhs
, 1)))
6421 rhs
= build2 (NE_EXPR
, truth_type
, rhs
,
6422 build_int_cst (TREE_TYPE (rhs
), 0));
6426 if (TREE_CODE_CLASS (lcode
) != tcc_comparison
6427 || TREE_CODE_CLASS (rcode
) != tcc_comparison
)
6430 ll_arg
= TREE_OPERAND (lhs
, 0);
6431 lr_arg
= TREE_OPERAND (lhs
, 1);
6432 rl_arg
= TREE_OPERAND (rhs
, 0);
6433 rr_arg
= TREE_OPERAND (rhs
, 1);
6435 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
6436 if (simple_operand_p (ll_arg
)
6437 && simple_operand_p (lr_arg
))
6439 if (operand_equal_p (ll_arg
, rl_arg
, 0)
6440 && operand_equal_p (lr_arg
, rr_arg
, 0))
6442 result
= combine_comparisons (loc
, code
, lcode
, rcode
,
6443 truth_type
, ll_arg
, lr_arg
);
6447 else if (operand_equal_p (ll_arg
, rr_arg
, 0)
6448 && operand_equal_p (lr_arg
, rl_arg
, 0))
6450 result
= combine_comparisons (loc
, code
, lcode
,
6451 swap_tree_comparison (rcode
),
6452 truth_type
, ll_arg
, lr_arg
);
6458 code
= ((code
== TRUTH_AND_EXPR
|| code
== TRUTH_ANDIF_EXPR
)
6459 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
);
6461 /* If the RHS can be evaluated unconditionally and its operands are
6462 simple, it wins to evaluate the RHS unconditionally on machines
6463 with expensive branches. In this case, this isn't a comparison
6464 that can be merged. */
6466 if (BRANCH_COST (optimize_function_for_speed_p (cfun
),
6468 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg
))
6469 && simple_operand_p (rl_arg
)
6470 && simple_operand_p (rr_arg
))
6472 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
6473 if (code
== TRUTH_OR_EXPR
6474 && lcode
== NE_EXPR
&& integer_zerop (lr_arg
)
6475 && rcode
== NE_EXPR
&& integer_zerop (rr_arg
)
6476 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
)
6477 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg
)))
6478 return build2_loc (loc
, NE_EXPR
, truth_type
,
6479 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
6481 build_int_cst (TREE_TYPE (ll_arg
), 0));
6483 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
6484 if (code
== TRUTH_AND_EXPR
6485 && lcode
== EQ_EXPR
&& integer_zerop (lr_arg
)
6486 && rcode
== EQ_EXPR
&& integer_zerop (rr_arg
)
6487 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
)
6488 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg
)))
6489 return build2_loc (loc
, EQ_EXPR
, truth_type
,
6490 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
6492 build_int_cst (TREE_TYPE (ll_arg
), 0));
6495 /* See if the comparisons can be merged. Then get all the parameters for
6498 if ((lcode
!= EQ_EXPR
&& lcode
!= NE_EXPR
)
6499 || (rcode
!= EQ_EXPR
&& rcode
!= NE_EXPR
))
6502 ll_reversep
= lr_reversep
= rl_reversep
= rr_reversep
= 0;
6504 ll_inner
= decode_field_reference (loc
, &ll_arg
,
6505 &ll_bitsize
, &ll_bitpos
, &ll_mode
,
6506 &ll_unsignedp
, &ll_reversep
, &volatilep
,
6507 &ll_mask
, &ll_and_mask
);
6508 lr_inner
= decode_field_reference (loc
, &lr_arg
,
6509 &lr_bitsize
, &lr_bitpos
, &lr_mode
,
6510 &lr_unsignedp
, &lr_reversep
, &volatilep
,
6511 &lr_mask
, &lr_and_mask
);
6512 rl_inner
= decode_field_reference (loc
, &rl_arg
,
6513 &rl_bitsize
, &rl_bitpos
, &rl_mode
,
6514 &rl_unsignedp
, &rl_reversep
, &volatilep
,
6515 &rl_mask
, &rl_and_mask
);
6516 rr_inner
= decode_field_reference (loc
, &rr_arg
,
6517 &rr_bitsize
, &rr_bitpos
, &rr_mode
,
6518 &rr_unsignedp
, &rr_reversep
, &volatilep
,
6519 &rr_mask
, &rr_and_mask
);
6521 /* It must be true that the inner operation on the lhs of each
6522 comparison must be the same if we are to be able to do anything.
6523 Then see if we have constants. If not, the same must be true for
6526 || ll_reversep
!= rl_reversep
6527 || ll_inner
== 0 || rl_inner
== 0
6528 || ! operand_equal_p (ll_inner
, rl_inner
, 0))
6531 if (TREE_CODE (lr_arg
) == INTEGER_CST
6532 && TREE_CODE (rr_arg
) == INTEGER_CST
)
6534 l_const
= lr_arg
, r_const
= rr_arg
;
6535 lr_reversep
= ll_reversep
;
6537 else if (lr_reversep
!= rr_reversep
6538 || lr_inner
== 0 || rr_inner
== 0
6539 || ! operand_equal_p (lr_inner
, rr_inner
, 0))
6542 l_const
= r_const
= 0;
6544 /* If either comparison code is not correct for our logical operation,
6545 fail. However, we can convert a one-bit comparison against zero into
6546 the opposite comparison against that bit being set in the field. */
6548 wanted_code
= (code
== TRUTH_AND_EXPR
? EQ_EXPR
: NE_EXPR
);
6549 if (lcode
!= wanted_code
)
6551 if (l_const
&& integer_zerop (l_const
) && integer_pow2p (ll_mask
))
6553 /* Make the left operand unsigned, since we are only interested
6554 in the value of one bit. Otherwise we are doing the wrong
6563 /* This is analogous to the code for l_const above. */
6564 if (rcode
!= wanted_code
)
6566 if (r_const
&& integer_zerop (r_const
) && integer_pow2p (rl_mask
))
6575 /* See if we can find a mode that contains both fields being compared on
6576 the left. If we can't, fail. Otherwise, update all constants and masks
6577 to be relative to a field of that size. */
6578 first_bit
= MIN (ll_bitpos
, rl_bitpos
);
6579 end_bit
= MAX (ll_bitpos
+ ll_bitsize
, rl_bitpos
+ rl_bitsize
);
6580 if (!get_best_mode (end_bit
- first_bit
, first_bit
, 0, 0,
6581 TYPE_ALIGN (TREE_TYPE (ll_inner
)), BITS_PER_WORD
,
6582 volatilep
, &lnmode
))
6585 lnbitsize
= GET_MODE_BITSIZE (lnmode
);
6586 lnbitpos
= first_bit
& ~ (lnbitsize
- 1);
6587 lntype
= lang_hooks
.types
.type_for_size (lnbitsize
, 1);
6588 xll_bitpos
= ll_bitpos
- lnbitpos
, xrl_bitpos
= rl_bitpos
- lnbitpos
;
6590 if (ll_reversep
? !BYTES_BIG_ENDIAN
: BYTES_BIG_ENDIAN
)
6592 xll_bitpos
= lnbitsize
- xll_bitpos
- ll_bitsize
;
6593 xrl_bitpos
= lnbitsize
- xrl_bitpos
- rl_bitsize
;
6596 ll_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
, lntype
, ll_mask
),
6597 size_int (xll_bitpos
));
6598 rl_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
, lntype
, rl_mask
),
6599 size_int (xrl_bitpos
));
6600 if (ll_mask
== NULL_TREE
|| rl_mask
== NULL_TREE
)
6605 l_const
= fold_convert_loc (loc
, lntype
, l_const
);
6606 l_const
= unextend (l_const
, ll_bitsize
, ll_unsignedp
, ll_and_mask
);
6607 l_const
= const_binop (LSHIFT_EXPR
, l_const
, size_int (xll_bitpos
));
6608 if (l_const
== NULL_TREE
)
6610 if (! integer_zerop (const_binop (BIT_AND_EXPR
, l_const
,
6611 fold_build1_loc (loc
, BIT_NOT_EXPR
,
6614 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
6616 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
6621 r_const
= fold_convert_loc (loc
, lntype
, r_const
);
6622 r_const
= unextend (r_const
, rl_bitsize
, rl_unsignedp
, rl_and_mask
);
6623 r_const
= const_binop (LSHIFT_EXPR
, r_const
, size_int (xrl_bitpos
));
6624 if (r_const
== NULL_TREE
)
6626 if (! integer_zerop (const_binop (BIT_AND_EXPR
, r_const
,
6627 fold_build1_loc (loc
, BIT_NOT_EXPR
,
6630 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
6632 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
6636 /* If the right sides are not constant, do the same for it. Also,
6637 disallow this optimization if a size, signedness or storage order
6638 mismatch occurs between the left and right sides. */
6641 if (ll_bitsize
!= lr_bitsize
|| rl_bitsize
!= rr_bitsize
6642 || ll_unsignedp
!= lr_unsignedp
|| rl_unsignedp
!= rr_unsignedp
6643 || ll_reversep
!= lr_reversep
6644 /* Make sure the two fields on the right
6645 correspond to the left without being swapped. */
6646 || ll_bitpos
- rl_bitpos
!= lr_bitpos
- rr_bitpos
)
6649 first_bit
= MIN (lr_bitpos
, rr_bitpos
);
6650 end_bit
= MAX (lr_bitpos
+ lr_bitsize
, rr_bitpos
+ rr_bitsize
);
6651 if (!get_best_mode (end_bit
- first_bit
, first_bit
, 0, 0,
6652 TYPE_ALIGN (TREE_TYPE (lr_inner
)), BITS_PER_WORD
,
6653 volatilep
, &rnmode
))
6656 rnbitsize
= GET_MODE_BITSIZE (rnmode
);
6657 rnbitpos
= first_bit
& ~ (rnbitsize
- 1);
6658 rntype
= lang_hooks
.types
.type_for_size (rnbitsize
, 1);
6659 xlr_bitpos
= lr_bitpos
- rnbitpos
, xrr_bitpos
= rr_bitpos
- rnbitpos
;
6661 if (lr_reversep
? !BYTES_BIG_ENDIAN
: BYTES_BIG_ENDIAN
)
6663 xlr_bitpos
= rnbitsize
- xlr_bitpos
- lr_bitsize
;
6664 xrr_bitpos
= rnbitsize
- xrr_bitpos
- rr_bitsize
;
6667 lr_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
,
6669 size_int (xlr_bitpos
));
6670 rr_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
,
6672 size_int (xrr_bitpos
));
6673 if (lr_mask
== NULL_TREE
|| rr_mask
== NULL_TREE
)
6676 /* Make a mask that corresponds to both fields being compared.
6677 Do this for both items being compared. If the operands are the
6678 same size and the bits being compared are in the same position
6679 then we can do this by masking both and comparing the masked
6681 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
);
6682 lr_mask
= const_binop (BIT_IOR_EXPR
, lr_mask
, rr_mask
);
6683 if (lnbitsize
== rnbitsize
6684 && xll_bitpos
== xlr_bitpos
6688 lhs
= make_bit_field_ref (loc
, ll_inner
, ll_arg
,
6689 lntype
, lnbitsize
, lnbitpos
,
6690 ll_unsignedp
|| rl_unsignedp
, ll_reversep
);
6691 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
6692 lhs
= build2 (BIT_AND_EXPR
, lntype
, lhs
, ll_mask
);
6694 rhs
= make_bit_field_ref (loc
, lr_inner
, lr_arg
,
6695 rntype
, rnbitsize
, rnbitpos
,
6696 lr_unsignedp
|| rr_unsignedp
, lr_reversep
);
6697 if (! all_ones_mask_p (lr_mask
, rnbitsize
))
6698 rhs
= build2 (BIT_AND_EXPR
, rntype
, rhs
, lr_mask
);
6700 return build2_loc (loc
, wanted_code
, truth_type
, lhs
, rhs
);
6703 /* There is still another way we can do something: If both pairs of
6704 fields being compared are adjacent, we may be able to make a wider
6705 field containing them both.
6707 Note that we still must mask the lhs/rhs expressions. Furthermore,
6708 the mask must be shifted to account for the shift done by
6709 make_bit_field_ref. */
6710 if (((ll_bitsize
+ ll_bitpos
== rl_bitpos
6711 && lr_bitsize
+ lr_bitpos
== rr_bitpos
)
6712 || (ll_bitpos
== rl_bitpos
+ rl_bitsize
6713 && lr_bitpos
== rr_bitpos
+ rr_bitsize
))
6721 lhs
= make_bit_field_ref (loc
, ll_inner
, ll_arg
, lntype
,
6722 ll_bitsize
+ rl_bitsize
,
6723 MIN (ll_bitpos
, rl_bitpos
),
6724 ll_unsignedp
, ll_reversep
);
6725 rhs
= make_bit_field_ref (loc
, lr_inner
, lr_arg
, rntype
,
6726 lr_bitsize
+ rr_bitsize
,
6727 MIN (lr_bitpos
, rr_bitpos
),
6728 lr_unsignedp
, lr_reversep
);
6730 ll_mask
= const_binop (RSHIFT_EXPR
, ll_mask
,
6731 size_int (MIN (xll_bitpos
, xrl_bitpos
)));
6732 lr_mask
= const_binop (RSHIFT_EXPR
, lr_mask
,
6733 size_int (MIN (xlr_bitpos
, xrr_bitpos
)));
6734 if (ll_mask
== NULL_TREE
|| lr_mask
== NULL_TREE
)
6737 /* Convert to the smaller type before masking out unwanted bits. */
6739 if (lntype
!= rntype
)
6741 if (lnbitsize
> rnbitsize
)
6743 lhs
= fold_convert_loc (loc
, rntype
, lhs
);
6744 ll_mask
= fold_convert_loc (loc
, rntype
, ll_mask
);
6747 else if (lnbitsize
< rnbitsize
)
6749 rhs
= fold_convert_loc (loc
, lntype
, rhs
);
6750 lr_mask
= fold_convert_loc (loc
, lntype
, lr_mask
);
6755 if (! all_ones_mask_p (ll_mask
, ll_bitsize
+ rl_bitsize
))
6756 lhs
= build2 (BIT_AND_EXPR
, type
, lhs
, ll_mask
);
6758 if (! all_ones_mask_p (lr_mask
, lr_bitsize
+ rr_bitsize
))
6759 rhs
= build2 (BIT_AND_EXPR
, type
, rhs
, lr_mask
);
6761 return build2_loc (loc
, wanted_code
, truth_type
, lhs
, rhs
);
6767 /* Handle the case of comparisons with constants. If there is something in
6768 common between the masks, those bits of the constants must be the same.
6769 If not, the condition is always false. Test for this to avoid generating
6770 incorrect code below. */
6771 result
= const_binop (BIT_AND_EXPR
, ll_mask
, rl_mask
);
6772 if (! integer_zerop (result
)
6773 && simple_cst_equal (const_binop (BIT_AND_EXPR
, result
, l_const
),
6774 const_binop (BIT_AND_EXPR
, result
, r_const
)) != 1)
6776 if (wanted_code
== NE_EXPR
)
6778 warning (0, "%<or%> of unmatched not-equal tests is always 1");
6779 return constant_boolean_node (true, truth_type
);
6783 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6784 return constant_boolean_node (false, truth_type
);
6791 /* Construct the expression we will return. First get the component
6792 reference we will make. Unless the mask is all ones the width of
6793 that field, perform the mask operation. Then compare with the
6795 result
= make_bit_field_ref (loc
, ll_inner
, ll_arg
,
6796 lntype
, lnbitsize
, lnbitpos
,
6797 ll_unsignedp
|| rl_unsignedp
, ll_reversep
);
6799 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
);
6800 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
6801 result
= build2_loc (loc
, BIT_AND_EXPR
, lntype
, result
, ll_mask
);
6803 return build2_loc (loc
, wanted_code
, truth_type
, result
,
6804 const_binop (BIT_IOR_EXPR
, l_const
, r_const
));
6807 /* T is an integer expression that is being multiplied, divided, or taken a
6808 modulus (CODE says which and what kind of divide or modulus) by a
6809 constant C. See if we can eliminate that operation by folding it with
6810 other operations already in T. WIDE_TYPE, if non-null, is a type that
6811 should be used for the computation if wider than our type.
6813 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6814 (X * 2) + (Y * 4). We must, however, be assured that either the original
6815 expression would not overflow or that overflow is undefined for the type
6816 in the language in question.
6818 If we return a non-null expression, it is an equivalent form of the
6819 original computation, but need not be in the original type.
6821 We set *STRICT_OVERFLOW_P to true if the return values depends on
6822 signed overflow being undefined. Otherwise we do not change
6823 *STRICT_OVERFLOW_P. */
6826 extract_muldiv (tree t
, tree c
, enum tree_code code
, tree wide_type
,
6827 bool *strict_overflow_p
)
6829 /* To avoid exponential search depth, refuse to allow recursion past
6830 three levels. Beyond that (1) it's highly unlikely that we'll find
6831 something interesting and (2) we've probably processed it before
6832 when we built the inner expression. */
6841 ret
= extract_muldiv_1 (t
, c
, code
, wide_type
, strict_overflow_p
);
6848 extract_muldiv_1 (tree t
, tree c
, enum tree_code code
, tree wide_type
,
6849 bool *strict_overflow_p
)
6851 tree type
= TREE_TYPE (t
);
6852 enum tree_code tcode
= TREE_CODE (t
);
6853 tree ctype
= (wide_type
!= 0
6854 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type
))
6855 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type
)))
6856 ? wide_type
: type
);
6858 bool same_p
= tcode
== code
;
6859 tree op0
= NULL_TREE
, op1
= NULL_TREE
;
6860 bool sub_strict_overflow_p
;
6862 /* Don't deal with constants of zero here; they confuse the code below. */
6863 if (integer_zerop (c
))
6866 if (TREE_CODE_CLASS (tcode
) == tcc_unary
)
6867 op0
= TREE_OPERAND (t
, 0);
6869 if (TREE_CODE_CLASS (tcode
) == tcc_binary
)
6870 op0
= TREE_OPERAND (t
, 0), op1
= TREE_OPERAND (t
, 1);
6872 /* Note that we need not handle conditional operations here since fold
6873 already handles those cases. So just do arithmetic here. */
6877 /* For a constant, we can always simplify if we are a multiply
6878 or (for divide and modulus) if it is a multiple of our constant. */
6879 if (code
== MULT_EXPR
6880 || wi::multiple_of_p (wi::to_wide (t
), wi::to_wide (c
),
6883 tree tem
= const_binop (code
, fold_convert (ctype
, t
),
6884 fold_convert (ctype
, c
));
6885 /* If the multiplication overflowed, we lost information on it.
6886 See PR68142 and PR69845. */
6887 if (TREE_OVERFLOW (tem
))
6893 CASE_CONVERT
: case NON_LVALUE_EXPR
:
6894 if (!INTEGRAL_TYPE_P (TREE_TYPE (op0
)))
6896 /* If op0 is an expression ... */
6897 if ((COMPARISON_CLASS_P (op0
)
6898 || UNARY_CLASS_P (op0
)
6899 || BINARY_CLASS_P (op0
)
6900 || VL_EXP_CLASS_P (op0
)
6901 || EXPRESSION_CLASS_P (op0
))
6902 /* ... and has wrapping overflow, and its type is smaller
6903 than ctype, then we cannot pass through as widening. */
6904 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0
))
6905 && (TYPE_PRECISION (ctype
)
6906 > TYPE_PRECISION (TREE_TYPE (op0
))))
6907 /* ... or this is a truncation (t is narrower than op0),
6908 then we cannot pass through this narrowing. */
6909 || (TYPE_PRECISION (type
)
6910 < TYPE_PRECISION (TREE_TYPE (op0
)))
6911 /* ... or signedness changes for division or modulus,
6912 then we cannot pass through this conversion. */
6913 || (code
!= MULT_EXPR
6914 && (TYPE_UNSIGNED (ctype
)
6915 != TYPE_UNSIGNED (TREE_TYPE (op0
))))
6916 /* ... or has undefined overflow while the converted to
6917 type has not, we cannot do the operation in the inner type
6918 as that would introduce undefined overflow. */
6919 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0
))
6920 && !TYPE_OVERFLOW_UNDEFINED (type
))))
6923 /* Pass the constant down and see if we can make a simplification. If
6924 we can, replace this expression with the inner simplification for
6925 possible later conversion to our or some other type. */
6926 if ((t2
= fold_convert (TREE_TYPE (op0
), c
)) != 0
6927 && TREE_CODE (t2
) == INTEGER_CST
6928 && !TREE_OVERFLOW (t2
)
6929 && (t1
= extract_muldiv (op0
, t2
, code
,
6930 code
== MULT_EXPR
? ctype
: NULL_TREE
,
6931 strict_overflow_p
)) != 0)
6936 /* If widening the type changes it from signed to unsigned, then we
6937 must avoid building ABS_EXPR itself as unsigned. */
6938 if (TYPE_UNSIGNED (ctype
) && !TYPE_UNSIGNED (type
))
6940 tree cstype
= (*signed_type_for
) (ctype
);
6941 if ((t1
= extract_muldiv (op0
, c
, code
, cstype
, strict_overflow_p
))
6944 t1
= fold_build1 (tcode
, cstype
, fold_convert (cstype
, t1
));
6945 return fold_convert (ctype
, t1
);
6949 /* If the constant is negative, we cannot simplify this. */
6950 if (tree_int_cst_sgn (c
) == -1)
6954 /* For division and modulus, type can't be unsigned, as e.g.
6955 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6956 For signed types, even with wrapping overflow, this is fine. */
6957 if (code
!= MULT_EXPR
&& TYPE_UNSIGNED (type
))
6959 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
, strict_overflow_p
))
6961 return fold_build1 (tcode
, ctype
, fold_convert (ctype
, t1
));
6964 case MIN_EXPR
: case MAX_EXPR
:
6965 /* If widening the type changes the signedness, then we can't perform
6966 this optimization as that changes the result. */
6967 if (TYPE_UNSIGNED (ctype
) != TYPE_UNSIGNED (type
))
6970 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6971 sub_strict_overflow_p
= false;
6972 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
,
6973 &sub_strict_overflow_p
)) != 0
6974 && (t2
= extract_muldiv (op1
, c
, code
, wide_type
,
6975 &sub_strict_overflow_p
)) != 0)
6977 if (tree_int_cst_sgn (c
) < 0)
6978 tcode
= (tcode
== MIN_EXPR
? MAX_EXPR
: MIN_EXPR
);
6979 if (sub_strict_overflow_p
)
6980 *strict_overflow_p
= true;
6981 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
6982 fold_convert (ctype
, t2
));
6986 case LSHIFT_EXPR
: case RSHIFT_EXPR
:
6987 /* If the second operand is constant, this is a multiplication
6988 or floor division, by a power of two, so we can treat it that
6989 way unless the multiplier or divisor overflows. Signed
6990 left-shift overflow is implementation-defined rather than
6991 undefined in C90, so do not convert signed left shift into
6993 if (TREE_CODE (op1
) == INTEGER_CST
6994 && (tcode
== RSHIFT_EXPR
|| TYPE_UNSIGNED (TREE_TYPE (op0
)))
6995 /* const_binop may not detect overflow correctly,
6996 so check for it explicitly here. */
6997 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node
)),
6999 && (t1
= fold_convert (ctype
,
7000 const_binop (LSHIFT_EXPR
, size_one_node
,
7002 && !TREE_OVERFLOW (t1
))
7003 return extract_muldiv (build2 (tcode
== LSHIFT_EXPR
7004 ? MULT_EXPR
: FLOOR_DIV_EXPR
,
7006 fold_convert (ctype
, op0
),
7008 c
, code
, wide_type
, strict_overflow_p
);
7011 case PLUS_EXPR
: case MINUS_EXPR
:
7012 /* See if we can eliminate the operation on both sides. If we can, we
7013 can return a new PLUS or MINUS. If we can't, the only remaining
7014 cases where we can do anything are if the second operand is a
7016 sub_strict_overflow_p
= false;
7017 t1
= extract_muldiv (op0
, c
, code
, wide_type
, &sub_strict_overflow_p
);
7018 t2
= extract_muldiv (op1
, c
, code
, wide_type
, &sub_strict_overflow_p
);
7019 if (t1
!= 0 && t2
!= 0
7020 && TYPE_OVERFLOW_WRAPS (ctype
)
7021 && (code
== MULT_EXPR
7022 /* If not multiplication, we can only do this if both operands
7023 are divisible by c. */
7024 || (multiple_of_p (ctype
, op0
, c
)
7025 && multiple_of_p (ctype
, op1
, c
))))
7027 if (sub_strict_overflow_p
)
7028 *strict_overflow_p
= true;
7029 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
7030 fold_convert (ctype
, t2
));
7033 /* If this was a subtraction, negate OP1 and set it to be an addition.
7034 This simplifies the logic below. */
7035 if (tcode
== MINUS_EXPR
)
7037 tcode
= PLUS_EXPR
, op1
= negate_expr (op1
);
7038 /* If OP1 was not easily negatable, the constant may be OP0. */
7039 if (TREE_CODE (op0
) == INTEGER_CST
)
7041 std::swap (op0
, op1
);
7046 if (TREE_CODE (op1
) != INTEGER_CST
)
7049 /* If either OP1 or C are negative, this optimization is not safe for
7050 some of the division and remainder types while for others we need
7051 to change the code. */
7052 if (tree_int_cst_sgn (op1
) < 0 || tree_int_cst_sgn (c
) < 0)
7054 if (code
== CEIL_DIV_EXPR
)
7055 code
= FLOOR_DIV_EXPR
;
7056 else if (code
== FLOOR_DIV_EXPR
)
7057 code
= CEIL_DIV_EXPR
;
7058 else if (code
!= MULT_EXPR
7059 && code
!= CEIL_MOD_EXPR
&& code
!= FLOOR_MOD_EXPR
)
7063 /* If it's a multiply or a division/modulus operation of a multiple
7064 of our constant, do the operation and verify it doesn't overflow. */
7065 if (code
== MULT_EXPR
7066 || wi::multiple_of_p (wi::to_wide (op1
), wi::to_wide (c
),
7069 op1
= const_binop (code
, fold_convert (ctype
, op1
),
7070 fold_convert (ctype
, c
));
7071 /* We allow the constant to overflow with wrapping semantics. */
7073 || (TREE_OVERFLOW (op1
) && !TYPE_OVERFLOW_WRAPS (ctype
)))
7079 /* If we have an unsigned type, we cannot widen the operation since it
7080 will change the result if the original computation overflowed. */
7081 if (TYPE_UNSIGNED (ctype
) && ctype
!= type
)
7084 /* The last case is if we are a multiply. In that case, we can
7085 apply the distributive law to commute the multiply and addition
7086 if the multiplication of the constants doesn't overflow
7087 and overflow is defined. With undefined overflow
7088 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.
7089 But fold_plusminus_mult_expr would factor back any power-of-two
7090 value so do not distribute in the first place in this case. */
7091 if (code
== MULT_EXPR
7092 && TYPE_OVERFLOW_WRAPS (ctype
)
7093 && !(tree_fits_shwi_p (c
) && pow2p_hwi (absu_hwi (tree_to_shwi (c
)))))
7094 return fold_build2 (tcode
, ctype
,
7095 fold_build2 (code
, ctype
,
7096 fold_convert (ctype
, op0
),
7097 fold_convert (ctype
, c
)),
7103 /* We have a special case here if we are doing something like
7104 (C * 8) % 4 since we know that's zero. */
7105 if ((code
== TRUNC_MOD_EXPR
|| code
== CEIL_MOD_EXPR
7106 || code
== FLOOR_MOD_EXPR
|| code
== ROUND_MOD_EXPR
)
7107 /* If the multiplication can overflow we cannot optimize this. */
7108 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t
))
7109 && TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
7110 && wi::multiple_of_p (wi::to_wide (op1
), wi::to_wide (c
),
7113 *strict_overflow_p
= true;
7114 return omit_one_operand (type
, integer_zero_node
, op0
);
7117 /* ... fall through ... */
7119 case TRUNC_DIV_EXPR
: case CEIL_DIV_EXPR
: case FLOOR_DIV_EXPR
:
7120 case ROUND_DIV_EXPR
: case EXACT_DIV_EXPR
:
7121 /* If we can extract our operation from the LHS, do so and return a
7122 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
7123 do something only if the second operand is a constant. */
7125 && TYPE_OVERFLOW_WRAPS (ctype
)
7126 && (t1
= extract_muldiv (op0
, c
, code
, wide_type
,
7127 strict_overflow_p
)) != 0)
7128 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
7129 fold_convert (ctype
, op1
));
7130 else if (tcode
== MULT_EXPR
&& code
== MULT_EXPR
7131 && TYPE_OVERFLOW_WRAPS (ctype
)
7132 && (t1
= extract_muldiv (op1
, c
, code
, wide_type
,
7133 strict_overflow_p
)) != 0)
7134 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
7135 fold_convert (ctype
, t1
));
7136 else if (TREE_CODE (op1
) != INTEGER_CST
)
7139 /* If these are the same operation types, we can associate them
7140 assuming no overflow. */
7143 bool overflow_p
= false;
7144 wi::overflow_type overflow_mul
;
7145 signop sign
= TYPE_SIGN (ctype
);
7146 unsigned prec
= TYPE_PRECISION (ctype
);
7147 wide_int mul
= wi::mul (wi::to_wide (op1
, prec
),
7148 wi::to_wide (c
, prec
),
7149 sign
, &overflow_mul
);
7150 overflow_p
= TREE_OVERFLOW (c
) | TREE_OVERFLOW (op1
);
7152 && ((sign
== UNSIGNED
&& tcode
!= MULT_EXPR
) || sign
== SIGNED
))
7155 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
7156 wide_int_to_tree (ctype
, mul
));
7159 /* If these operations "cancel" each other, we have the main
7160 optimizations of this pass, which occur when either constant is a
7161 multiple of the other, in which case we replace this with either an
7162 operation or CODE or TCODE.
7164 If we have an unsigned type, we cannot do this since it will change
7165 the result if the original computation overflowed. */
7166 if (TYPE_OVERFLOW_UNDEFINED (ctype
)
7167 && !TYPE_OVERFLOW_SANITIZED (ctype
)
7168 && ((code
== MULT_EXPR
&& tcode
== EXACT_DIV_EXPR
)
7169 || (tcode
== MULT_EXPR
7170 && code
!= TRUNC_MOD_EXPR
&& code
!= CEIL_MOD_EXPR
7171 && code
!= FLOOR_MOD_EXPR
&& code
!= ROUND_MOD_EXPR
7172 && code
!= MULT_EXPR
)))
7174 if (wi::multiple_of_p (wi::to_wide (op1
), wi::to_wide (c
),
7177 *strict_overflow_p
= true;
7178 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
7179 fold_convert (ctype
,
7180 const_binop (TRUNC_DIV_EXPR
,
7183 else if (wi::multiple_of_p (wi::to_wide (c
), wi::to_wide (op1
),
7186 *strict_overflow_p
= true;
7187 return fold_build2 (code
, ctype
, fold_convert (ctype
, op0
),
7188 fold_convert (ctype
,
7189 const_binop (TRUNC_DIV_EXPR
,
7202 /* Return a node which has the indicated constant VALUE (either 0 or
7203 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
7204 and is of the indicated TYPE. */
7207 constant_boolean_node (bool value
, tree type
)
7209 if (type
== integer_type_node
)
7210 return value
? integer_one_node
: integer_zero_node
;
7211 else if (type
== boolean_type_node
)
7212 return value
? boolean_true_node
: boolean_false_node
;
7213 else if (VECTOR_TYPE_P (type
))
7214 return build_vector_from_val (type
,
7215 build_int_cst (TREE_TYPE (type
),
7218 return fold_convert (type
, value
? integer_one_node
: integer_zero_node
);
7222 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
7223 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
7224 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
7225 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
7226 COND is the first argument to CODE; otherwise (as in the example
7227 given here), it is the second argument. TYPE is the type of the
7228 original expression. Return NULL_TREE if no simplification is
7232 fold_binary_op_with_conditional_arg (location_t loc
,
7233 enum tree_code code
,
7234 tree type
, tree op0
, tree op1
,
7235 tree cond
, tree arg
, int cond_first_p
)
7237 tree cond_type
= cond_first_p
? TREE_TYPE (op0
) : TREE_TYPE (op1
);
7238 tree arg_type
= cond_first_p
? TREE_TYPE (op1
) : TREE_TYPE (op0
);
7239 tree test
, true_value
, false_value
;
7240 tree lhs
= NULL_TREE
;
7241 tree rhs
= NULL_TREE
;
7242 enum tree_code cond_code
= COND_EXPR
;
7244 /* Do not move possibly trapping operations into the conditional as this
7245 pessimizes code and causes gimplification issues when applied late. */
7246 if (operation_could_trap_p (code
, FLOAT_TYPE_P (type
),
7247 ANY_INTEGRAL_TYPE_P (type
)
7248 && TYPE_OVERFLOW_TRAPS (type
), op1
))
7251 if (TREE_CODE (cond
) == COND_EXPR
7252 || TREE_CODE (cond
) == VEC_COND_EXPR
)
7254 test
= TREE_OPERAND (cond
, 0);
7255 true_value
= TREE_OPERAND (cond
, 1);
7256 false_value
= TREE_OPERAND (cond
, 2);
7257 /* If this operand throws an expression, then it does not make
7258 sense to try to perform a logical or arithmetic operation
7260 if (VOID_TYPE_P (TREE_TYPE (true_value
)))
7262 if (VOID_TYPE_P (TREE_TYPE (false_value
)))
7265 else if (!(TREE_CODE (type
) != VECTOR_TYPE
7266 && VECTOR_TYPE_P (TREE_TYPE (cond
))))
7268 tree testtype
= TREE_TYPE (cond
);
7270 true_value
= constant_boolean_node (true, testtype
);
7271 false_value
= constant_boolean_node (false, testtype
);
7274 /* Detect the case of mixing vector and scalar types - bail out. */
7277 if (VECTOR_TYPE_P (TREE_TYPE (test
)))
7278 cond_code
= VEC_COND_EXPR
;
7280 /* This transformation is only worthwhile if we don't have to wrap ARG
7281 in a SAVE_EXPR and the operation can be simplified without recursing
7282 on at least one of the branches once its pushed inside the COND_EXPR. */
7283 if (!TREE_CONSTANT (arg
)
7284 && (TREE_SIDE_EFFECTS (arg
)
7285 || TREE_CODE (arg
) == COND_EXPR
|| TREE_CODE (arg
) == VEC_COND_EXPR
7286 || TREE_CONSTANT (true_value
) || TREE_CONSTANT (false_value
)))
7289 arg
= fold_convert_loc (loc
, arg_type
, arg
);
7292 true_value
= fold_convert_loc (loc
, cond_type
, true_value
);
7294 lhs
= fold_build2_loc (loc
, code
, type
, true_value
, arg
);
7296 lhs
= fold_build2_loc (loc
, code
, type
, arg
, true_value
);
7300 false_value
= fold_convert_loc (loc
, cond_type
, false_value
);
7302 rhs
= fold_build2_loc (loc
, code
, type
, false_value
, arg
);
7304 rhs
= fold_build2_loc (loc
, code
, type
, arg
, false_value
);
7307 /* Check that we have simplified at least one of the branches. */
7308 if (!TREE_CONSTANT (arg
) && !TREE_CONSTANT (lhs
) && !TREE_CONSTANT (rhs
))
7311 return fold_build3_loc (loc
, cond_code
, type
, test
, lhs
, rhs
);
7315 /* Subroutine of fold() that checks for the addition of ARG +/- 0.0.
7317 If !NEGATE, return true if ZERO_ARG is +/-0.0 and, for all ARG of
7318 type TYPE, ARG + ZERO_ARG is the same as ARG. If NEGATE, return true
7319 if ARG - ZERO_ARG is the same as X.
7321 If ARG is NULL, check for any value of type TYPE.
7323 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
7324 and finite. The problematic cases are when X is zero, and its mode
7325 has signed zeros. In the case of rounding towards -infinity,
7326 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
7327 modes, X + 0 is not the same as X because -0 + 0 is 0. */
7330 fold_real_zero_addition_p (const_tree type
, const_tree arg
,
7331 const_tree zero_arg
, int negate
)
7333 if (!real_zerop (zero_arg
))
7336 /* Don't allow the fold with -fsignaling-nans. */
7337 if (arg
? tree_expr_maybe_signaling_nan_p (arg
) : HONOR_SNANS (type
))
7340 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
7341 if (!HONOR_SIGNED_ZEROS (type
))
7344 /* There is no case that is safe for all rounding modes. */
7345 if (HONOR_SIGN_DEPENDENT_ROUNDING (type
))
7348 /* In a vector or complex, we would need to check the sign of all zeros. */
7349 if (TREE_CODE (zero_arg
) == VECTOR_CST
)
7350 zero_arg
= uniform_vector_p (zero_arg
);
7351 if (!zero_arg
|| TREE_CODE (zero_arg
) != REAL_CST
)
7354 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
7355 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (zero_arg
)))
7358 /* The mode has signed zeros, and we have to honor their sign.
7359 In this situation, there are only two cases we can return true for.
7360 (i) X - 0 is the same as X with default rounding.
7361 (ii) X + 0 is X when X can't possibly be -0.0. */
7362 return negate
|| (arg
&& !tree_expr_maybe_real_minus_zero_p (arg
));
7365 /* Subroutine of match.pd that optimizes comparisons of a division by
7366 a nonzero integer constant against an integer constant, i.e.
7369 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7370 GE_EXPR or LE_EXPR. ARG01 and ARG1 must be a INTEGER_CST. */
7373 fold_div_compare (enum tree_code code
, tree c1
, tree c2
, tree
*lo
,
7374 tree
*hi
, bool *neg_overflow
)
7376 tree prod
, tmp
, type
= TREE_TYPE (c1
);
7377 signop sign
= TYPE_SIGN (type
);
7378 wi::overflow_type overflow
;
7380 /* We have to do this the hard way to detect unsigned overflow.
7381 prod = int_const_binop (MULT_EXPR, c1, c2); */
7382 wide_int val
= wi::mul (wi::to_wide (c1
), wi::to_wide (c2
), sign
, &overflow
);
7383 prod
= force_fit_type (type
, val
, -1, overflow
);
7384 *neg_overflow
= false;
7386 if (sign
== UNSIGNED
)
7388 tmp
= int_const_binop (MINUS_EXPR
, c1
, build_int_cst (type
, 1));
7391 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
7392 val
= wi::add (wi::to_wide (prod
), wi::to_wide (tmp
), sign
, &overflow
);
7393 *hi
= force_fit_type (type
, val
, -1, overflow
| TREE_OVERFLOW (prod
));
7395 else if (tree_int_cst_sgn (c1
) >= 0)
7397 tmp
= int_const_binop (MINUS_EXPR
, c1
, build_int_cst (type
, 1));
7398 switch (tree_int_cst_sgn (c2
))
7401 *neg_overflow
= true;
7402 *lo
= int_const_binop (MINUS_EXPR
, prod
, tmp
);
7407 *lo
= fold_negate_const (tmp
, type
);
7412 *hi
= int_const_binop (PLUS_EXPR
, prod
, tmp
);
7422 /* A negative divisor reverses the relational operators. */
7423 code
= swap_tree_comparison (code
);
7425 tmp
= int_const_binop (PLUS_EXPR
, c1
, build_int_cst (type
, 1));
7426 switch (tree_int_cst_sgn (c2
))
7429 *hi
= int_const_binop (MINUS_EXPR
, prod
, tmp
);
7434 *hi
= fold_negate_const (tmp
, type
);
7439 *neg_overflow
= true;
7440 *lo
= int_const_binop (PLUS_EXPR
, prod
, tmp
);
7449 if (code
!= EQ_EXPR
&& code
!= NE_EXPR
)
7452 if (TREE_OVERFLOW (*lo
)
7453 || operand_equal_p (*lo
, TYPE_MIN_VALUE (type
), 0))
7455 if (TREE_OVERFLOW (*hi
)
7456 || operand_equal_p (*hi
, TYPE_MAX_VALUE (type
), 0))
7462 /* Test whether it is preferable to swap two operands, ARG0 and
7463 ARG1, for example because ARG0 is an integer constant and ARG1
7467 tree_swap_operands_p (const_tree arg0
, const_tree arg1
)
7469 if (CONSTANT_CLASS_P (arg1
))
7471 if (CONSTANT_CLASS_P (arg0
))
7477 if (TREE_CONSTANT (arg1
))
7479 if (TREE_CONSTANT (arg0
))
7482 /* It is preferable to swap two SSA_NAME to ensure a canonical form
7483 for commutative and comparison operators. Ensuring a canonical
7484 form allows the optimizers to find additional redundancies without
7485 having to explicitly check for both orderings. */
7486 if (TREE_CODE (arg0
) == SSA_NAME
7487 && TREE_CODE (arg1
) == SSA_NAME
7488 && SSA_NAME_VERSION (arg0
) > SSA_NAME_VERSION (arg1
))
7491 /* Put SSA_NAMEs last. */
7492 if (TREE_CODE (arg1
) == SSA_NAME
)
7494 if (TREE_CODE (arg0
) == SSA_NAME
)
7497 /* Put variables last. */
7507 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7508 means A >= Y && A != MAX, but in this case we know that
7509 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7512 fold_to_nonsharp_ineq_using_bound (location_t loc
, tree ineq
, tree bound
)
7514 tree a
, typea
, type
= TREE_TYPE (bound
), a1
, diff
, y
;
7516 if (TREE_CODE (bound
) == LT_EXPR
)
7517 a
= TREE_OPERAND (bound
, 0);
7518 else if (TREE_CODE (bound
) == GT_EXPR
)
7519 a
= TREE_OPERAND (bound
, 1);
7523 typea
= TREE_TYPE (a
);
7524 if (!INTEGRAL_TYPE_P (typea
)
7525 && !POINTER_TYPE_P (typea
))
7528 if (TREE_CODE (ineq
) == LT_EXPR
)
7530 a1
= TREE_OPERAND (ineq
, 1);
7531 y
= TREE_OPERAND (ineq
, 0);
7533 else if (TREE_CODE (ineq
) == GT_EXPR
)
7535 a1
= TREE_OPERAND (ineq
, 0);
7536 y
= TREE_OPERAND (ineq
, 1);
7541 if (TREE_TYPE (a1
) != typea
)
7544 if (POINTER_TYPE_P (typea
))
7546 /* Convert the pointer types into integer before taking the difference. */
7547 tree ta
= fold_convert_loc (loc
, ssizetype
, a
);
7548 tree ta1
= fold_convert_loc (loc
, ssizetype
, a1
);
7549 diff
= fold_binary_loc (loc
, MINUS_EXPR
, ssizetype
, ta1
, ta
);
7552 diff
= fold_binary_loc (loc
, MINUS_EXPR
, typea
, a1
, a
);
7554 if (!diff
|| !integer_onep (diff
))
7557 return fold_build2_loc (loc
, GE_EXPR
, type
, a
, y
);
7560 /* Fold a sum or difference of at least one multiplication.
7561 Returns the folded tree or NULL if no simplification could be made. */
7564 fold_plusminus_mult_expr (location_t loc
, enum tree_code code
, tree type
,
7565 tree arg0
, tree arg1
)
7567 tree arg00
, arg01
, arg10
, arg11
;
7568 tree alt0
= NULL_TREE
, alt1
= NULL_TREE
, same
;
7570 /* (A * C) +- (B * C) -> (A+-B) * C.
7571 (A * C) +- A -> A * (C+-1).
7572 We are most concerned about the case where C is a constant,
7573 but other combinations show up during loop reduction. Since
7574 it is not difficult, try all four possibilities. */
7576 if (TREE_CODE (arg0
) == MULT_EXPR
)
7578 arg00
= TREE_OPERAND (arg0
, 0);
7579 arg01
= TREE_OPERAND (arg0
, 1);
7581 else if (TREE_CODE (arg0
) == INTEGER_CST
)
7583 arg00
= build_one_cst (type
);
7588 /* We cannot generate constant 1 for fract. */
7589 if (ALL_FRACT_MODE_P (TYPE_MODE (type
)))
7592 arg01
= build_one_cst (type
);
7594 if (TREE_CODE (arg1
) == MULT_EXPR
)
7596 arg10
= TREE_OPERAND (arg1
, 0);
7597 arg11
= TREE_OPERAND (arg1
, 1);
7599 else if (TREE_CODE (arg1
) == INTEGER_CST
)
7601 arg10
= build_one_cst (type
);
7602 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7603 the purpose of this canonicalization. */
7604 if (wi::neg_p (wi::to_wide (arg1
), TYPE_SIGN (TREE_TYPE (arg1
)))
7605 && negate_expr_p (arg1
)
7606 && code
== PLUS_EXPR
)
7608 arg11
= negate_expr (arg1
);
7616 /* We cannot generate constant 1 for fract. */
7617 if (ALL_FRACT_MODE_P (TYPE_MODE (type
)))
7620 arg11
= build_one_cst (type
);
7624 /* Prefer factoring a common non-constant. */
7625 if (operand_equal_p (arg00
, arg10
, 0))
7626 same
= arg00
, alt0
= arg01
, alt1
= arg11
;
7627 else if (operand_equal_p (arg01
, arg11
, 0))
7628 same
= arg01
, alt0
= arg00
, alt1
= arg10
;
7629 else if (operand_equal_p (arg00
, arg11
, 0))
7630 same
= arg00
, alt0
= arg01
, alt1
= arg10
;
7631 else if (operand_equal_p (arg01
, arg10
, 0))
7632 same
= arg01
, alt0
= arg00
, alt1
= arg11
;
7634 /* No identical multiplicands; see if we can find a common
7635 power-of-two factor in non-power-of-two multiplies. This
7636 can help in multi-dimensional array access. */
7637 else if (tree_fits_shwi_p (arg01
) && tree_fits_shwi_p (arg11
))
7639 HOST_WIDE_INT int01
= tree_to_shwi (arg01
);
7640 HOST_WIDE_INT int11
= tree_to_shwi (arg11
);
7645 /* Move min of absolute values to int11. */
7646 if (absu_hwi (int01
) < absu_hwi (int11
))
7648 tmp
= int01
, int01
= int11
, int11
= tmp
;
7649 alt0
= arg00
, arg00
= arg10
, arg10
= alt0
;
7656 const unsigned HOST_WIDE_INT factor
= absu_hwi (int11
);
7658 && pow2p_hwi (factor
)
7659 && (int01
& (factor
- 1)) == 0
7660 /* The remainder should not be a constant, otherwise we
7661 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7662 increased the number of multiplications necessary. */
7663 && TREE_CODE (arg10
) != INTEGER_CST
)
7665 alt0
= fold_build2_loc (loc
, MULT_EXPR
, TREE_TYPE (arg00
), arg00
,
7666 build_int_cst (TREE_TYPE (arg00
),
7671 maybe_same
= alt0
, alt0
= alt1
, alt1
= maybe_same
;
7678 if (! ANY_INTEGRAL_TYPE_P (type
)
7679 || TYPE_OVERFLOW_WRAPS (type
)
7680 /* We are neither factoring zero nor minus one. */
7681 || TREE_CODE (same
) == INTEGER_CST
)
7682 return fold_build2_loc (loc
, MULT_EXPR
, type
,
7683 fold_build2_loc (loc
, code
, type
,
7684 fold_convert_loc (loc
, type
, alt0
),
7685 fold_convert_loc (loc
, type
, alt1
)),
7686 fold_convert_loc (loc
, type
, same
));
7688 /* Same may be zero and thus the operation 'code' may overflow. Likewise
7689 same may be minus one and thus the multiplication may overflow. Perform
7690 the sum operation in an unsigned type. */
7691 tree utype
= unsigned_type_for (type
);
7692 tree tem
= fold_build2_loc (loc
, code
, utype
,
7693 fold_convert_loc (loc
, utype
, alt0
),
7694 fold_convert_loc (loc
, utype
, alt1
));
7695 /* If the sum evaluated to a constant that is not -INF the multiplication
7697 if (TREE_CODE (tem
) == INTEGER_CST
7698 && (wi::to_wide (tem
)
7699 != wi::min_value (TYPE_PRECISION (utype
), SIGNED
)))
7700 return fold_build2_loc (loc
, MULT_EXPR
, type
,
7701 fold_convert (type
, tem
), same
);
7703 /* Do not resort to unsigned multiplication because
7704 we lose the no-overflow property of the expression. */
7708 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7709 specified by EXPR into the buffer PTR of length LEN bytes.
7710 Return the number of bytes placed in the buffer, or zero
7714 native_encode_int (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7716 tree type
= TREE_TYPE (expr
);
7717 int total_bytes
= GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type
));
7718 int byte
, offset
, word
, words
;
7719 unsigned char value
;
7721 if ((off
== -1 && total_bytes
> len
) || off
>= total_bytes
)
7728 return MIN (len
, total_bytes
- off
);
7730 words
= total_bytes
/ UNITS_PER_WORD
;
7732 for (byte
= 0; byte
< total_bytes
; byte
++)
7734 int bitpos
= byte
* BITS_PER_UNIT
;
7735 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7737 value
= wi::extract_uhwi (wi::to_widest (expr
), bitpos
, BITS_PER_UNIT
);
7739 if (total_bytes
> UNITS_PER_WORD
)
7741 word
= byte
/ UNITS_PER_WORD
;
7742 if (WORDS_BIG_ENDIAN
)
7743 word
= (words
- 1) - word
;
7744 offset
= word
* UNITS_PER_WORD
;
7745 if (BYTES_BIG_ENDIAN
)
7746 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7748 offset
+= byte
% UNITS_PER_WORD
;
7751 offset
= BYTES_BIG_ENDIAN
? (total_bytes
- 1) - byte
: byte
;
7752 if (offset
>= off
&& offset
- off
< len
)
7753 ptr
[offset
- off
] = value
;
7755 return MIN (len
, total_bytes
- off
);
7759 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7760 specified by EXPR into the buffer PTR of length LEN bytes.
7761 Return the number of bytes placed in the buffer, or zero
7765 native_encode_fixed (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7767 tree type
= TREE_TYPE (expr
);
7768 scalar_mode mode
= SCALAR_TYPE_MODE (type
);
7769 int total_bytes
= GET_MODE_SIZE (mode
);
7770 FIXED_VALUE_TYPE value
;
7771 tree i_value
, i_type
;
7773 if (total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
7776 i_type
= lang_hooks
.types
.type_for_size (GET_MODE_BITSIZE (mode
), 1);
7778 if (NULL_TREE
== i_type
|| TYPE_PRECISION (i_type
) != total_bytes
)
7781 value
= TREE_FIXED_CST (expr
);
7782 i_value
= double_int_to_tree (i_type
, value
.data
);
7784 return native_encode_int (i_value
, ptr
, len
, off
);
7788 /* Subroutine of native_encode_expr. Encode the REAL_CST
7789 specified by EXPR into the buffer PTR of length LEN bytes.
7790 Return the number of bytes placed in the buffer, or zero
7794 native_encode_real (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7796 tree type
= TREE_TYPE (expr
);
7797 int total_bytes
= GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type
));
7798 int byte
, offset
, word
, words
, bitpos
;
7799 unsigned char value
;
7801 /* There are always 32 bits in each long, no matter the size of
7802 the hosts long. We handle floating point representations with
7806 if ((off
== -1 && total_bytes
> len
) || off
>= total_bytes
)
7813 return MIN (len
, total_bytes
- off
);
7815 words
= (32 / BITS_PER_UNIT
) / UNITS_PER_WORD
;
7817 real_to_target (tmp
, TREE_REAL_CST_PTR (expr
), TYPE_MODE (type
));
7819 for (bitpos
= 0; bitpos
< total_bytes
* BITS_PER_UNIT
;
7820 bitpos
+= BITS_PER_UNIT
)
7822 byte
= (bitpos
/ BITS_PER_UNIT
) & 3;
7823 value
= (unsigned char) (tmp
[bitpos
/ 32] >> (bitpos
& 31));
7825 if (UNITS_PER_WORD
< 4)
7827 word
= byte
/ UNITS_PER_WORD
;
7828 if (WORDS_BIG_ENDIAN
)
7829 word
= (words
- 1) - word
;
7830 offset
= word
* UNITS_PER_WORD
;
7831 if (BYTES_BIG_ENDIAN
)
7832 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7834 offset
+= byte
% UNITS_PER_WORD
;
7839 if (BYTES_BIG_ENDIAN
)
7841 /* Reverse bytes within each long, or within the entire float
7842 if it's smaller than a long (for HFmode). */
7843 offset
= MIN (3, total_bytes
- 1) - offset
;
7844 gcc_assert (offset
>= 0);
7847 offset
= offset
+ ((bitpos
/ BITS_PER_UNIT
) & ~3);
7849 && offset
- off
< len
)
7850 ptr
[offset
- off
] = value
;
7852 return MIN (len
, total_bytes
- off
);
7855 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7856 specified by EXPR into the buffer PTR of length LEN bytes.
7857 Return the number of bytes placed in the buffer, or zero
7861 native_encode_complex (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7866 part
= TREE_REALPART (expr
);
7867 rsize
= native_encode_expr (part
, ptr
, len
, off
);
7868 if (off
== -1 && rsize
== 0)
7870 part
= TREE_IMAGPART (expr
);
7872 off
= MAX (0, off
- GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part
))));
7873 isize
= native_encode_expr (part
, ptr
? ptr
+ rsize
: NULL
,
7875 if (off
== -1 && isize
!= rsize
)
7877 return rsize
+ isize
;
7880 /* Like native_encode_vector, but only encode the first COUNT elements.
7881 The other arguments are as for native_encode_vector. */
7884 native_encode_vector_part (const_tree expr
, unsigned char *ptr
, int len
,
7885 int off
, unsigned HOST_WIDE_INT count
)
7887 tree itype
= TREE_TYPE (TREE_TYPE (expr
));
7888 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (expr
))
7889 && TYPE_PRECISION (itype
) <= BITS_PER_UNIT
)
7891 /* This is the only case in which elements can be smaller than a byte.
7892 Element 0 is always in the lsb of the containing byte. */
7893 unsigned int elt_bits
= TYPE_PRECISION (itype
);
7894 int total_bytes
= CEIL (elt_bits
* count
, BITS_PER_UNIT
);
7895 if ((off
== -1 && total_bytes
> len
) || off
>= total_bytes
)
7901 /* Zero the buffer and then set bits later where necessary. */
7902 int extract_bytes
= MIN (len
, total_bytes
- off
);
7904 memset (ptr
, 0, extract_bytes
);
7906 unsigned int elts_per_byte
= BITS_PER_UNIT
/ elt_bits
;
7907 unsigned int first_elt
= off
* elts_per_byte
;
7908 unsigned int extract_elts
= extract_bytes
* elts_per_byte
;
7909 for (unsigned int i
= 0; i
< extract_elts
; ++i
)
7911 tree elt
= VECTOR_CST_ELT (expr
, first_elt
+ i
);
7912 if (TREE_CODE (elt
) != INTEGER_CST
)
7915 if (ptr
&& wi::extract_uhwi (wi::to_wide (elt
), 0, 1))
7917 unsigned int bit
= i
* elt_bits
;
7918 ptr
[bit
/ BITS_PER_UNIT
] |= 1 << (bit
% BITS_PER_UNIT
);
7921 return extract_bytes
;
7925 int size
= GET_MODE_SIZE (SCALAR_TYPE_MODE (itype
));
7926 for (unsigned HOST_WIDE_INT i
= 0; i
< count
; i
++)
7933 tree elem
= VECTOR_CST_ELT (expr
, i
);
7934 int res
= native_encode_expr (elem
, ptr
? ptr
+ offset
: NULL
,
7936 if ((off
== -1 && res
!= size
) || res
== 0)
7940 return (off
== -1 && i
< count
- 1) ? 0 : offset
;
7947 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7948 specified by EXPR into the buffer PTR of length LEN bytes.
7949 Return the number of bytes placed in the buffer, or zero
7953 native_encode_vector (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7955 unsigned HOST_WIDE_INT count
;
7956 if (!VECTOR_CST_NELTS (expr
).is_constant (&count
))
7958 return native_encode_vector_part (expr
, ptr
, len
, off
, count
);
7962 /* Subroutine of native_encode_expr. Encode the STRING_CST
7963 specified by EXPR into the buffer PTR of length LEN bytes.
7964 Return the number of bytes placed in the buffer, or zero
7968 native_encode_string (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7970 tree type
= TREE_TYPE (expr
);
7972 /* Wide-char strings are encoded in target byte-order so native
7973 encoding them is trivial. */
7974 if (BITS_PER_UNIT
!= CHAR_BIT
7975 || TREE_CODE (type
) != ARRAY_TYPE
7976 || TREE_CODE (TREE_TYPE (type
)) != INTEGER_TYPE
7977 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type
)))
7980 HOST_WIDE_INT total_bytes
= tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr
)));
7981 if ((off
== -1 && total_bytes
> len
) || off
>= total_bytes
)
7985 len
= MIN (total_bytes
- off
, len
);
7991 if (off
< TREE_STRING_LENGTH (expr
))
7993 written
= MIN (len
, TREE_STRING_LENGTH (expr
) - off
);
7994 memcpy (ptr
, TREE_STRING_POINTER (expr
) + off
, written
);
7996 memset (ptr
+ written
, 0, len
- written
);
8002 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST, REAL_CST,
8003 FIXED_CST, COMPLEX_CST, STRING_CST, or VECTOR_CST specified by EXPR into
8004 the buffer PTR of size LEN bytes. If PTR is NULL, don't actually store
8005 anything, just do a dry run. Fail either if OFF is -1 and LEN isn't
8006 sufficient to encode the entire EXPR, or if OFF is out of bounds.
8007 Otherwise, start at byte offset OFF and encode at most LEN bytes.
8008 Return the number of bytes placed in the buffer, or zero upon failure. */
8011 native_encode_expr (const_tree expr
, unsigned char *ptr
, int len
, int off
)
8013 /* We don't support starting at negative offset and -1 is special. */
8017 switch (TREE_CODE (expr
))
8020 return native_encode_int (expr
, ptr
, len
, off
);
8023 return native_encode_real (expr
, ptr
, len
, off
);
8026 return native_encode_fixed (expr
, ptr
, len
, off
);
8029 return native_encode_complex (expr
, ptr
, len
, off
);
8032 return native_encode_vector (expr
, ptr
, len
, off
);
8035 return native_encode_string (expr
, ptr
, len
, off
);
8042 /* Try to find a type whose byte size is smaller or equal to LEN bytes larger
8043 or equal to FIELDSIZE bytes, with underlying mode precision/size multiple
8044 of BITS_PER_UNIT. As native_{interpret,encode}_int works in term of
8045 machine modes, we can't just use build_nonstandard_integer_type. */
8048 find_bitfield_repr_type (int fieldsize
, int len
)
8051 for (int pass
= 0; pass
< 2; pass
++)
8053 enum mode_class mclass
= pass
? MODE_PARTIAL_INT
: MODE_INT
;
8054 FOR_EACH_MODE_IN_CLASS (mode
, mclass
)
8055 if (known_ge (GET_MODE_SIZE (mode
), fieldsize
)
8056 && known_eq (GET_MODE_PRECISION (mode
),
8057 GET_MODE_BITSIZE (mode
))
8058 && known_le (GET_MODE_SIZE (mode
), len
))
8060 tree ret
= lang_hooks
.types
.type_for_mode (mode
, 1);
8061 if (ret
&& TYPE_MODE (ret
) == mode
)
8066 for (int i
= 0; i
< NUM_INT_N_ENTS
; i
++)
8067 if (int_n_enabled_p
[i
]
8068 && int_n_data
[i
].bitsize
>= (unsigned) (BITS_PER_UNIT
* fieldsize
)
8069 && int_n_trees
[i
].unsigned_type
)
8071 tree ret
= int_n_trees
[i
].unsigned_type
;
8072 mode
= TYPE_MODE (ret
);
8073 if (known_ge (GET_MODE_SIZE (mode
), fieldsize
)
8074 && known_eq (GET_MODE_PRECISION (mode
),
8075 GET_MODE_BITSIZE (mode
))
8076 && known_le (GET_MODE_SIZE (mode
), len
))
8083 /* Similar to native_encode_expr, but also handle CONSTRUCTORs, VCEs,
8084 NON_LVALUE_EXPRs and nops. If MASK is non-NULL (then PTR has
8085 to be non-NULL and OFF zero), then in addition to filling the
8086 bytes pointed by PTR with the value also clear any bits pointed
8087 by MASK that are known to be initialized, keep them as is for
8088 e.g. uninitialized padding bits or uninitialized fields. */
8091 native_encode_initializer (tree init
, unsigned char *ptr
, int len
,
8092 int off
, unsigned char *mask
)
8096 /* We don't support starting at negative offset and -1 is special. */
8097 if (off
< -1 || init
== NULL_TREE
)
8100 gcc_assert (mask
== NULL
|| (off
== 0 && ptr
));
8103 switch (TREE_CODE (init
))
8105 case VIEW_CONVERT_EXPR
:
8106 case NON_LVALUE_EXPR
:
8107 return native_encode_initializer (TREE_OPERAND (init
, 0), ptr
, len
, off
,
8110 r
= native_encode_expr (init
, ptr
, len
, off
);
8112 memset (mask
, 0, r
);
8115 tree type
= TREE_TYPE (init
);
8116 HOST_WIDE_INT total_bytes
= int_size_in_bytes (type
);
8117 if (total_bytes
< 0)
8119 if ((off
== -1 && total_bytes
> len
) || off
>= total_bytes
)
8121 int o
= off
== -1 ? 0 : off
;
8122 if (TREE_CODE (type
) == ARRAY_TYPE
)
8125 unsigned HOST_WIDE_INT cnt
;
8126 HOST_WIDE_INT curpos
= 0, fieldsize
, valueinit
= -1;
8127 constructor_elt
*ce
;
8129 if (!TYPE_DOMAIN (type
)
8130 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (type
))) != INTEGER_CST
)
8133 fieldsize
= int_size_in_bytes (TREE_TYPE (type
));
8137 min_index
= TYPE_MIN_VALUE (TYPE_DOMAIN (type
));
8139 memset (ptr
, '\0', MIN (total_bytes
- off
, len
));
8141 for (cnt
= 0; ; cnt
++)
8143 tree val
= NULL_TREE
, index
= NULL_TREE
;
8144 HOST_WIDE_INT pos
= curpos
, count
= 0;
8146 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init
), cnt
, &ce
))
8151 else if (mask
== NULL
8152 || CONSTRUCTOR_NO_CLEARING (init
)
8153 || curpos
>= total_bytes
)
8158 if (index
&& TREE_CODE (index
) == RANGE_EXPR
)
8160 if (TREE_CODE (TREE_OPERAND (index
, 0)) != INTEGER_CST
8161 || TREE_CODE (TREE_OPERAND (index
, 1)) != INTEGER_CST
)
8164 = wi::sext (wi::to_offset (TREE_OPERAND (index
, 0))
8165 - wi::to_offset (min_index
),
8166 TYPE_PRECISION (sizetype
));
8168 if (!wi::fits_shwi_p (pos
))
8170 pos
= wpos
.to_shwi ();
8172 = wi::sext (wi::to_offset (TREE_OPERAND (index
, 1))
8173 - wi::to_offset (TREE_OPERAND (index
, 0)),
8174 TYPE_PRECISION (sizetype
));
8175 if (!wi::fits_shwi_p (wcount
))
8177 count
= wcount
.to_shwi ();
8181 if (TREE_CODE (index
) != INTEGER_CST
)
8184 = wi::sext (wi::to_offset (index
)
8185 - wi::to_offset (min_index
),
8186 TYPE_PRECISION (sizetype
));
8188 if (!wi::fits_shwi_p (wpos
))
8190 pos
= wpos
.to_shwi ();
8193 if (mask
&& !CONSTRUCTOR_NO_CLEARING (init
) && curpos
!= pos
)
8195 if (valueinit
== -1)
8197 tree zero
= build_zero_cst (TREE_TYPE (type
));
8198 r
= native_encode_initializer (zero
, ptr
+ curpos
,
8201 if (TREE_CODE (zero
) == CONSTRUCTOR
)
8206 curpos
+= fieldsize
;
8208 while (curpos
!= pos
)
8210 memcpy (ptr
+ curpos
, ptr
+ valueinit
, fieldsize
);
8211 memcpy (mask
+ curpos
, mask
+ valueinit
, fieldsize
);
8212 curpos
+= fieldsize
;
8222 && (curpos
+ fieldsize
8223 <= (HOST_WIDE_INT
) off
+ len
)))
8228 memcpy (ptr
+ (curpos
- o
), ptr
+ (pos
- o
),
8231 memcpy (mask
+ curpos
, mask
+ pos
, fieldsize
);
8233 else if (!native_encode_initializer (val
,
8250 else if (curpos
+ fieldsize
> off
8251 && curpos
< (HOST_WIDE_INT
) off
+ len
)
8253 /* Partial overlap. */
8254 unsigned char *p
= NULL
;
8257 gcc_assert (mask
== NULL
);
8261 p
= ptr
+ curpos
- off
;
8262 l
= MIN ((HOST_WIDE_INT
) off
+ len
- curpos
,
8271 if (!native_encode_initializer (val
, p
, l
, no
, NULL
))
8274 curpos
+= fieldsize
;
8276 while (count
-- != 0);
8278 return MIN (total_bytes
- off
, len
);
8280 else if (TREE_CODE (type
) == RECORD_TYPE
8281 || TREE_CODE (type
) == UNION_TYPE
)
8283 unsigned HOST_WIDE_INT cnt
;
8284 constructor_elt
*ce
;
8285 tree fld_base
= TYPE_FIELDS (type
);
8286 tree to_free
= NULL_TREE
;
8288 gcc_assert (TREE_CODE (type
) == RECORD_TYPE
|| mask
== NULL
);
8290 memset (ptr
, '\0', MIN (total_bytes
- o
, len
));
8291 for (cnt
= 0; ; cnt
++)
8293 tree val
= NULL_TREE
, field
= NULL_TREE
;
8294 HOST_WIDE_INT pos
= 0, fieldsize
;
8295 unsigned HOST_WIDE_INT bpos
= 0, epos
= 0;
8300 to_free
= NULL_TREE
;
8303 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init
), cnt
, &ce
))
8307 if (field
== NULL_TREE
)
8310 pos
= int_byte_position (field
);
8311 if (off
!= -1 && (HOST_WIDE_INT
) off
+ len
<= pos
)
8314 else if (mask
== NULL
8315 || CONSTRUCTOR_NO_CLEARING (init
))
8320 if (mask
&& !CONSTRUCTOR_NO_CLEARING (init
))
8323 for (fld
= fld_base
; fld
; fld
= DECL_CHAIN (fld
))
8325 if (TREE_CODE (fld
) != FIELD_DECL
)
8329 if (DECL_PADDING_P (fld
))
8331 if (DECL_SIZE_UNIT (fld
) == NULL_TREE
8332 || !tree_fits_shwi_p (DECL_SIZE_UNIT (fld
)))
8334 if (integer_zerop (DECL_SIZE_UNIT (fld
)))
8338 if (fld
== NULL_TREE
)
8344 fld_base
= DECL_CHAIN (fld
);
8349 pos
= int_byte_position (field
);
8350 val
= build_zero_cst (TREE_TYPE (fld
));
8351 if (TREE_CODE (val
) == CONSTRUCTOR
)
8356 if (TREE_CODE (TREE_TYPE (field
)) == ARRAY_TYPE
8357 && TYPE_DOMAIN (TREE_TYPE (field
))
8358 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field
))))
8360 if (mask
|| off
!= -1)
8362 if (val
== NULL_TREE
)
8364 if (TREE_CODE (TREE_TYPE (val
)) != ARRAY_TYPE
)
8366 fieldsize
= int_size_in_bytes (TREE_TYPE (val
));
8368 || (int) fieldsize
!= fieldsize
8369 || (pos
+ fieldsize
) > INT_MAX
)
8371 if (pos
+ fieldsize
> total_bytes
)
8373 if (ptr
!= NULL
&& total_bytes
< len
)
8374 memset (ptr
+ total_bytes
, '\0',
8375 MIN (pos
+ fieldsize
, len
) - total_bytes
);
8376 total_bytes
= pos
+ fieldsize
;
8381 if (DECL_SIZE_UNIT (field
) == NULL_TREE
8382 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field
)))
8384 fieldsize
= tree_to_shwi (DECL_SIZE_UNIT (field
));
8389 /* Prepare to deal with integral bit-fields and filter out other
8390 bit-fields that do not start and end on a byte boundary. */
8391 if (DECL_BIT_FIELD (field
))
8393 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field
)))
8395 bpos
= tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field
));
8396 if (INTEGRAL_TYPE_P (TREE_TYPE (field
)))
8398 bpos
%= BITS_PER_UNIT
;
8399 fieldsize
= TYPE_PRECISION (TREE_TYPE (field
)) + bpos
;
8400 epos
= fieldsize
% BITS_PER_UNIT
;
8401 fieldsize
+= BITS_PER_UNIT
- 1;
8402 fieldsize
/= BITS_PER_UNIT
;
8404 else if (bpos
% BITS_PER_UNIT
8405 || DECL_SIZE (field
) == NULL_TREE
8406 || !tree_fits_shwi_p (DECL_SIZE (field
))
8407 || tree_to_shwi (DECL_SIZE (field
)) % BITS_PER_UNIT
)
8411 if (off
!= -1 && pos
+ fieldsize
<= off
)
8414 if (val
== NULL_TREE
)
8417 if (DECL_BIT_FIELD (field
)
8418 && INTEGRAL_TYPE_P (TREE_TYPE (field
)))
8420 /* FIXME: Handle PDP endian. */
8421 if (BYTES_BIG_ENDIAN
!= WORDS_BIG_ENDIAN
)
8424 if (TREE_CODE (val
) != INTEGER_CST
)
8427 tree repr
= DECL_BIT_FIELD_REPRESENTATIVE (field
);
8428 tree repr_type
= NULL_TREE
;
8429 HOST_WIDE_INT rpos
= 0;
8430 if (repr
&& INTEGRAL_TYPE_P (TREE_TYPE (repr
)))
8432 rpos
= int_byte_position (repr
);
8433 repr_type
= TREE_TYPE (repr
);
8437 repr_type
= find_bitfield_repr_type (fieldsize
, len
);
8438 if (repr_type
== NULL_TREE
)
8440 HOST_WIDE_INT repr_size
= int_size_in_bytes (repr_type
);
8441 gcc_assert (repr_size
> 0 && repr_size
<= len
);
8442 if (pos
+ repr_size
<= o
+ len
)
8446 rpos
= o
+ len
- repr_size
;
8447 gcc_assert (rpos
<= pos
);
8453 wide_int w
= wi::to_wide (val
, TYPE_PRECISION (repr_type
));
8454 int diff
= (TYPE_PRECISION (repr_type
)
8455 - TYPE_PRECISION (TREE_TYPE (field
)));
8456 HOST_WIDE_INT bitoff
= (pos
- rpos
) * BITS_PER_UNIT
+ bpos
;
8457 if (!BYTES_BIG_ENDIAN
)
8458 w
= wi::lshift (w
, bitoff
);
8460 w
= wi::lshift (w
, diff
- bitoff
);
8461 val
= wide_int_to_tree (repr_type
, w
);
8463 unsigned char buf
[MAX_BITSIZE_MODE_ANY_INT
8464 / BITS_PER_UNIT
+ 1];
8465 int l
= native_encode_int (val
, buf
, sizeof buf
, 0);
8466 if (l
* BITS_PER_UNIT
!= TYPE_PRECISION (repr_type
))
8472 /* If the bitfield does not start at byte boundary, handle
8473 the partial byte at the start. */
8475 && (off
== -1 || (pos
>= off
&& len
>= 1)))
8477 if (!BYTES_BIG_ENDIAN
)
8479 int msk
= (1 << bpos
) - 1;
8480 buf
[pos
- rpos
] &= ~msk
;
8481 buf
[pos
- rpos
] |= ptr
[pos
- o
] & msk
;
8484 if (fieldsize
> 1 || epos
== 0)
8487 mask
[pos
] &= (msk
| ~((1 << epos
) - 1));
8492 int msk
= (1 << (BITS_PER_UNIT
- bpos
)) - 1;
8493 buf
[pos
- rpos
] &= msk
;
8494 buf
[pos
- rpos
] |= ptr
[pos
- o
] & ~msk
;
8497 if (fieldsize
> 1 || epos
== 0)
8501 | ((1 << (BITS_PER_UNIT
- epos
))
8506 /* If the bitfield does not end at byte boundary, handle
8507 the partial byte at the end. */
8510 || pos
+ fieldsize
<= (HOST_WIDE_INT
) off
+ len
))
8512 if (!BYTES_BIG_ENDIAN
)
8514 int msk
= (1 << epos
) - 1;
8515 buf
[pos
- rpos
+ fieldsize
- 1] &= msk
;
8516 buf
[pos
- rpos
+ fieldsize
- 1]
8517 |= ptr
[pos
+ fieldsize
- 1 - o
] & ~msk
;
8518 if (mask
&& (fieldsize
> 1 || bpos
== 0))
8519 mask
[pos
+ fieldsize
- 1] &= ~msk
;
8523 int msk
= (1 << (BITS_PER_UNIT
- epos
)) - 1;
8524 buf
[pos
- rpos
+ fieldsize
- 1] &= ~msk
;
8525 buf
[pos
- rpos
+ fieldsize
- 1]
8526 |= ptr
[pos
+ fieldsize
- 1 - o
] & msk
;
8527 if (mask
&& (fieldsize
> 1 || bpos
== 0))
8528 mask
[pos
+ fieldsize
- 1] &= msk
;
8533 && (pos
+ fieldsize
<= (HOST_WIDE_INT
) off
+ len
)))
8535 memcpy (ptr
+ pos
- o
, buf
+ (pos
- rpos
), fieldsize
);
8536 if (mask
&& (fieldsize
> (bpos
!= 0) + (epos
!= 0)))
8537 memset (mask
+ pos
+ (bpos
!= 0), 0,
8538 fieldsize
- (bpos
!= 0) - (epos
!= 0));
8542 /* Partial overlap. */
8543 HOST_WIDE_INT fsz
= fieldsize
;
8544 gcc_assert (mask
== NULL
);
8550 if (pos
+ fsz
> (HOST_WIDE_INT
) off
+ len
)
8551 fsz
= (HOST_WIDE_INT
) off
+ len
- pos
;
8552 memcpy (ptr
+ pos
- off
, buf
+ (pos
- rpos
), fsz
);
8559 && (pos
+ fieldsize
<= (HOST_WIDE_INT
) off
+ len
)))
8561 int fldsize
= fieldsize
;
8564 tree fld
= DECL_CHAIN (field
);
8567 if (TREE_CODE (fld
) == FIELD_DECL
)
8569 fld
= DECL_CHAIN (fld
);
8571 if (fld
== NULL_TREE
)
8572 fldsize
= len
- pos
;
8574 r
= native_encode_initializer (val
, ptr
? ptr
+ pos
- o
8578 mask
? mask
+ pos
: NULL
);
8582 && fldsize
!= fieldsize
8584 && pos
+ r
> total_bytes
)
8585 total_bytes
= pos
+ r
;
8589 /* Partial overlap. */
8590 unsigned char *p
= NULL
;
8593 gcc_assert (mask
== NULL
);
8597 p
= ptr
+ pos
- off
;
8598 l
= MIN ((HOST_WIDE_INT
) off
+ len
- pos
,
8607 if (!native_encode_initializer (val
, p
, l
, no
, NULL
))
8611 return MIN (total_bytes
- off
, len
);
8618 /* Subroutine of native_interpret_expr. Interpret the contents of
8619 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
8620 If the buffer cannot be interpreted, return NULL_TREE. */
8623 native_interpret_int (tree type
, const unsigned char *ptr
, int len
)
8625 int total_bytes
= GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type
));
8627 if (total_bytes
> len
8628 || total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
8631 wide_int result
= wi::from_buffer (ptr
, total_bytes
);
8633 return wide_int_to_tree (type
, result
);
8637 /* Subroutine of native_interpret_expr. Interpret the contents of
8638 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
8639 If the buffer cannot be interpreted, return NULL_TREE. */
8642 native_interpret_fixed (tree type
, const unsigned char *ptr
, int len
)
8644 scalar_mode mode
= SCALAR_TYPE_MODE (type
);
8645 int total_bytes
= GET_MODE_SIZE (mode
);
8647 FIXED_VALUE_TYPE fixed_value
;
8649 if (total_bytes
> len
8650 || total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
8653 result
= double_int::from_buffer (ptr
, total_bytes
);
8654 fixed_value
= fixed_from_double_int (result
, mode
);
8656 return build_fixed (type
, fixed_value
);
8660 /* Subroutine of native_interpret_expr. Interpret the contents of
8661 the buffer PTR of length LEN as a REAL_CST of type TYPE.
8662 If the buffer cannot be interpreted, return NULL_TREE. */
8665 native_interpret_real (tree type
, const unsigned char *ptr
, int len
)
8667 scalar_float_mode mode
= SCALAR_FLOAT_TYPE_MODE (type
);
8668 int total_bytes
= GET_MODE_SIZE (mode
);
8669 unsigned char value
;
8670 /* There are always 32 bits in each long, no matter the size of
8671 the hosts long. We handle floating point representations with
8676 if (total_bytes
> len
|| total_bytes
> 24)
8678 int words
= (32 / BITS_PER_UNIT
) / UNITS_PER_WORD
;
8680 memset (tmp
, 0, sizeof (tmp
));
8681 for (int bitpos
= 0; bitpos
< total_bytes
* BITS_PER_UNIT
;
8682 bitpos
+= BITS_PER_UNIT
)
8684 /* Both OFFSET and BYTE index within a long;
8685 bitpos indexes the whole float. */
8686 int offset
, byte
= (bitpos
/ BITS_PER_UNIT
) & 3;
8687 if (UNITS_PER_WORD
< 4)
8689 int word
= byte
/ UNITS_PER_WORD
;
8690 if (WORDS_BIG_ENDIAN
)
8691 word
= (words
- 1) - word
;
8692 offset
= word
* UNITS_PER_WORD
;
8693 if (BYTES_BIG_ENDIAN
)
8694 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
8696 offset
+= byte
% UNITS_PER_WORD
;
8701 if (BYTES_BIG_ENDIAN
)
8703 /* Reverse bytes within each long, or within the entire float
8704 if it's smaller than a long (for HFmode). */
8705 offset
= MIN (3, total_bytes
- 1) - offset
;
8706 gcc_assert (offset
>= 0);
8709 value
= ptr
[offset
+ ((bitpos
/ BITS_PER_UNIT
) & ~3)];
8711 tmp
[bitpos
/ 32] |= (unsigned long)value
<< (bitpos
& 31);
8714 real_from_target (&r
, tmp
, mode
);
8715 return build_real (type
, r
);
8719 /* Subroutine of native_interpret_expr. Interpret the contents of
8720 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8721 If the buffer cannot be interpreted, return NULL_TREE. */
8724 native_interpret_complex (tree type
, const unsigned char *ptr
, int len
)
8726 tree etype
, rpart
, ipart
;
8729 etype
= TREE_TYPE (type
);
8730 size
= GET_MODE_SIZE (SCALAR_TYPE_MODE (etype
));
8733 rpart
= native_interpret_expr (etype
, ptr
, size
);
8736 ipart
= native_interpret_expr (etype
, ptr
+size
, size
);
8739 return build_complex (type
, rpart
, ipart
);
8742 /* Read a vector of type TYPE from the target memory image given by BYTES,
8743 which contains LEN bytes. The vector is known to be encodable using
8744 NPATTERNS interleaved patterns with NELTS_PER_PATTERN elements each.
8746 Return the vector on success, otherwise return null. */
8749 native_interpret_vector_part (tree type
, const unsigned char *bytes
,
8750 unsigned int len
, unsigned int npatterns
,
8751 unsigned int nelts_per_pattern
)
8753 tree elt_type
= TREE_TYPE (type
);
8754 if (VECTOR_BOOLEAN_TYPE_P (type
)
8755 && TYPE_PRECISION (elt_type
) <= BITS_PER_UNIT
)
8757 /* This is the only case in which elements can be smaller than a byte.
8758 Element 0 is always in the lsb of the containing byte. */
8759 unsigned int elt_bits
= TYPE_PRECISION (elt_type
);
8760 if (elt_bits
* npatterns
* nelts_per_pattern
> len
* BITS_PER_UNIT
)
8763 tree_vector_builder
builder (type
, npatterns
, nelts_per_pattern
);
8764 for (unsigned int i
= 0; i
< builder
.encoded_nelts (); ++i
)
8766 unsigned int bit_index
= i
* elt_bits
;
8767 unsigned int byte_index
= bit_index
/ BITS_PER_UNIT
;
8768 unsigned int lsb
= bit_index
% BITS_PER_UNIT
;
8769 builder
.quick_push (bytes
[byte_index
] & (1 << lsb
)
8770 ? build_all_ones_cst (elt_type
)
8771 : build_zero_cst (elt_type
));
8773 return builder
.build ();
8776 unsigned int elt_bytes
= tree_to_uhwi (TYPE_SIZE_UNIT (elt_type
));
8777 if (elt_bytes
* npatterns
* nelts_per_pattern
> len
)
8780 tree_vector_builder
builder (type
, npatterns
, nelts_per_pattern
);
8781 for (unsigned int i
= 0; i
< builder
.encoded_nelts (); ++i
)
8783 tree elt
= native_interpret_expr (elt_type
, bytes
, elt_bytes
);
8786 builder
.quick_push (elt
);
8789 return builder
.build ();
8792 /* Subroutine of native_interpret_expr. Interpret the contents of
8793 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8794 If the buffer cannot be interpreted, return NULL_TREE. */
8797 native_interpret_vector (tree type
, const unsigned char *ptr
, unsigned int len
)
8799 unsigned HOST_WIDE_INT size
;
8801 if (!tree_to_poly_uint64 (TYPE_SIZE_UNIT (type
)).is_constant (&size
)
8805 unsigned HOST_WIDE_INT count
= TYPE_VECTOR_SUBPARTS (type
).to_constant ();
8806 return native_interpret_vector_part (type
, ptr
, len
, count
, 1);
8810 /* Subroutine of fold_view_convert_expr. Interpret the contents of
8811 the buffer PTR of length LEN as a constant of type TYPE. For
8812 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
8813 we return a REAL_CST, etc... If the buffer cannot be interpreted,
8814 return NULL_TREE. */
8817 native_interpret_expr (tree type
, const unsigned char *ptr
, int len
)
8819 switch (TREE_CODE (type
))
8825 case REFERENCE_TYPE
:
8827 return native_interpret_int (type
, ptr
, len
);
8830 if (tree ret
= native_interpret_real (type
, ptr
, len
))
8832 /* For floating point values in composite modes, punt if this
8833 folding doesn't preserve bit representation. As the mode doesn't
8834 have fixed precision while GCC pretends it does, there could be
8835 valid values that GCC can't really represent accurately.
8836 See PR95450. Even for other modes, e.g. x86 XFmode can have some
8837 bit combinationations which GCC doesn't preserve. */
8838 unsigned char buf
[24 * 2];
8839 scalar_float_mode mode
= SCALAR_FLOAT_TYPE_MODE (type
);
8840 int total_bytes
= GET_MODE_SIZE (mode
);
8841 memcpy (buf
+ 24, ptr
, total_bytes
);
8842 clear_type_padding_in_mask (type
, buf
+ 24);
8843 if (native_encode_expr (ret
, buf
, total_bytes
, 0) != total_bytes
8844 || memcmp (buf
+ 24, buf
, total_bytes
) != 0)
8850 case FIXED_POINT_TYPE
:
8851 return native_interpret_fixed (type
, ptr
, len
);
8854 return native_interpret_complex (type
, ptr
, len
);
8857 return native_interpret_vector (type
, ptr
, len
);
8864 /* Returns true if we can interpret the contents of a native encoding
8868 can_native_interpret_type_p (tree type
)
8870 switch (TREE_CODE (type
))
8876 case REFERENCE_TYPE
:
8877 case FIXED_POINT_TYPE
:
8888 /* Attempt to interpret aggregate of TYPE from bytes encoded in target
8889 byte order at PTR + OFF with LEN bytes. Does not handle unions. */
8892 native_interpret_aggregate (tree type
, const unsigned char *ptr
, int off
,
8895 vec
<constructor_elt
, va_gc
> *elts
= NULL
;
8896 if (TREE_CODE (type
) == ARRAY_TYPE
)
8898 HOST_WIDE_INT eltsz
= int_size_in_bytes (TREE_TYPE (type
));
8899 if (eltsz
< 0 || eltsz
> len
|| TYPE_DOMAIN (type
) == NULL_TREE
)
8902 HOST_WIDE_INT cnt
= 0;
8903 if (TYPE_MAX_VALUE (TYPE_DOMAIN (type
)))
8905 if (!tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type
))))
8907 cnt
= tree_to_shwi (TYPE_MAX_VALUE (TYPE_DOMAIN (type
))) + 1;
8911 HOST_WIDE_INT pos
= 0;
8912 for (HOST_WIDE_INT i
= 0; i
< cnt
; i
++, pos
+= eltsz
)
8915 if (pos
>= len
|| pos
+ eltsz
> len
)
8917 if (can_native_interpret_type_p (TREE_TYPE (type
)))
8919 v
= native_interpret_expr (TREE_TYPE (type
),
8920 ptr
+ off
+ pos
, eltsz
);
8924 else if (TREE_CODE (TREE_TYPE (type
)) == RECORD_TYPE
8925 || TREE_CODE (TREE_TYPE (type
)) == ARRAY_TYPE
)
8926 v
= native_interpret_aggregate (TREE_TYPE (type
), ptr
, off
+ pos
,
8930 CONSTRUCTOR_APPEND_ELT (elts
, size_int (i
), v
);
8932 return build_constructor (type
, elts
);
8934 if (TREE_CODE (type
) != RECORD_TYPE
)
8936 for (tree field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
8938 if (TREE_CODE (field
) != FIELD_DECL
|| DECL_PADDING_P (field
))
8941 HOST_WIDE_INT bitoff
= 0, pos
= 0, sz
= 0;
8944 if (DECL_BIT_FIELD (field
))
8946 fld
= DECL_BIT_FIELD_REPRESENTATIVE (field
);
8947 if (fld
&& INTEGRAL_TYPE_P (TREE_TYPE (fld
)))
8949 poly_int64 bitoffset
;
8950 poly_uint64 field_offset
, fld_offset
;
8951 if (poly_int_tree_p (DECL_FIELD_OFFSET (field
), &field_offset
)
8952 && poly_int_tree_p (DECL_FIELD_OFFSET (fld
), &fld_offset
))
8953 bitoffset
= (field_offset
- fld_offset
) * BITS_PER_UNIT
;
8956 bitoffset
+= (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field
))
8957 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld
)));
8958 diff
= (TYPE_PRECISION (TREE_TYPE (fld
))
8959 - TYPE_PRECISION (TREE_TYPE (field
)));
8960 if (!bitoffset
.is_constant (&bitoff
)
8967 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field
)))
8969 int fieldsize
= TYPE_PRECISION (TREE_TYPE (field
));
8970 int bpos
= tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field
));
8971 bpos
%= BITS_PER_UNIT
;
8973 fieldsize
+= BITS_PER_UNIT
- 1;
8974 fieldsize
/= BITS_PER_UNIT
;
8975 tree repr_type
= find_bitfield_repr_type (fieldsize
, len
);
8976 if (repr_type
== NULL_TREE
)
8978 sz
= int_size_in_bytes (repr_type
);
8979 if (sz
< 0 || sz
> len
)
8981 pos
= int_byte_position (field
);
8982 if (pos
< 0 || pos
> len
|| pos
+ fieldsize
> len
)
8985 if (pos
+ sz
<= len
)
8990 gcc_assert (rpos
<= pos
);
8992 bitoff
= (HOST_WIDE_INT
) (pos
- rpos
) * BITS_PER_UNIT
+ bpos
;
8994 diff
= (TYPE_PRECISION (repr_type
)
8995 - TYPE_PRECISION (TREE_TYPE (field
)));
8996 v
= native_interpret_expr (repr_type
, ptr
+ off
+ pos
, sz
);
9005 sz
= int_size_in_bytes (TREE_TYPE (fld
));
9006 if (sz
< 0 || sz
> len
)
9008 tree byte_pos
= byte_position (fld
);
9009 if (!tree_fits_shwi_p (byte_pos
))
9011 pos
= tree_to_shwi (byte_pos
);
9012 if (pos
< 0 || pos
> len
|| pos
+ sz
> len
)
9015 if (fld
== NULL_TREE
)
9016 /* Already handled above. */;
9017 else if (can_native_interpret_type_p (TREE_TYPE (fld
)))
9019 v
= native_interpret_expr (TREE_TYPE (fld
),
9020 ptr
+ off
+ pos
, sz
);
9024 else if (TREE_CODE (TREE_TYPE (fld
)) == RECORD_TYPE
9025 || TREE_CODE (TREE_TYPE (fld
)) == ARRAY_TYPE
)
9026 v
= native_interpret_aggregate (TREE_TYPE (fld
), ptr
, off
+ pos
, sz
);
9031 if (TREE_CODE (v
) != INTEGER_CST
)
9034 /* FIXME: Figure out how to handle PDP endian bitfields. */
9035 if (BYTES_BIG_ENDIAN
!= WORDS_BIG_ENDIAN
)
9037 if (!BYTES_BIG_ENDIAN
)
9038 v
= wide_int_to_tree (TREE_TYPE (field
),
9039 wi::lrshift (wi::to_wide (v
), bitoff
));
9041 v
= wide_int_to_tree (TREE_TYPE (field
),
9042 wi::lrshift (wi::to_wide (v
),
9045 CONSTRUCTOR_APPEND_ELT (elts
, field
, v
);
9047 return build_constructor (type
, elts
);
9050 /* Routines for manipulation of native_encode_expr encoded data if the encoded
9051 or extracted constant positions and/or sizes aren't byte aligned. */
9053 /* Shift left the bytes in PTR of SZ elements by AMNT bits, carrying over the
9054 bits between adjacent elements. AMNT should be within
9057 00011111|11100000 << 2 = 01111111|10000000
9058 PTR[1] | PTR[0] PTR[1] | PTR[0]. */
9061 shift_bytes_in_array_left (unsigned char *ptr
, unsigned int sz
,
9067 unsigned char carry_over
= 0U;
9068 unsigned char carry_mask
= (~0U) << (unsigned char) (BITS_PER_UNIT
- amnt
);
9069 unsigned char clear_mask
= (~0U) << amnt
;
9071 for (unsigned int i
= 0; i
< sz
; i
++)
9073 unsigned prev_carry_over
= carry_over
;
9074 carry_over
= (ptr
[i
] & carry_mask
) >> (BITS_PER_UNIT
- amnt
);
9079 ptr
[i
] &= clear_mask
;
9080 ptr
[i
] |= prev_carry_over
;
9085 /* Like shift_bytes_in_array_left but for big-endian.
9086 Shift right the bytes in PTR of SZ elements by AMNT bits, carrying over the
9087 bits between adjacent elements. AMNT should be within
9090 00011111|11100000 >> 2 = 00000111|11111000
9091 PTR[0] | PTR[1] PTR[0] | PTR[1]. */
9094 shift_bytes_in_array_right (unsigned char *ptr
, unsigned int sz
,
9100 unsigned char carry_over
= 0U;
9101 unsigned char carry_mask
= ~(~0U << amnt
);
9103 for (unsigned int i
= 0; i
< sz
; i
++)
9105 unsigned prev_carry_over
= carry_over
;
9106 carry_over
= ptr
[i
] & carry_mask
;
9108 carry_over
<<= (unsigned char) BITS_PER_UNIT
- amnt
;
9110 ptr
[i
] |= prev_carry_over
;
9114 /* Try to view-convert VECTOR_CST EXPR to VECTOR_TYPE TYPE by operating
9115 directly on the VECTOR_CST encoding, in a way that works for variable-
9116 length vectors. Return the resulting VECTOR_CST on success or null
9120 fold_view_convert_vector_encoding (tree type
, tree expr
)
9122 tree expr_type
= TREE_TYPE (expr
);
9123 poly_uint64 type_bits
, expr_bits
;
9124 if (!poly_int_tree_p (TYPE_SIZE (type
), &type_bits
)
9125 || !poly_int_tree_p (TYPE_SIZE (expr_type
), &expr_bits
))
9128 poly_uint64 type_units
= TYPE_VECTOR_SUBPARTS (type
);
9129 poly_uint64 expr_units
= TYPE_VECTOR_SUBPARTS (expr_type
);
9130 unsigned int type_elt_bits
= vector_element_size (type_bits
, type_units
);
9131 unsigned int expr_elt_bits
= vector_element_size (expr_bits
, expr_units
);
9133 /* We can only preserve the semantics of a stepped pattern if the new
9134 vector element is an integer of the same size. */
9135 if (VECTOR_CST_STEPPED_P (expr
)
9136 && (!INTEGRAL_TYPE_P (type
) || type_elt_bits
!= expr_elt_bits
))
9139 /* The number of bits needed to encode one element from every pattern
9140 of the original vector. */
9141 unsigned int expr_sequence_bits
9142 = VECTOR_CST_NPATTERNS (expr
) * expr_elt_bits
;
9144 /* The number of bits needed to encode one element from every pattern
9146 unsigned int type_sequence_bits
9147 = least_common_multiple (expr_sequence_bits
, type_elt_bits
);
9149 /* Don't try to read more bytes than are available, which can happen
9150 for constant-sized vectors if TYPE has larger elements than EXPR_TYPE.
9151 The general VIEW_CONVERT handling can cope with that case, so there's
9152 no point complicating things here. */
9153 unsigned int nelts_per_pattern
= VECTOR_CST_NELTS_PER_PATTERN (expr
);
9154 unsigned int buffer_bytes
= CEIL (nelts_per_pattern
* type_sequence_bits
,
9156 unsigned int buffer_bits
= buffer_bytes
* BITS_PER_UNIT
;
9157 if (known_gt (buffer_bits
, expr_bits
))
9160 /* Get enough bytes of EXPR to form the new encoding. */
9161 auto_vec
<unsigned char, 128> buffer (buffer_bytes
);
9162 buffer
.quick_grow (buffer_bytes
);
9163 if (native_encode_vector_part (expr
, buffer
.address (), buffer_bytes
, 0,
9164 buffer_bits
/ expr_elt_bits
)
9165 != (int) buffer_bytes
)
9168 /* Reencode the bytes as TYPE. */
9169 unsigned int type_npatterns
= type_sequence_bits
/ type_elt_bits
;
9170 return native_interpret_vector_part (type
, &buffer
[0], buffer
.length (),
9171 type_npatterns
, nelts_per_pattern
);
9174 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
9175 TYPE at compile-time. If we're unable to perform the conversion
9176 return NULL_TREE. */
9179 fold_view_convert_expr (tree type
, tree expr
)
9181 /* We support up to 512-bit values (for V8DFmode). */
9182 unsigned char buffer
[64];
9185 /* Check that the host and target are sane. */
9186 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8)
9189 if (VECTOR_TYPE_P (type
) && TREE_CODE (expr
) == VECTOR_CST
)
9190 if (tree res
= fold_view_convert_vector_encoding (type
, expr
))
9193 len
= native_encode_expr (expr
, buffer
, sizeof (buffer
));
9197 return native_interpret_expr (type
, buffer
, len
);
9200 /* Build an expression for the address of T. Folds away INDIRECT_REF
9201 to avoid confusing the gimplify process. */
9204 build_fold_addr_expr_with_type_loc (location_t loc
, tree t
, tree ptrtype
)
9206 /* The size of the object is not relevant when talking about its address. */
9207 if (TREE_CODE (t
) == WITH_SIZE_EXPR
)
9208 t
= TREE_OPERAND (t
, 0);
9210 if (INDIRECT_REF_P (t
))
9212 t
= TREE_OPERAND (t
, 0);
9214 if (TREE_TYPE (t
) != ptrtype
)
9215 t
= build1_loc (loc
, NOP_EXPR
, ptrtype
, t
);
9217 else if (TREE_CODE (t
) == MEM_REF
9218 && integer_zerop (TREE_OPERAND (t
, 1)))
9220 t
= TREE_OPERAND (t
, 0);
9222 if (TREE_TYPE (t
) != ptrtype
)
9223 t
= fold_convert_loc (loc
, ptrtype
, t
);
9225 else if (TREE_CODE (t
) == MEM_REF
9226 && TREE_CODE (TREE_OPERAND (t
, 0)) == INTEGER_CST
)
9227 return fold_binary (POINTER_PLUS_EXPR
, ptrtype
,
9228 TREE_OPERAND (t
, 0),
9229 convert_to_ptrofftype (TREE_OPERAND (t
, 1)));
9230 else if (TREE_CODE (t
) == VIEW_CONVERT_EXPR
)
9232 t
= build_fold_addr_expr_loc (loc
, TREE_OPERAND (t
, 0));
9234 if (TREE_TYPE (t
) != ptrtype
)
9235 t
= fold_convert_loc (loc
, ptrtype
, t
);
9238 t
= build1_loc (loc
, ADDR_EXPR
, ptrtype
, t
);
9243 /* Build an expression for the address of T. */
9246 build_fold_addr_expr_loc (location_t loc
, tree t
)
9248 tree ptrtype
= build_pointer_type (TREE_TYPE (t
));
9250 return build_fold_addr_expr_with_type_loc (loc
, t
, ptrtype
);
9253 /* Fold a unary expression of code CODE and type TYPE with operand
9254 OP0. Return the folded expression if folding is successful.
9255 Otherwise, return NULL_TREE. */
9258 fold_unary_loc (location_t loc
, enum tree_code code
, tree type
, tree op0
)
9262 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
9264 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
9265 && TREE_CODE_LENGTH (code
) == 1);
9270 if (CONVERT_EXPR_CODE_P (code
)
9271 || code
== FLOAT_EXPR
|| code
== ABS_EXPR
|| code
== NEGATE_EXPR
)
9273 /* Don't use STRIP_NOPS, because signedness of argument type
9275 STRIP_SIGN_NOPS (arg0
);
9279 /* Strip any conversions that don't change the mode. This
9280 is safe for every expression, except for a comparison
9281 expression because its signedness is derived from its
9284 Note that this is done as an internal manipulation within
9285 the constant folder, in order to find the simplest
9286 representation of the arguments so that their form can be
9287 studied. In any cases, the appropriate type conversions
9288 should be put back in the tree that will get out of the
9293 if (CONSTANT_CLASS_P (arg0
))
9295 tree tem
= const_unop (code
, type
, arg0
);
9298 if (TREE_TYPE (tem
) != type
)
9299 tem
= fold_convert_loc (loc
, type
, tem
);
9305 tem
= generic_simplify (loc
, code
, type
, op0
);
9309 if (TREE_CODE_CLASS (code
) == tcc_unary
)
9311 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
9312 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
9313 fold_build1_loc (loc
, code
, type
,
9314 fold_convert_loc (loc
, TREE_TYPE (op0
),
9315 TREE_OPERAND (arg0
, 1))));
9316 else if (TREE_CODE (arg0
) == COND_EXPR
)
9318 tree arg01
= TREE_OPERAND (arg0
, 1);
9319 tree arg02
= TREE_OPERAND (arg0
, 2);
9320 if (! VOID_TYPE_P (TREE_TYPE (arg01
)))
9321 arg01
= fold_build1_loc (loc
, code
, type
,
9322 fold_convert_loc (loc
,
9323 TREE_TYPE (op0
), arg01
));
9324 if (! VOID_TYPE_P (TREE_TYPE (arg02
)))
9325 arg02
= fold_build1_loc (loc
, code
, type
,
9326 fold_convert_loc (loc
,
9327 TREE_TYPE (op0
), arg02
));
9328 tem
= fold_build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
9331 /* If this was a conversion, and all we did was to move into
9332 inside the COND_EXPR, bring it back out. But leave it if
9333 it is a conversion from integer to integer and the
9334 result precision is no wider than a word since such a
9335 conversion is cheap and may be optimized away by combine,
9336 while it couldn't if it were outside the COND_EXPR. Then return
9337 so we don't get into an infinite recursion loop taking the
9338 conversion out and then back in. */
9340 if ((CONVERT_EXPR_CODE_P (code
)
9341 || code
== NON_LVALUE_EXPR
)
9342 && TREE_CODE (tem
) == COND_EXPR
9343 && TREE_CODE (TREE_OPERAND (tem
, 1)) == code
9344 && TREE_CODE (TREE_OPERAND (tem
, 2)) == code
9345 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem
, 1)))
9346 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem
, 2)))
9347 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))
9348 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)))
9349 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
9351 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))))
9352 && TYPE_PRECISION (TREE_TYPE (tem
)) <= BITS_PER_WORD
)
9353 || flag_syntax_only
))
9354 tem
= build1_loc (loc
, code
, type
,
9356 TREE_TYPE (TREE_OPERAND
9357 (TREE_OPERAND (tem
, 1), 0)),
9358 TREE_OPERAND (tem
, 0),
9359 TREE_OPERAND (TREE_OPERAND (tem
, 1), 0),
9360 TREE_OPERAND (TREE_OPERAND (tem
, 2),
9368 case NON_LVALUE_EXPR
:
9369 if (!maybe_lvalue_p (op0
))
9370 return fold_convert_loc (loc
, type
, op0
);
9375 case FIX_TRUNC_EXPR
:
9376 if (COMPARISON_CLASS_P (op0
))
9378 /* If we have (type) (a CMP b) and type is an integral type, return
9379 new expression involving the new type. Canonicalize
9380 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
9382 Do not fold the result as that would not simplify further, also
9383 folding again results in recursions. */
9384 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
9385 return build2_loc (loc
, TREE_CODE (op0
), type
,
9386 TREE_OPERAND (op0
, 0),
9387 TREE_OPERAND (op0
, 1));
9388 else if (!INTEGRAL_TYPE_P (type
) && !VOID_TYPE_P (type
)
9389 && TREE_CODE (type
) != VECTOR_TYPE
)
9390 return build3_loc (loc
, COND_EXPR
, type
, op0
,
9391 constant_boolean_node (true, type
),
9392 constant_boolean_node (false, type
));
9395 /* Handle (T *)&A.B.C for A being of type T and B and C
9396 living at offset zero. This occurs frequently in
9397 C++ upcasting and then accessing the base. */
9398 if (TREE_CODE (op0
) == ADDR_EXPR
9399 && POINTER_TYPE_P (type
)
9400 && handled_component_p (TREE_OPERAND (op0
, 0)))
9402 poly_int64 bitsize
, bitpos
;
9405 int unsignedp
, reversep
, volatilep
;
9407 = get_inner_reference (TREE_OPERAND (op0
, 0), &bitsize
, &bitpos
,
9408 &offset
, &mode
, &unsignedp
, &reversep
,
9410 /* If the reference was to a (constant) zero offset, we can use
9411 the address of the base if it has the same base type
9412 as the result type and the pointer type is unqualified. */
9414 && known_eq (bitpos
, 0)
9415 && (TYPE_MAIN_VARIANT (TREE_TYPE (type
))
9416 == TYPE_MAIN_VARIANT (TREE_TYPE (base
)))
9417 && TYPE_QUALS (type
) == TYPE_UNQUALIFIED
)
9418 return fold_convert_loc (loc
, type
,
9419 build_fold_addr_expr_loc (loc
, base
));
9422 if (TREE_CODE (op0
) == MODIFY_EXPR
9423 && TREE_CONSTANT (TREE_OPERAND (op0
, 1))
9424 /* Detect assigning a bitfield. */
9425 && !(TREE_CODE (TREE_OPERAND (op0
, 0)) == COMPONENT_REF
9427 (TREE_OPERAND (TREE_OPERAND (op0
, 0), 1))))
9429 /* Don't leave an assignment inside a conversion
9430 unless assigning a bitfield. */
9431 tem
= fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 1));
9432 /* First do the assignment, then return converted constant. */
9433 tem
= build2_loc (loc
, COMPOUND_EXPR
, TREE_TYPE (tem
), op0
, tem
);
9434 suppress_warning (tem
/* What warning? */);
9435 TREE_USED (tem
) = 1;
9439 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
9440 constants (if x has signed type, the sign bit cannot be set
9441 in c). This folds extension into the BIT_AND_EXPR.
9442 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
9443 very likely don't have maximal range for their precision and this
9444 transformation effectively doesn't preserve non-maximal ranges. */
9445 if (TREE_CODE (type
) == INTEGER_TYPE
9446 && TREE_CODE (op0
) == BIT_AND_EXPR
9447 && TREE_CODE (TREE_OPERAND (op0
, 1)) == INTEGER_CST
)
9449 tree and_expr
= op0
;
9450 tree and0
= TREE_OPERAND (and_expr
, 0);
9451 tree and1
= TREE_OPERAND (and_expr
, 1);
9454 if (TYPE_UNSIGNED (TREE_TYPE (and_expr
))
9455 || (TYPE_PRECISION (type
)
9456 <= TYPE_PRECISION (TREE_TYPE (and_expr
))))
9458 else if (TYPE_PRECISION (TREE_TYPE (and1
))
9459 <= HOST_BITS_PER_WIDE_INT
9460 && tree_fits_uhwi_p (and1
))
9462 unsigned HOST_WIDE_INT cst
;
9464 cst
= tree_to_uhwi (and1
);
9465 cst
&= HOST_WIDE_INT_M1U
9466 << (TYPE_PRECISION (TREE_TYPE (and1
)) - 1);
9467 change
= (cst
== 0);
9469 && !flag_syntax_only
9470 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0
)))
9473 tree uns
= unsigned_type_for (TREE_TYPE (and0
));
9474 and0
= fold_convert_loc (loc
, uns
, and0
);
9475 and1
= fold_convert_loc (loc
, uns
, and1
);
9480 tem
= force_fit_type (type
, wi::to_widest (and1
), 0,
9481 TREE_OVERFLOW (and1
));
9482 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
9483 fold_convert_loc (loc
, type
, and0
), tem
);
9487 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
9488 cast (T1)X will fold away. We assume that this happens when X itself
9490 if (POINTER_TYPE_P (type
)
9491 && TREE_CODE (arg0
) == POINTER_PLUS_EXPR
9492 && CONVERT_EXPR_P (TREE_OPERAND (arg0
, 0)))
9494 tree arg00
= TREE_OPERAND (arg0
, 0);
9495 tree arg01
= TREE_OPERAND (arg0
, 1);
9497 /* If -fsanitize=alignment, avoid this optimization in GENERIC
9498 when the pointed type needs higher alignment than
9499 the p+ first operand's pointed type. */
9501 && sanitize_flags_p (SANITIZE_ALIGNMENT
)
9502 && (min_align_of_type (TREE_TYPE (type
))
9503 > min_align_of_type (TREE_TYPE (TREE_TYPE (arg00
)))))
9506 /* Similarly, avoid this optimization in GENERIC for -fsanitize=null
9507 when type is a reference type and arg00's type is not,
9508 because arg00 could be validly nullptr and if arg01 doesn't return,
9509 we don't want false positive binding of reference to nullptr. */
9510 if (TREE_CODE (type
) == REFERENCE_TYPE
9512 && sanitize_flags_p (SANITIZE_NULL
)
9513 && TREE_CODE (TREE_TYPE (arg00
)) != REFERENCE_TYPE
)
9516 arg00
= fold_convert_loc (loc
, type
, arg00
);
9517 return fold_build_pointer_plus_loc (loc
, arg00
, arg01
);
9520 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
9521 of the same precision, and X is an integer type not narrower than
9522 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
9523 if (INTEGRAL_TYPE_P (type
)
9524 && TREE_CODE (op0
) == BIT_NOT_EXPR
9525 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
9526 && CONVERT_EXPR_P (TREE_OPERAND (op0
, 0))
9527 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (op0
)))
9529 tem
= TREE_OPERAND (TREE_OPERAND (op0
, 0), 0);
9530 if (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
9531 && TYPE_PRECISION (type
) <= TYPE_PRECISION (TREE_TYPE (tem
)))
9532 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
9533 fold_convert_loc (loc
, type
, tem
));
9536 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
9537 type of X and Y (integer types only). */
9538 if (INTEGRAL_TYPE_P (type
)
9539 && TREE_CODE (op0
) == MULT_EXPR
9540 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
9541 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (op0
))
9542 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0
))
9543 || !sanitize_flags_p (SANITIZE_SI_OVERFLOW
)))
9545 /* Be careful not to introduce new overflows. */
9547 if (TYPE_OVERFLOW_WRAPS (type
))
9550 mult_type
= unsigned_type_for (type
);
9552 if (TYPE_PRECISION (mult_type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
9554 tem
= fold_build2_loc (loc
, MULT_EXPR
, mult_type
,
9555 fold_convert_loc (loc
, mult_type
,
9556 TREE_OPERAND (op0
, 0)),
9557 fold_convert_loc (loc
, mult_type
,
9558 TREE_OPERAND (op0
, 1)));
9559 return fold_convert_loc (loc
, type
, tem
);
9565 case VIEW_CONVERT_EXPR
:
9566 if (TREE_CODE (op0
) == MEM_REF
)
9568 if (TYPE_ALIGN (TREE_TYPE (op0
)) != TYPE_ALIGN (type
))
9569 type
= build_aligned_type (type
, TYPE_ALIGN (TREE_TYPE (op0
)));
9570 tem
= fold_build2_loc (loc
, MEM_REF
, type
,
9571 TREE_OPERAND (op0
, 0), TREE_OPERAND (op0
, 1));
9572 REF_REVERSE_STORAGE_ORDER (tem
) = REF_REVERSE_STORAGE_ORDER (op0
);
9579 tem
= fold_negate_expr (loc
, arg0
);
9581 return fold_convert_loc (loc
, type
, tem
);
9585 /* Convert fabs((double)float) into (double)fabsf(float). */
9586 if (TREE_CODE (arg0
) == NOP_EXPR
9587 && TREE_CODE (type
) == REAL_TYPE
)
9589 tree targ0
= strip_float_extensions (arg0
);
9591 return fold_convert_loc (loc
, type
,
9592 fold_build1_loc (loc
, ABS_EXPR
,
9599 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
9600 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
9601 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
9602 fold_convert_loc (loc
, type
,
9603 TREE_OPERAND (arg0
, 0)))))
9604 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
, tem
,
9605 fold_convert_loc (loc
, type
,
9606 TREE_OPERAND (arg0
, 1)));
9607 else if (TREE_CODE (arg0
) == BIT_XOR_EXPR
9608 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
9609 fold_convert_loc (loc
, type
,
9610 TREE_OPERAND (arg0
, 1)))))
9611 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
,
9612 fold_convert_loc (loc
, type
,
9613 TREE_OPERAND (arg0
, 0)), tem
);
9617 case TRUTH_NOT_EXPR
:
9618 /* Note that the operand of this must be an int
9619 and its values must be 0 or 1.
9620 ("true" is a fixed value perhaps depending on the language,
9621 but we don't handle values other than 1 correctly yet.) */
9622 tem
= fold_truth_not_expr (loc
, arg0
);
9625 return fold_convert_loc (loc
, type
, tem
);
9628 /* Fold *&X to X if X is an lvalue. */
9629 if (TREE_CODE (op0
) == ADDR_EXPR
)
9631 tree op00
= TREE_OPERAND (op0
, 0);
9633 || TREE_CODE (op00
) == PARM_DECL
9634 || TREE_CODE (op00
) == RESULT_DECL
)
9635 && !TREE_READONLY (op00
))
9642 } /* switch (code) */
9646 /* If the operation was a conversion do _not_ mark a resulting constant
9647 with TREE_OVERFLOW if the original constant was not. These conversions
9648 have implementation defined behavior and retaining the TREE_OVERFLOW
9649 flag here would confuse later passes such as VRP. */
9651 fold_unary_ignore_overflow_loc (location_t loc
, enum tree_code code
,
9652 tree type
, tree op0
)
9654 tree res
= fold_unary_loc (loc
, code
, type
, op0
);
9656 && TREE_CODE (res
) == INTEGER_CST
9657 && TREE_CODE (op0
) == INTEGER_CST
9658 && CONVERT_EXPR_CODE_P (code
))
9659 TREE_OVERFLOW (res
) = TREE_OVERFLOW (op0
);
9664 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
9665 operands OP0 and OP1. LOC is the location of the resulting expression.
9666 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
9667 Return the folded expression if folding is successful. Otherwise,
9668 return NULL_TREE. */
9670 fold_truth_andor (location_t loc
, enum tree_code code
, tree type
,
9671 tree arg0
, tree arg1
, tree op0
, tree op1
)
9675 /* We only do these simplifications if we are optimizing. */
9679 /* Check for things like (A || B) && (A || C). We can convert this
9680 to A || (B && C). Note that either operator can be any of the four
9681 truth and/or operations and the transformation will still be
9682 valid. Also note that we only care about order for the
9683 ANDIF and ORIF operators. If B contains side effects, this
9684 might change the truth-value of A. */
9685 if (TREE_CODE (arg0
) == TREE_CODE (arg1
)
9686 && (TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
9687 || TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
9688 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
9689 || TREE_CODE (arg0
) == TRUTH_OR_EXPR
)
9690 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0
, 1)))
9692 tree a00
= TREE_OPERAND (arg0
, 0);
9693 tree a01
= TREE_OPERAND (arg0
, 1);
9694 tree a10
= TREE_OPERAND (arg1
, 0);
9695 tree a11
= TREE_OPERAND (arg1
, 1);
9696 bool commutative
= ((TREE_CODE (arg0
) == TRUTH_OR_EXPR
9697 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
)
9698 && (code
== TRUTH_AND_EXPR
9699 || code
== TRUTH_OR_EXPR
));
9701 if (operand_equal_p (a00
, a10
, 0))
9702 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a00
,
9703 fold_build2_loc (loc
, code
, type
, a01
, a11
));
9704 else if (commutative
&& operand_equal_p (a00
, a11
, 0))
9705 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a00
,
9706 fold_build2_loc (loc
, code
, type
, a01
, a10
));
9707 else if (commutative
&& operand_equal_p (a01
, a10
, 0))
9708 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a01
,
9709 fold_build2_loc (loc
, code
, type
, a00
, a11
));
9711 /* This case if tricky because we must either have commutative
9712 operators or else A10 must not have side-effects. */
9714 else if ((commutative
|| ! TREE_SIDE_EFFECTS (a10
))
9715 && operand_equal_p (a01
, a11
, 0))
9716 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
,
9717 fold_build2_loc (loc
, code
, type
, a00
, a10
),
9721 /* See if we can build a range comparison. */
9722 if ((tem
= fold_range_test (loc
, code
, type
, op0
, op1
)) != 0)
9725 if ((code
== TRUTH_ANDIF_EXPR
&& TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
)
9726 || (code
== TRUTH_ORIF_EXPR
&& TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
))
9728 tem
= merge_truthop_with_opposite_arm (loc
, arg0
, arg1
, true);
9730 return fold_build2_loc (loc
, code
, type
, tem
, arg1
);
9733 if ((code
== TRUTH_ANDIF_EXPR
&& TREE_CODE (arg1
) == TRUTH_ORIF_EXPR
)
9734 || (code
== TRUTH_ORIF_EXPR
&& TREE_CODE (arg1
) == TRUTH_ANDIF_EXPR
))
9736 tem
= merge_truthop_with_opposite_arm (loc
, arg1
, arg0
, false);
9738 return fold_build2_loc (loc
, code
, type
, arg0
, tem
);
9741 /* Check for the possibility of merging component references. If our
9742 lhs is another similar operation, try to merge its rhs with our
9743 rhs. Then try to merge our lhs and rhs. */
9744 if (TREE_CODE (arg0
) == code
9745 && (tem
= fold_truth_andor_1 (loc
, code
, type
,
9746 TREE_OPERAND (arg0
, 1), arg1
)) != 0)
9747 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
9749 if ((tem
= fold_truth_andor_1 (loc
, code
, type
, arg0
, arg1
)) != 0)
9752 bool logical_op_non_short_circuit
= LOGICAL_OP_NON_SHORT_CIRCUIT
;
9753 if (param_logical_op_non_short_circuit
!= -1)
9754 logical_op_non_short_circuit
9755 = param_logical_op_non_short_circuit
;
9756 if (logical_op_non_short_circuit
9757 && !sanitize_coverage_p ()
9758 && (code
== TRUTH_AND_EXPR
9759 || code
== TRUTH_ANDIF_EXPR
9760 || code
== TRUTH_OR_EXPR
9761 || code
== TRUTH_ORIF_EXPR
))
9763 enum tree_code ncode
, icode
;
9765 ncode
= (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_AND_EXPR
)
9766 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
;
9767 icode
= ncode
== TRUTH_AND_EXPR
? TRUTH_ANDIF_EXPR
: TRUTH_ORIF_EXPR
;
9769 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
9770 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
9771 We don't want to pack more than two leafs to a non-IF AND/OR
9773 If tree-code of left-hand operand isn't an AND/OR-IF code and not
9774 equal to IF-CODE, then we don't want to add right-hand operand.
9775 If the inner right-hand side of left-hand operand has
9776 side-effects, or isn't simple, then we can't add to it,
9777 as otherwise we might destroy if-sequence. */
9778 if (TREE_CODE (arg0
) == icode
9779 && simple_condition_p (arg1
)
9780 /* Needed for sequence points to handle trappings, and
9782 && simple_condition_p (TREE_OPERAND (arg0
, 1)))
9784 tem
= fold_build2_loc (loc
, ncode
, type
, TREE_OPERAND (arg0
, 1),
9786 return fold_build2_loc (loc
, icode
, type
, TREE_OPERAND (arg0
, 0),
9789 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
9790 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
9791 else if (TREE_CODE (arg1
) == icode
9792 && simple_condition_p (arg0
)
9793 /* Needed for sequence points to handle trappings, and
9795 && simple_condition_p (TREE_OPERAND (arg1
, 0)))
9797 tem
= fold_build2_loc (loc
, ncode
, type
,
9798 arg0
, TREE_OPERAND (arg1
, 0));
9799 return fold_build2_loc (loc
, icode
, type
, tem
,
9800 TREE_OPERAND (arg1
, 1));
9802 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
9804 For sequence point consistancy, we need to check for trapping,
9805 and side-effects. */
9806 else if (code
== icode
&& simple_condition_p (arg0
)
9807 && simple_condition_p (arg1
))
9808 return fold_build2_loc (loc
, ncode
, type
, arg0
, arg1
);
9814 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
9815 by changing CODE to reduce the magnitude of constants involved in
9816 ARG0 of the comparison.
9817 Returns a canonicalized comparison tree if a simplification was
9818 possible, otherwise returns NULL_TREE.
9819 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
9820 valid if signed overflow is undefined. */
9823 maybe_canonicalize_comparison_1 (location_t loc
, enum tree_code code
, tree type
,
9824 tree arg0
, tree arg1
,
9825 bool *strict_overflow_p
)
9827 enum tree_code code0
= TREE_CODE (arg0
);
9828 tree t
, cst0
= NULL_TREE
;
9831 /* Match A +- CST code arg1. We can change this only if overflow
9833 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
9834 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
)))
9835 /* In principle pointers also have undefined overflow behavior,
9836 but that causes problems elsewhere. */
9837 && !POINTER_TYPE_P (TREE_TYPE (arg0
))
9838 && (code0
== MINUS_EXPR
9839 || code0
== PLUS_EXPR
)
9840 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
))
9843 /* Identify the constant in arg0 and its sign. */
9844 cst0
= TREE_OPERAND (arg0
, 1);
9845 sgn0
= tree_int_cst_sgn (cst0
);
9847 /* Overflowed constants and zero will cause problems. */
9848 if (integer_zerop (cst0
)
9849 || TREE_OVERFLOW (cst0
))
9852 /* See if we can reduce the magnitude of the constant in
9853 arg0 by changing the comparison code. */
9854 /* A - CST < arg1 -> A - CST-1 <= arg1. */
9856 && code0
== ((sgn0
== -1) ? PLUS_EXPR
: MINUS_EXPR
))
9858 /* A + CST > arg1 -> A + CST-1 >= arg1. */
9859 else if (code
== GT_EXPR
9860 && code0
== ((sgn0
== -1) ? MINUS_EXPR
: PLUS_EXPR
))
9862 /* A + CST <= arg1 -> A + CST-1 < arg1. */
9863 else if (code
== LE_EXPR
9864 && code0
== ((sgn0
== -1) ? MINUS_EXPR
: PLUS_EXPR
))
9866 /* A - CST >= arg1 -> A - CST-1 > arg1. */
9867 else if (code
== GE_EXPR
9868 && code0
== ((sgn0
== -1) ? PLUS_EXPR
: MINUS_EXPR
))
9872 *strict_overflow_p
= true;
9874 /* Now build the constant reduced in magnitude. But not if that
9875 would produce one outside of its types range. */
9876 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0
))
9878 && TYPE_MIN_VALUE (TREE_TYPE (cst0
))
9879 && tree_int_cst_equal (cst0
, TYPE_MIN_VALUE (TREE_TYPE (cst0
))))
9881 && TYPE_MAX_VALUE (TREE_TYPE (cst0
))
9882 && tree_int_cst_equal (cst0
, TYPE_MAX_VALUE (TREE_TYPE (cst0
))))))
9885 t
= int_const_binop (sgn0
== -1 ? PLUS_EXPR
: MINUS_EXPR
,
9886 cst0
, build_int_cst (TREE_TYPE (cst0
), 1));
9887 t
= fold_build2_loc (loc
, code0
, TREE_TYPE (arg0
), TREE_OPERAND (arg0
, 0), t
);
9888 t
= fold_convert (TREE_TYPE (arg1
), t
);
9890 return fold_build2_loc (loc
, code
, type
, t
, arg1
);
9893 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
9894 overflow further. Try to decrease the magnitude of constants involved
9895 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
9896 and put sole constants at the second argument position.
9897 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
9900 maybe_canonicalize_comparison (location_t loc
, enum tree_code code
, tree type
,
9901 tree arg0
, tree arg1
)
9904 bool strict_overflow_p
;
9905 const char * const warnmsg
= G_("assuming signed overflow does not occur "
9906 "when reducing constant in comparison");
9908 /* Try canonicalization by simplifying arg0. */
9909 strict_overflow_p
= false;
9910 t
= maybe_canonicalize_comparison_1 (loc
, code
, type
, arg0
, arg1
,
9911 &strict_overflow_p
);
9914 if (strict_overflow_p
)
9915 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MAGNITUDE
);
9919 /* Try canonicalization by simplifying arg1 using the swapped
9921 code
= swap_tree_comparison (code
);
9922 strict_overflow_p
= false;
9923 t
= maybe_canonicalize_comparison_1 (loc
, code
, type
, arg1
, arg0
,
9924 &strict_overflow_p
);
9925 if (t
&& strict_overflow_p
)
9926 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MAGNITUDE
);
9930 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
9931 space. This is used to avoid issuing overflow warnings for
9932 expressions like &p->x which cannot wrap. */
9935 pointer_may_wrap_p (tree base
, tree offset
, poly_int64 bitpos
)
9937 if (!POINTER_TYPE_P (TREE_TYPE (base
)))
9940 if (maybe_lt (bitpos
, 0))
9943 poly_wide_int wi_offset
;
9944 int precision
= TYPE_PRECISION (TREE_TYPE (base
));
9945 if (offset
== NULL_TREE
)
9946 wi_offset
= wi::zero (precision
);
9947 else if (!poly_int_tree_p (offset
) || TREE_OVERFLOW (offset
))
9950 wi_offset
= wi::to_poly_wide (offset
);
9952 wi::overflow_type overflow
;
9953 poly_wide_int units
= wi::shwi (bits_to_bytes_round_down (bitpos
),
9955 poly_wide_int total
= wi::add (wi_offset
, units
, UNSIGNED
, &overflow
);
9959 poly_uint64 total_hwi
, size
;
9960 if (!total
.to_uhwi (&total_hwi
)
9961 || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base
))),
9963 || known_eq (size
, 0U))
9966 if (known_le (total_hwi
, size
))
9969 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
9971 if (TREE_CODE (base
) == ADDR_EXPR
9972 && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base
, 0))),
9974 && maybe_ne (size
, 0U)
9975 && known_le (total_hwi
, size
))
9981 /* Return a positive integer when the symbol DECL is known to have
9982 a nonzero address, zero when it's known not to (e.g., it's a weak
9983 symbol), and a negative integer when the symbol is not yet in the
9984 symbol table and so whether or not its address is zero is unknown.
9985 For function local objects always return positive integer. */
9987 maybe_nonzero_address (tree decl
)
9989 /* Normally, don't do anything for variables and functions before symtab is
9990 built; it is quite possible that DECL will be declared weak later.
9991 But if folding_initializer, we need a constant answer now, so create
9992 the symtab entry and prevent later weak declaration. */
9993 if (DECL_P (decl
) && decl_in_symtab_p (decl
))
9994 if (struct symtab_node
*symbol
9995 = (folding_initializer
9996 ? symtab_node::get_create (decl
)
9997 : symtab_node::get (decl
)))
9998 return symbol
->nonzero_address ();
10000 /* Function local objects are never NULL. */
10002 && (DECL_CONTEXT (decl
)
10003 && TREE_CODE (DECL_CONTEXT (decl
)) == FUNCTION_DECL
10004 && auto_var_in_fn_p (decl
, DECL_CONTEXT (decl
))))
10010 /* Subroutine of fold_binary. This routine performs all of the
10011 transformations that are common to the equality/inequality
10012 operators (EQ_EXPR and NE_EXPR) and the ordering operators
10013 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
10014 fold_binary should call fold_binary. Fold a comparison with
10015 tree code CODE and type TYPE with operands OP0 and OP1. Return
10016 the folded comparison or NULL_TREE. */
10019 fold_comparison (location_t loc
, enum tree_code code
, tree type
,
10020 tree op0
, tree op1
)
10022 const bool equality_code
= (code
== EQ_EXPR
|| code
== NE_EXPR
);
10023 tree arg0
, arg1
, tem
;
10028 STRIP_SIGN_NOPS (arg0
);
10029 STRIP_SIGN_NOPS (arg1
);
10031 /* For comparisons of pointers we can decompose it to a compile time
10032 comparison of the base objects and the offsets into the object.
10033 This requires at least one operand being an ADDR_EXPR or a
10034 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
10035 if (POINTER_TYPE_P (TREE_TYPE (arg0
))
10036 && (TREE_CODE (arg0
) == ADDR_EXPR
10037 || TREE_CODE (arg1
) == ADDR_EXPR
10038 || TREE_CODE (arg0
) == POINTER_PLUS_EXPR
10039 || TREE_CODE (arg1
) == POINTER_PLUS_EXPR
))
10041 tree base0
, base1
, offset0
= NULL_TREE
, offset1
= NULL_TREE
;
10042 poly_int64 bitsize
, bitpos0
= 0, bitpos1
= 0;
10044 int volatilep
, reversep
, unsignedp
;
10045 bool indirect_base0
= false, indirect_base1
= false;
10047 /* Get base and offset for the access. Strip ADDR_EXPR for
10048 get_inner_reference, but put it back by stripping INDIRECT_REF
10049 off the base object if possible. indirect_baseN will be true
10050 if baseN is not an address but refers to the object itself. */
10052 if (TREE_CODE (arg0
) == ADDR_EXPR
)
10055 = get_inner_reference (TREE_OPERAND (arg0
, 0),
10056 &bitsize
, &bitpos0
, &offset0
, &mode
,
10057 &unsignedp
, &reversep
, &volatilep
);
10058 if (INDIRECT_REF_P (base0
))
10059 base0
= TREE_OPERAND (base0
, 0);
10061 indirect_base0
= true;
10063 else if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
)
10065 base0
= TREE_OPERAND (arg0
, 0);
10066 STRIP_SIGN_NOPS (base0
);
10067 if (TREE_CODE (base0
) == ADDR_EXPR
)
10070 = get_inner_reference (TREE_OPERAND (base0
, 0),
10071 &bitsize
, &bitpos0
, &offset0
, &mode
,
10072 &unsignedp
, &reversep
, &volatilep
);
10073 if (INDIRECT_REF_P (base0
))
10074 base0
= TREE_OPERAND (base0
, 0);
10076 indirect_base0
= true;
10078 if (offset0
== NULL_TREE
|| integer_zerop (offset0
))
10079 offset0
= TREE_OPERAND (arg0
, 1);
10081 offset0
= size_binop (PLUS_EXPR
, offset0
,
10082 TREE_OPERAND (arg0
, 1));
10083 if (poly_int_tree_p (offset0
))
10085 poly_offset_int tem
= wi::sext (wi::to_poly_offset (offset0
),
10086 TYPE_PRECISION (sizetype
));
10087 tem
<<= LOG2_BITS_PER_UNIT
;
10089 if (tem
.to_shwi (&bitpos0
))
10090 offset0
= NULL_TREE
;
10095 if (TREE_CODE (arg1
) == ADDR_EXPR
)
10098 = get_inner_reference (TREE_OPERAND (arg1
, 0),
10099 &bitsize
, &bitpos1
, &offset1
, &mode
,
10100 &unsignedp
, &reversep
, &volatilep
);
10101 if (INDIRECT_REF_P (base1
))
10102 base1
= TREE_OPERAND (base1
, 0);
10104 indirect_base1
= true;
10106 else if (TREE_CODE (arg1
) == POINTER_PLUS_EXPR
)
10108 base1
= TREE_OPERAND (arg1
, 0);
10109 STRIP_SIGN_NOPS (base1
);
10110 if (TREE_CODE (base1
) == ADDR_EXPR
)
10113 = get_inner_reference (TREE_OPERAND (base1
, 0),
10114 &bitsize
, &bitpos1
, &offset1
, &mode
,
10115 &unsignedp
, &reversep
, &volatilep
);
10116 if (INDIRECT_REF_P (base1
))
10117 base1
= TREE_OPERAND (base1
, 0);
10119 indirect_base1
= true;
10121 if (offset1
== NULL_TREE
|| integer_zerop (offset1
))
10122 offset1
= TREE_OPERAND (arg1
, 1);
10124 offset1
= size_binop (PLUS_EXPR
, offset1
,
10125 TREE_OPERAND (arg1
, 1));
10126 if (poly_int_tree_p (offset1
))
10128 poly_offset_int tem
= wi::sext (wi::to_poly_offset (offset1
),
10129 TYPE_PRECISION (sizetype
));
10130 tem
<<= LOG2_BITS_PER_UNIT
;
10132 if (tem
.to_shwi (&bitpos1
))
10133 offset1
= NULL_TREE
;
10137 /* If we have equivalent bases we might be able to simplify. */
10138 if (indirect_base0
== indirect_base1
10139 && operand_equal_p (base0
, base1
,
10140 indirect_base0
? OEP_ADDRESS_OF
: 0))
10142 /* We can fold this expression to a constant if the non-constant
10143 offset parts are equal. */
10144 if ((offset0
== offset1
10145 || (offset0
&& offset1
10146 && operand_equal_p (offset0
, offset1
, 0)))
10149 && (DECL_P (base0
) || CONSTANT_CLASS_P (base0
)))
10150 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))))
10153 && maybe_ne (bitpos0
, bitpos1
)
10154 && (pointer_may_wrap_p (base0
, offset0
, bitpos0
)
10155 || pointer_may_wrap_p (base1
, offset1
, bitpos1
)))
10156 fold_overflow_warning (("assuming pointer wraparound does not "
10157 "occur when comparing P +- C1 with "
10159 WARN_STRICT_OVERFLOW_CONDITIONAL
);
10164 if (known_eq (bitpos0
, bitpos1
))
10165 return constant_boolean_node (true, type
);
10166 if (known_ne (bitpos0
, bitpos1
))
10167 return constant_boolean_node (false, type
);
10170 if (known_ne (bitpos0
, bitpos1
))
10171 return constant_boolean_node (true, type
);
10172 if (known_eq (bitpos0
, bitpos1
))
10173 return constant_boolean_node (false, type
);
10176 if (known_lt (bitpos0
, bitpos1
))
10177 return constant_boolean_node (true, type
);
10178 if (known_ge (bitpos0
, bitpos1
))
10179 return constant_boolean_node (false, type
);
10182 if (known_le (bitpos0
, bitpos1
))
10183 return constant_boolean_node (true, type
);
10184 if (known_gt (bitpos0
, bitpos1
))
10185 return constant_boolean_node (false, type
);
10188 if (known_ge (bitpos0
, bitpos1
))
10189 return constant_boolean_node (true, type
);
10190 if (known_lt (bitpos0
, bitpos1
))
10191 return constant_boolean_node (false, type
);
10194 if (known_gt (bitpos0
, bitpos1
))
10195 return constant_boolean_node (true, type
);
10196 if (known_le (bitpos0
, bitpos1
))
10197 return constant_boolean_node (false, type
);
10202 /* We can simplify the comparison to a comparison of the variable
10203 offset parts if the constant offset parts are equal.
10204 Be careful to use signed sizetype here because otherwise we
10205 mess with array offsets in the wrong way. This is possible
10206 because pointer arithmetic is restricted to retain within an
10207 object and overflow on pointer differences is undefined as of
10208 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
10209 else if (known_eq (bitpos0
, bitpos1
)
10212 && (DECL_P (base0
) || CONSTANT_CLASS_P (base0
)))
10213 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))))
10215 /* By converting to signed sizetype we cover middle-end pointer
10216 arithmetic which operates on unsigned pointer types of size
10217 type size and ARRAY_REF offsets which are properly sign or
10218 zero extended from their type in case it is narrower than
10220 if (offset0
== NULL_TREE
)
10221 offset0
= build_int_cst (ssizetype
, 0);
10223 offset0
= fold_convert_loc (loc
, ssizetype
, offset0
);
10224 if (offset1
== NULL_TREE
)
10225 offset1
= build_int_cst (ssizetype
, 0);
10227 offset1
= fold_convert_loc (loc
, ssizetype
, offset1
);
10230 && (pointer_may_wrap_p (base0
, offset0
, bitpos0
)
10231 || pointer_may_wrap_p (base1
, offset1
, bitpos1
)))
10232 fold_overflow_warning (("assuming pointer wraparound does not "
10233 "occur when comparing P +- C1 with "
10235 WARN_STRICT_OVERFLOW_COMPARISON
);
10237 return fold_build2_loc (loc
, code
, type
, offset0
, offset1
);
10240 /* For equal offsets we can simplify to a comparison of the
10242 else if (known_eq (bitpos0
, bitpos1
)
10244 ? base0
!= TREE_OPERAND (arg0
, 0) : base0
!= arg0
)
10246 ? base1
!= TREE_OPERAND (arg1
, 0) : base1
!= arg1
)
10247 && ((offset0
== offset1
)
10248 || (offset0
&& offset1
10249 && operand_equal_p (offset0
, offset1
, 0))))
10251 if (indirect_base0
)
10252 base0
= build_fold_addr_expr_loc (loc
, base0
);
10253 if (indirect_base1
)
10254 base1
= build_fold_addr_expr_loc (loc
, base1
);
10255 return fold_build2_loc (loc
, code
, type
, base0
, base1
);
10257 /* Comparison between an ordinary (non-weak) symbol and a null
10258 pointer can be eliminated since such symbols must have a non
10259 null address. In C, relational expressions between pointers
10260 to objects and null pointers are undefined. The results
10261 below follow the C++ rules with the additional property that
10262 every object pointer compares greater than a null pointer.
10264 else if (((DECL_P (base0
)
10265 && maybe_nonzero_address (base0
) > 0
10266 /* Avoid folding references to struct members at offset 0 to
10267 prevent tests like '&ptr->firstmember == 0' from getting
10268 eliminated. When ptr is null, although the -> expression
10269 is strictly speaking invalid, GCC retains it as a matter
10270 of QoI. See PR c/44555. */
10271 && (offset0
== NULL_TREE
&& known_ne (bitpos0
, 0)))
10272 || CONSTANT_CLASS_P (base0
))
10274 /* The caller guarantees that when one of the arguments is
10275 constant (i.e., null in this case) it is second. */
10276 && integer_zerop (arg1
))
10283 return constant_boolean_node (false, type
);
10287 return constant_boolean_node (true, type
);
10289 gcc_unreachable ();
10294 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
10295 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
10296 the resulting offset is smaller in absolute value than the
10297 original one and has the same sign. */
10298 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
10299 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))
10300 && (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
10301 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
10302 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1)))
10303 && (TREE_CODE (arg1
) == PLUS_EXPR
|| TREE_CODE (arg1
) == MINUS_EXPR
)
10304 && (TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
10305 && !TREE_OVERFLOW (TREE_OPERAND (arg1
, 1))))
10307 tree const1
= TREE_OPERAND (arg0
, 1);
10308 tree const2
= TREE_OPERAND (arg1
, 1);
10309 tree variable1
= TREE_OPERAND (arg0
, 0);
10310 tree variable2
= TREE_OPERAND (arg1
, 0);
10312 const char * const warnmsg
= G_("assuming signed overflow does not "
10313 "occur when combining constants around "
10316 /* Put the constant on the side where it doesn't overflow and is
10317 of lower absolute value and of same sign than before. */
10318 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
10319 ? MINUS_EXPR
: PLUS_EXPR
,
10321 if (!TREE_OVERFLOW (cst
)
10322 && tree_int_cst_compare (const2
, cst
) == tree_int_cst_sgn (const2
)
10323 && tree_int_cst_sgn (cst
) == tree_int_cst_sgn (const2
))
10325 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
10326 return fold_build2_loc (loc
, code
, type
,
10328 fold_build2_loc (loc
, TREE_CODE (arg1
),
10333 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
10334 ? MINUS_EXPR
: PLUS_EXPR
,
10336 if (!TREE_OVERFLOW (cst
)
10337 && tree_int_cst_compare (const1
, cst
) == tree_int_cst_sgn (const1
)
10338 && tree_int_cst_sgn (cst
) == tree_int_cst_sgn (const1
))
10340 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
10341 return fold_build2_loc (loc
, code
, type
,
10342 fold_build2_loc (loc
, TREE_CODE (arg0
),
10349 tem
= maybe_canonicalize_comparison (loc
, code
, type
, arg0
, arg1
);
10353 /* If we are comparing an expression that just has comparisons
10354 of two integer values, arithmetic expressions of those comparisons,
10355 and constants, we can simplify it. There are only three cases
10356 to check: the two values can either be equal, the first can be
10357 greater, or the second can be greater. Fold the expression for
10358 those three values. Since each value must be 0 or 1, we have
10359 eight possibilities, each of which corresponds to the constant 0
10360 or 1 or one of the six possible comparisons.
10362 This handles common cases like (a > b) == 0 but also handles
10363 expressions like ((x > y) - (y > x)) > 0, which supposedly
10364 occur in macroized code. */
10366 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) != INTEGER_CST
)
10368 tree cval1
= 0, cval2
= 0;
10370 if (twoval_comparison_p (arg0
, &cval1
, &cval2
)
10371 /* Don't handle degenerate cases here; they should already
10372 have been handled anyway. */
10373 && cval1
!= 0 && cval2
!= 0
10374 && ! (TREE_CONSTANT (cval1
) && TREE_CONSTANT (cval2
))
10375 && TREE_TYPE (cval1
) == TREE_TYPE (cval2
)
10376 && INTEGRAL_TYPE_P (TREE_TYPE (cval1
))
10377 && TYPE_MAX_VALUE (TREE_TYPE (cval1
))
10378 && TYPE_MAX_VALUE (TREE_TYPE (cval2
))
10379 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1
)),
10380 TYPE_MAX_VALUE (TREE_TYPE (cval2
)), 0))
10382 tree maxval
= TYPE_MAX_VALUE (TREE_TYPE (cval1
));
10383 tree minval
= TYPE_MIN_VALUE (TREE_TYPE (cval1
));
10385 /* We can't just pass T to eval_subst in case cval1 or cval2
10386 was the same as ARG1. */
10389 = fold_build2_loc (loc
, code
, type
,
10390 eval_subst (loc
, arg0
, cval1
, maxval
,
10394 = fold_build2_loc (loc
, code
, type
,
10395 eval_subst (loc
, arg0
, cval1
, maxval
,
10399 = fold_build2_loc (loc
, code
, type
,
10400 eval_subst (loc
, arg0
, cval1
, minval
,
10404 /* All three of these results should be 0 or 1. Confirm they are.
10405 Then use those values to select the proper code to use. */
10407 if (TREE_CODE (high_result
) == INTEGER_CST
10408 && TREE_CODE (equal_result
) == INTEGER_CST
10409 && TREE_CODE (low_result
) == INTEGER_CST
)
10411 /* Make a 3-bit mask with the high-order bit being the
10412 value for `>', the next for '=', and the low for '<'. */
10413 switch ((integer_onep (high_result
) * 4)
10414 + (integer_onep (equal_result
) * 2)
10415 + integer_onep (low_result
))
10418 /* Always false. */
10419 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
10440 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
10443 return fold_build2_loc (loc
, code
, type
, cval1
, cval2
);
10452 /* Subroutine of fold_binary. Optimize complex multiplications of the
10453 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
10454 argument EXPR represents the expression "z" of type TYPE. */
10457 fold_mult_zconjz (location_t loc
, tree type
, tree expr
)
10459 tree itype
= TREE_TYPE (type
);
10460 tree rpart
, ipart
, tem
;
10462 if (TREE_CODE (expr
) == COMPLEX_EXPR
)
10464 rpart
= TREE_OPERAND (expr
, 0);
10465 ipart
= TREE_OPERAND (expr
, 1);
10467 else if (TREE_CODE (expr
) == COMPLEX_CST
)
10469 rpart
= TREE_REALPART (expr
);
10470 ipart
= TREE_IMAGPART (expr
);
10474 expr
= save_expr (expr
);
10475 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, itype
, expr
);
10476 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, itype
, expr
);
10479 rpart
= save_expr (rpart
);
10480 ipart
= save_expr (ipart
);
10481 tem
= fold_build2_loc (loc
, PLUS_EXPR
, itype
,
10482 fold_build2_loc (loc
, MULT_EXPR
, itype
, rpart
, rpart
),
10483 fold_build2_loc (loc
, MULT_EXPR
, itype
, ipart
, ipart
));
10484 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, tem
,
10485 build_zero_cst (itype
));
10489 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
10490 CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
10491 true if successful. */
10494 vec_cst_ctor_to_array (tree arg
, unsigned int nelts
, tree
*elts
)
10496 unsigned HOST_WIDE_INT i
, nunits
;
10498 if (TREE_CODE (arg
) == VECTOR_CST
10499 && VECTOR_CST_NELTS (arg
).is_constant (&nunits
))
10501 for (i
= 0; i
< nunits
; ++i
)
10502 elts
[i
] = VECTOR_CST_ELT (arg
, i
);
10504 else if (TREE_CODE (arg
) == CONSTRUCTOR
)
10506 constructor_elt
*elt
;
10508 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg
), i
, elt
)
10509 if (i
>= nelts
|| TREE_CODE (TREE_TYPE (elt
->value
)) == VECTOR_TYPE
)
10512 elts
[i
] = elt
->value
;
10516 for (; i
< nelts
; i
++)
10518 = fold_convert (TREE_TYPE (TREE_TYPE (arg
)), integer_zero_node
);
10522 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
10523 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
10524 NULL_TREE otherwise. */
10527 fold_vec_perm (tree type
, tree arg0
, tree arg1
, const vec_perm_indices
&sel
)
10530 unsigned HOST_WIDE_INT nelts
;
10531 bool need_ctor
= false;
10533 if (!sel
.length ().is_constant (&nelts
))
10535 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type
), nelts
)
10536 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)), nelts
)
10537 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)), nelts
));
10538 if (TREE_TYPE (TREE_TYPE (arg0
)) != TREE_TYPE (type
)
10539 || TREE_TYPE (TREE_TYPE (arg1
)) != TREE_TYPE (type
))
10542 tree
*in_elts
= XALLOCAVEC (tree
, nelts
* 2);
10543 if (!vec_cst_ctor_to_array (arg0
, nelts
, in_elts
)
10544 || !vec_cst_ctor_to_array (arg1
, nelts
, in_elts
+ nelts
))
10547 tree_vector_builder
out_elts (type
, nelts
, 1);
10548 for (i
= 0; i
< nelts
; i
++)
10550 HOST_WIDE_INT index
;
10551 if (!sel
[i
].is_constant (&index
))
10553 if (!CONSTANT_CLASS_P (in_elts
[index
]))
10555 out_elts
.quick_push (unshare_expr (in_elts
[index
]));
10560 vec
<constructor_elt
, va_gc
> *v
;
10561 vec_alloc (v
, nelts
);
10562 for (i
= 0; i
< nelts
; i
++)
10563 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, out_elts
[i
]);
10564 return build_constructor (type
, v
);
10567 return out_elts
.build ();
10570 /* Try to fold a pointer difference of type TYPE two address expressions of
10571 array references AREF0 and AREF1 using location LOC. Return a
10572 simplified expression for the difference or NULL_TREE. */
10575 fold_addr_of_array_ref_difference (location_t loc
, tree type
,
10576 tree aref0
, tree aref1
,
10577 bool use_pointer_diff
)
10579 tree base0
= TREE_OPERAND (aref0
, 0);
10580 tree base1
= TREE_OPERAND (aref1
, 0);
10581 tree base_offset
= build_int_cst (type
, 0);
10583 /* If the bases are array references as well, recurse. If the bases
10584 are pointer indirections compute the difference of the pointers.
10585 If the bases are equal, we are set. */
10586 if ((TREE_CODE (base0
) == ARRAY_REF
10587 && TREE_CODE (base1
) == ARRAY_REF
10589 = fold_addr_of_array_ref_difference (loc
, type
, base0
, base1
,
10590 use_pointer_diff
)))
10591 || (INDIRECT_REF_P (base0
)
10592 && INDIRECT_REF_P (base1
)
10595 ? fold_binary_loc (loc
, POINTER_DIFF_EXPR
, type
,
10596 TREE_OPERAND (base0
, 0),
10597 TREE_OPERAND (base1
, 0))
10598 : fold_binary_loc (loc
, MINUS_EXPR
, type
,
10599 fold_convert (type
,
10600 TREE_OPERAND (base0
, 0)),
10601 fold_convert (type
,
10602 TREE_OPERAND (base1
, 0)))))
10603 || operand_equal_p (base0
, base1
, OEP_ADDRESS_OF
))
10605 tree op0
= fold_convert_loc (loc
, type
, TREE_OPERAND (aref0
, 1));
10606 tree op1
= fold_convert_loc (loc
, type
, TREE_OPERAND (aref1
, 1));
10607 tree esz
= fold_convert_loc (loc
, type
, array_ref_element_size (aref0
));
10608 tree diff
= fold_build2_loc (loc
, MINUS_EXPR
, type
, op0
, op1
);
10609 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
10611 fold_build2_loc (loc
, MULT_EXPR
, type
,
10617 /* If the real or vector real constant CST of type TYPE has an exact
10618 inverse, return it, else return NULL. */
10621 exact_inverse (tree type
, tree cst
)
10627 switch (TREE_CODE (cst
))
10630 r
= TREE_REAL_CST (cst
);
10632 if (exact_real_inverse (TYPE_MODE (type
), &r
))
10633 return build_real (type
, r
);
10639 unit_type
= TREE_TYPE (type
);
10640 mode
= TYPE_MODE (unit_type
);
10642 tree_vector_builder elts
;
10643 if (!elts
.new_unary_operation (type
, cst
, false))
10645 unsigned int count
= elts
.encoded_nelts ();
10646 for (unsigned int i
= 0; i
< count
; ++i
)
10648 r
= TREE_REAL_CST (VECTOR_CST_ELT (cst
, i
));
10649 if (!exact_real_inverse (mode
, &r
))
10651 elts
.quick_push (build_real (unit_type
, r
));
10654 return elts
.build ();
10662 /* Mask out the tz least significant bits of X of type TYPE where
10663 tz is the number of trailing zeroes in Y. */
10665 mask_with_tz (tree type
, const wide_int
&x
, const wide_int
&y
)
10667 int tz
= wi::ctz (y
);
10669 return wi::mask (tz
, true, TYPE_PRECISION (type
)) & x
;
10673 /* Return true when T is an address and is known to be nonzero.
10674 For floating point we further ensure that T is not denormal.
10675 Similar logic is present in nonzero_address in rtlanal.h.
10677 If the return value is based on the assumption that signed overflow
10678 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
10679 change *STRICT_OVERFLOW_P. */
/* NOTE(review): this extract is missing interleaved lines (return type,
   braces, several case labels); edit only against the upstream file.  */
10682 tree_expr_nonzero_warnv_p (tree t
, bool *strict_overflow_p
)
10684 tree type
= TREE_TYPE (t
);
10685 enum tree_code code
;
10687 /* Doing something useful for floating point would need more work. */
10688 if (!INTEGRAL_TYPE_P (type
) && !POINTER_TYPE_P (type
))
10691 code
= TREE_CODE (t
);
/* First dispatch on the tree code class: unary, binary/comparison and
   declaration/reference forms delegate to the class-specific helpers.  */
10692 switch (TREE_CODE_CLASS (code
))
10695 return tree_unary_nonzero_warnv_p (code
, type
, TREE_OPERAND (t
, 0),
10696 strict_overflow_p
);
10698 case tcc_comparison
:
10699 return tree_binary_nonzero_warnv_p (code
, type
,
10700 TREE_OPERAND (t
, 0),
10701 TREE_OPERAND (t
, 1),
10702 strict_overflow_p
);
10704 case tcc_declaration
:
10705 case tcc_reference
:
10706 return tree_single_nonzero_warnv_p (t
, strict_overflow_p
);
/* Second dispatch on the specific tree code for forms the class
   switch above does not cover.  */
10714 case TRUTH_NOT_EXPR
:
10715 return tree_unary_nonzero_warnv_p (code
, type
, TREE_OPERAND (t
, 0),
10716 strict_overflow_p
);
10718 case TRUTH_AND_EXPR
:
10719 case TRUTH_OR_EXPR
:
10720 case TRUTH_XOR_EXPR
:
10721 return tree_binary_nonzero_warnv_p (code
, type
,
10722 TREE_OPERAND (t
, 0),
10723 TREE_OPERAND (t
, 1),
10724 strict_overflow_p
);
10730 case WITH_SIZE_EXPR
:
10732 return tree_single_nonzero_warnv_p (t
, strict_overflow_p
);
/* For COMPOUND_EXPR-like codes the result is operand 1; recurse on it.  */
10734 case COMPOUND_EXPR
:
10737 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 1),
10738 strict_overflow_p
);
10741 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 0),
10742 strict_overflow_p
);
/* Call handling: results of a non-throwing operator new (when null
   pointer checks may be deleted and -fcheck-new is off), of functions
   with the returns_nonnull attribute, and of alloca are nonzero.  */
10746 tree fndecl
= get_callee_fndecl (t
);
10747 if (!fndecl
) return false;
10748 if (flag_delete_null_pointer_checks
&& !flag_check_new
10749 && DECL_IS_OPERATOR_NEW_P (fndecl
)
10750 && !TREE_NOTHROW (fndecl
))
10752 if (flag_delete_null_pointer_checks
10753 && lookup_attribute ("returns_nonnull",
10754 TYPE_ATTRIBUTES (TREE_TYPE (fndecl
))))
10756 return alloca_call_p (t
);
10765 /* Return true when T is an address and is known to be nonzero.
10766 Handle warnings about undefined signed overflow. */
10769 tree_expr_nonzero_p (tree t
)
10771 bool ret
, strict_overflow_p
;
10773 strict_overflow_p
= false;
10774 ret
= tree_expr_nonzero_warnv_p (t
, &strict_overflow_p
);
10775 if (strict_overflow_p
)
10776 fold_overflow_warning (("assuming signed overflow does not occur when "
10777 "determining that expression is always "
10779 WARN_STRICT_OVERFLOW_MISC
);
10783 /* Return true if T is known not to be equal to an integer W. */
/* NOTE(review): this extract is missing interleaved lines (return type,
   braces, the range-variable declaration and case labels); edit only
   against the upstream file.  */
10786 expr_not_equal_to (tree t
, const wide_int
&w
)
10789 switch (TREE_CODE (t
))
/* An INTEGER_CST compares against W directly.  */
10792 return wi::to_wide (t
) != w
;
/* NOTE(review): presumably the SSA_NAME arm -- only integral-typed
   names are eligible for range information; confirm upstream.  */
10795 if (!INTEGRAL_TYPE_P (TREE_TYPE (t
)))
/* NOTE(review): looks like the function-local range query is used when
   CFUN is available and the global one otherwise -- the guarding
   if/else lines are not visible here; confirm upstream.  */
10799 get_range_query (cfun
)->range_of_expr (vr
, t
);
10801 get_global_range_query ()->range_of_expr (vr
, t
);
10803 if (!vr
.undefined_p () && !vr
.contains_p (w
))
10805 /* If T has some known zero bits and W has any of those bits set,
10806 then T is known not to be equal to W. */
10807 if (wi::ne_p (wi::zext (wi::bit_and_not (w
, get_nonzero_bits (t
)),
10808 TYPE_PRECISION (TREE_TYPE (t
))), 0))
10817 /* Fold a binary expression of code CODE and type TYPE with operands
10818 OP0 and OP1. LOC is the location of the resulting expression.
10819 Return the folded expression if folding is successful. Otherwise,
10820 return NULL_TREE. */
10823 fold_binary_loc (location_t loc
, enum tree_code code
, tree type
,
10824 tree op0
, tree op1
)
10826 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
10827 tree arg0
, arg1
, tem
;
10828 tree t1
= NULL_TREE
;
10829 bool strict_overflow_p
;
10832 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
10833 && TREE_CODE_LENGTH (code
) == 2
10834 && op0
!= NULL_TREE
10835 && op1
!= NULL_TREE
);
10840 /* Strip any conversions that don't change the mode. This is
10841 safe for every expression, except for a comparison expression
10842 because its signedness is derived from its operands. So, in
10843 the latter case, only strip conversions that don't change the
10844 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10847 Note that this is done as an internal manipulation within the
10848 constant folder, in order to find the simplest representation
10849 of the arguments so that their form can be studied. In any
10850 cases, the appropriate type conversions should be put back in
10851 the tree that will get out of the constant folder. */
10853 if (kind
== tcc_comparison
|| code
== MIN_EXPR
|| code
== MAX_EXPR
)
10855 STRIP_SIGN_NOPS (arg0
);
10856 STRIP_SIGN_NOPS (arg1
);
10864 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10865 constant but we can't do arithmetic on them. */
10866 if (CONSTANT_CLASS_P (arg0
) && CONSTANT_CLASS_P (arg1
))
10868 tem
= const_binop (code
, type
, arg0
, arg1
);
10869 if (tem
!= NULL_TREE
)
10871 if (TREE_TYPE (tem
) != type
)
10872 tem
= fold_convert_loc (loc
, type
, tem
);
10877 /* If this is a commutative operation, and ARG0 is a constant, move it
10878 to ARG1 to reduce the number of tests below. */
10879 if (commutative_tree_code (code
)
10880 && tree_swap_operands_p (arg0
, arg1
))
10881 return fold_build2_loc (loc
, code
, type
, op1
, op0
);
10883 /* Likewise if this is a comparison, and ARG0 is a constant, move it
10884 to ARG1 to reduce the number of tests below. */
10885 if (kind
== tcc_comparison
10886 && tree_swap_operands_p (arg0
, arg1
))
10887 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
, op1
, op0
);
10889 tem
= generic_simplify (loc
, code
, type
, op0
, op1
);
10893 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10895 First check for cases where an arithmetic operation is applied to a
10896 compound, conditional, or comparison operation. Push the arithmetic
10897 operation inside the compound or conditional to see if any folding
10898 can then be done. Convert comparison to conditional for this purpose.
10899 The also optimizes non-constant cases that used to be done in
10902 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
10903 one of the operands is a comparison and the other is a comparison, a
10904 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10905 code below would make the expression more complex. Change it to a
10906 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10907 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10909 if ((code
== BIT_AND_EXPR
|| code
== BIT_IOR_EXPR
10910 || code
== EQ_EXPR
|| code
== NE_EXPR
)
10911 && !VECTOR_TYPE_P (TREE_TYPE (arg0
))
10912 && ((truth_value_p (TREE_CODE (arg0
))
10913 && (truth_value_p (TREE_CODE (arg1
))
10914 || (TREE_CODE (arg1
) == BIT_AND_EXPR
10915 && integer_onep (TREE_OPERAND (arg1
, 1)))))
10916 || (truth_value_p (TREE_CODE (arg1
))
10917 && (truth_value_p (TREE_CODE (arg0
))
10918 || (TREE_CODE (arg0
) == BIT_AND_EXPR
10919 && integer_onep (TREE_OPERAND (arg0
, 1)))))))
10921 tem
= fold_build2_loc (loc
, code
== BIT_AND_EXPR
? TRUTH_AND_EXPR
10922 : code
== BIT_IOR_EXPR
? TRUTH_OR_EXPR
10925 fold_convert_loc (loc
, boolean_type_node
, arg0
),
10926 fold_convert_loc (loc
, boolean_type_node
, arg1
));
10928 if (code
== EQ_EXPR
)
10929 tem
= invert_truthvalue_loc (loc
, tem
);
10931 return fold_convert_loc (loc
, type
, tem
);
10934 if (TREE_CODE_CLASS (code
) == tcc_binary
10935 || TREE_CODE_CLASS (code
) == tcc_comparison
)
10937 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
10939 tem
= fold_build2_loc (loc
, code
, type
,
10940 fold_convert_loc (loc
, TREE_TYPE (op0
),
10941 TREE_OPERAND (arg0
, 1)), op1
);
10942 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
10945 if (TREE_CODE (arg1
) == COMPOUND_EXPR
)
10947 tem
= fold_build2_loc (loc
, code
, type
, op0
,
10948 fold_convert_loc (loc
, TREE_TYPE (op1
),
10949 TREE_OPERAND (arg1
, 1)));
10950 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg1
, 0),
10954 if (TREE_CODE (arg0
) == COND_EXPR
10955 || TREE_CODE (arg0
) == VEC_COND_EXPR
10956 || COMPARISON_CLASS_P (arg0
))
10958 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
10960 /*cond_first_p=*/1);
10961 if (tem
!= NULL_TREE
)
10965 if (TREE_CODE (arg1
) == COND_EXPR
10966 || TREE_CODE (arg1
) == VEC_COND_EXPR
10967 || COMPARISON_CLASS_P (arg1
))
10969 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
10971 /*cond_first_p=*/0);
10972 if (tem
!= NULL_TREE
)
10980 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10981 if (TREE_CODE (arg0
) == ADDR_EXPR
10982 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == MEM_REF
)
10984 tree iref
= TREE_OPERAND (arg0
, 0);
10985 return fold_build2 (MEM_REF
, type
,
10986 TREE_OPERAND (iref
, 0),
10987 int_const_binop (PLUS_EXPR
, arg1
,
10988 TREE_OPERAND (iref
, 1)));
10991 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10992 if (TREE_CODE (arg0
) == ADDR_EXPR
10993 && handled_component_p (TREE_OPERAND (arg0
, 0)))
10996 poly_int64 coffset
;
10997 base
= get_addr_base_and_unit_offset (TREE_OPERAND (arg0
, 0),
11001 return fold_build2 (MEM_REF
, type
,
11002 build1 (ADDR_EXPR
, TREE_TYPE (arg0
), base
),
11003 int_const_binop (PLUS_EXPR
, arg1
,
11004 size_int (coffset
)));
11009 case POINTER_PLUS_EXPR
:
11010 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
11011 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
11012 && INTEGRAL_TYPE_P (TREE_TYPE (arg0
)))
11013 return fold_convert_loc (loc
, type
,
11014 fold_build2_loc (loc
, PLUS_EXPR
, sizetype
,
11015 fold_convert_loc (loc
, sizetype
,
11017 fold_convert_loc (loc
, sizetype
,
11023 if (INTEGRAL_TYPE_P (type
) || VECTOR_INTEGER_TYPE_P (type
))
11025 /* X + (X / CST) * -CST is X % CST. */
11026 if (TREE_CODE (arg1
) == MULT_EXPR
11027 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == TRUNC_DIV_EXPR
11028 && operand_equal_p (arg0
,
11029 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0), 0))
11031 tree cst0
= TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1);
11032 tree cst1
= TREE_OPERAND (arg1
, 1);
11033 tree sum
= fold_binary_loc (loc
, PLUS_EXPR
, TREE_TYPE (cst1
),
11035 if (sum
&& integer_zerop (sum
))
11036 return fold_convert_loc (loc
, type
,
11037 fold_build2_loc (loc
, TRUNC_MOD_EXPR
,
11038 TREE_TYPE (arg0
), arg0
,
11043 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
11044 one. Make sure the type is not saturating and has the signedness of
11045 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11046 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11047 if ((TREE_CODE (arg0
) == MULT_EXPR
11048 || TREE_CODE (arg1
) == MULT_EXPR
)
11049 && !TYPE_SATURATING (type
)
11050 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg0
))
11051 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg1
))
11052 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
11054 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
11059 if (! FLOAT_TYPE_P (type
))
11061 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
11062 (plus (plus (mult) (mult)) (foo)) so that we can
11063 take advantage of the factoring cases below. */
11064 if (ANY_INTEGRAL_TYPE_P (type
)
11065 && TYPE_OVERFLOW_WRAPS (type
)
11066 && (((TREE_CODE (arg0
) == PLUS_EXPR
11067 || TREE_CODE (arg0
) == MINUS_EXPR
)
11068 && TREE_CODE (arg1
) == MULT_EXPR
)
11069 || ((TREE_CODE (arg1
) == PLUS_EXPR
11070 || TREE_CODE (arg1
) == MINUS_EXPR
)
11071 && TREE_CODE (arg0
) == MULT_EXPR
)))
11073 tree parg0
, parg1
, parg
, marg
;
11074 enum tree_code pcode
;
11076 if (TREE_CODE (arg1
) == MULT_EXPR
)
11077 parg
= arg0
, marg
= arg1
;
11079 parg
= arg1
, marg
= arg0
;
11080 pcode
= TREE_CODE (parg
);
11081 parg0
= TREE_OPERAND (parg
, 0);
11082 parg1
= TREE_OPERAND (parg
, 1);
11083 STRIP_NOPS (parg0
);
11084 STRIP_NOPS (parg1
);
11086 if (TREE_CODE (parg0
) == MULT_EXPR
11087 && TREE_CODE (parg1
) != MULT_EXPR
)
11088 return fold_build2_loc (loc
, pcode
, type
,
11089 fold_build2_loc (loc
, PLUS_EXPR
, type
,
11090 fold_convert_loc (loc
, type
,
11092 fold_convert_loc (loc
, type
,
11094 fold_convert_loc (loc
, type
, parg1
));
11095 if (TREE_CODE (parg0
) != MULT_EXPR
11096 && TREE_CODE (parg1
) == MULT_EXPR
)
11098 fold_build2_loc (loc
, PLUS_EXPR
, type
,
11099 fold_convert_loc (loc
, type
, parg0
),
11100 fold_build2_loc (loc
, pcode
, type
,
11101 fold_convert_loc (loc
, type
, marg
),
11102 fold_convert_loc (loc
, type
,
11108 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
11109 to __complex__ ( x, y ). This is not the same for SNaNs or
11110 if signed zeros are involved. */
11111 if (!HONOR_SNANS (arg0
)
11112 && !HONOR_SIGNED_ZEROS (arg0
)
11113 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
11115 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
11116 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
11117 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
11118 bool arg0rz
= false, arg0iz
= false;
11119 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
11120 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
11122 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
11123 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
11124 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
11126 tree rp
= arg1r
? arg1r
11127 : build1 (REALPART_EXPR
, rtype
, arg1
);
11128 tree ip
= arg0i
? arg0i
11129 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
11130 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
11132 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
11134 tree rp
= arg0r
? arg0r
11135 : build1 (REALPART_EXPR
, rtype
, arg0
);
11136 tree ip
= arg1i
? arg1i
11137 : build1 (IMAGPART_EXPR
, rtype
, arg1
);
11138 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
11143 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
11144 We associate floats only if the user has specified
11145 -fassociative-math. */
11146 if (flag_associative_math
11147 && TREE_CODE (arg1
) == PLUS_EXPR
11148 && TREE_CODE (arg0
) != MULT_EXPR
)
11150 tree tree10
= TREE_OPERAND (arg1
, 0);
11151 tree tree11
= TREE_OPERAND (arg1
, 1);
11152 if (TREE_CODE (tree11
) == MULT_EXPR
11153 && TREE_CODE (tree10
) == MULT_EXPR
)
11156 tree0
= fold_build2_loc (loc
, PLUS_EXPR
, type
, arg0
, tree10
);
11157 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tree0
, tree11
);
11160 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
11161 We associate floats only if the user has specified
11162 -fassociative-math. */
11163 if (flag_associative_math
11164 && TREE_CODE (arg0
) == PLUS_EXPR
11165 && TREE_CODE (arg1
) != MULT_EXPR
)
11167 tree tree00
= TREE_OPERAND (arg0
, 0);
11168 tree tree01
= TREE_OPERAND (arg0
, 1);
11169 if (TREE_CODE (tree01
) == MULT_EXPR
11170 && TREE_CODE (tree00
) == MULT_EXPR
)
11173 tree0
= fold_build2_loc (loc
, PLUS_EXPR
, type
, tree01
, arg1
);
11174 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tree00
, tree0
);
11180 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
11181 is a rotate of A by C1 bits. */
11182 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
11183 is a rotate of A by B bits.
11184 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
11185 though in this case CODE must be | and not + or ^, otherwise
11186 it doesn't return A when B is 0. */
11188 enum tree_code code0
, code1
;
11190 code0
= TREE_CODE (arg0
);
11191 code1
= TREE_CODE (arg1
);
11192 if (((code0
== RSHIFT_EXPR
&& code1
== LSHIFT_EXPR
)
11193 || (code1
== RSHIFT_EXPR
&& code0
== LSHIFT_EXPR
))
11194 && operand_equal_p (TREE_OPERAND (arg0
, 0),
11195 TREE_OPERAND (arg1
, 0), 0)
11196 && (rtype
= TREE_TYPE (TREE_OPERAND (arg0
, 0)),
11197 TYPE_UNSIGNED (rtype
))
11198 /* Only create rotates in complete modes. Other cases are not
11199 expanded properly. */
11200 && (element_precision (rtype
)
11201 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype
))))
11203 tree tree01
, tree11
;
11204 tree orig_tree01
, orig_tree11
;
11205 enum tree_code code01
, code11
;
11207 tree01
= orig_tree01
= TREE_OPERAND (arg0
, 1);
11208 tree11
= orig_tree11
= TREE_OPERAND (arg1
, 1);
11209 STRIP_NOPS (tree01
);
11210 STRIP_NOPS (tree11
);
11211 code01
= TREE_CODE (tree01
);
11212 code11
= TREE_CODE (tree11
);
11213 if (code11
!= MINUS_EXPR
11214 && (code01
== MINUS_EXPR
|| code01
== BIT_AND_EXPR
))
11216 std::swap (code0
, code1
);
11217 std::swap (code01
, code11
);
11218 std::swap (tree01
, tree11
);
11219 std::swap (orig_tree01
, orig_tree11
);
11221 if (code01
== INTEGER_CST
11222 && code11
== INTEGER_CST
11223 && (wi::to_widest (tree01
) + wi::to_widest (tree11
)
11224 == element_precision (rtype
)))
11226 tem
= build2_loc (loc
, LROTATE_EXPR
,
11227 rtype
, TREE_OPERAND (arg0
, 0),
11228 code0
== LSHIFT_EXPR
11229 ? orig_tree01
: orig_tree11
);
11230 return fold_convert_loc (loc
, type
, tem
);
11232 else if (code11
== MINUS_EXPR
)
11234 tree tree110
, tree111
;
11235 tree110
= TREE_OPERAND (tree11
, 0);
11236 tree111
= TREE_OPERAND (tree11
, 1);
11237 STRIP_NOPS (tree110
);
11238 STRIP_NOPS (tree111
);
11239 if (TREE_CODE (tree110
) == INTEGER_CST
11240 && compare_tree_int (tree110
,
11241 element_precision (rtype
)) == 0
11242 && operand_equal_p (tree01
, tree111
, 0))
11244 tem
= build2_loc (loc
, (code0
== LSHIFT_EXPR
11245 ? LROTATE_EXPR
: RROTATE_EXPR
),
11246 rtype
, TREE_OPERAND (arg0
, 0),
11248 return fold_convert_loc (loc
, type
, tem
);
11251 else if (code
== BIT_IOR_EXPR
11252 && code11
== BIT_AND_EXPR
11253 && pow2p_hwi (element_precision (rtype
)))
11255 tree tree110
, tree111
;
11256 tree110
= TREE_OPERAND (tree11
, 0);
11257 tree111
= TREE_OPERAND (tree11
, 1);
11258 STRIP_NOPS (tree110
);
11259 STRIP_NOPS (tree111
);
11260 if (TREE_CODE (tree110
) == NEGATE_EXPR
11261 && TREE_CODE (tree111
) == INTEGER_CST
11262 && compare_tree_int (tree111
,
11263 element_precision (rtype
) - 1) == 0
11264 && operand_equal_p (tree01
, TREE_OPERAND (tree110
, 0), 0))
11266 tem
= build2_loc (loc
, (code0
== LSHIFT_EXPR
11267 ? LROTATE_EXPR
: RROTATE_EXPR
),
11268 rtype
, TREE_OPERAND (arg0
, 0),
11270 return fold_convert_loc (loc
, type
, tem
);
11277 /* In most languages, can't associate operations on floats through
11278 parentheses. Rather than remember where the parentheses were, we
11279 don't associate floats at all, unless the user has specified
11280 -fassociative-math.
11281 And, we need to make sure type is not saturating. */
11283 if ((! FLOAT_TYPE_P (type
) || flag_associative_math
)
11284 && !TYPE_SATURATING (type
)
11285 && !TYPE_OVERFLOW_SANITIZED (type
))
11287 tree var0
, minus_var0
, con0
, minus_con0
, lit0
, minus_lit0
;
11288 tree var1
, minus_var1
, con1
, minus_con1
, lit1
, minus_lit1
;
11292 /* Split both trees into variables, constants, and literals. Then
11293 associate each group together, the constants with literals,
11294 then the result with variables. This increases the chances of
11295 literals being recombined later and of generating relocatable
11296 expressions for the sum of a constant and literal. */
11297 var0
= split_tree (arg0
, type
, code
,
11298 &minus_var0
, &con0
, &minus_con0
,
11299 &lit0
, &minus_lit0
, 0);
11300 var1
= split_tree (arg1
, type
, code
,
11301 &minus_var1
, &con1
, &minus_con1
,
11302 &lit1
, &minus_lit1
, code
== MINUS_EXPR
);
11304 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
11305 if (code
== MINUS_EXPR
)
11308 /* With undefined overflow prefer doing association in a type
11309 which wraps on overflow, if that is one of the operand types. */
11310 if ((POINTER_TYPE_P (type
) || INTEGRAL_TYPE_P (type
))
11311 && !TYPE_OVERFLOW_WRAPS (type
))
11313 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
11314 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
11315 atype
= TREE_TYPE (arg0
);
11316 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
11317 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1
)))
11318 atype
= TREE_TYPE (arg1
);
11319 gcc_assert (TYPE_PRECISION (atype
) == TYPE_PRECISION (type
));
11322 /* With undefined overflow we can only associate constants with one
11323 variable, and constants whose association doesn't overflow. */
11324 if ((POINTER_TYPE_P (atype
) || INTEGRAL_TYPE_P (atype
))
11325 && !TYPE_OVERFLOW_WRAPS (atype
))
11327 if ((var0
&& var1
) || (minus_var0
&& minus_var1
))
11329 /* ??? If split_tree would handle NEGATE_EXPR we could
11330 simply reject these cases and the allowed cases would
11331 be the var0/minus_var1 ones. */
11332 tree tmp0
= var0
? var0
: minus_var0
;
11333 tree tmp1
= var1
? var1
: minus_var1
;
11334 bool one_neg
= false;
11336 if (TREE_CODE (tmp0
) == NEGATE_EXPR
)
11338 tmp0
= TREE_OPERAND (tmp0
, 0);
11339 one_neg
= !one_neg
;
11341 if (CONVERT_EXPR_P (tmp0
)
11342 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0
, 0)))
11343 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0
, 0)))
11344 <= TYPE_PRECISION (atype
)))
11345 tmp0
= TREE_OPERAND (tmp0
, 0);
11346 if (TREE_CODE (tmp1
) == NEGATE_EXPR
)
11348 tmp1
= TREE_OPERAND (tmp1
, 0);
11349 one_neg
= !one_neg
;
11351 if (CONVERT_EXPR_P (tmp1
)
11352 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1
, 0)))
11353 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1
, 0)))
11354 <= TYPE_PRECISION (atype
)))
11355 tmp1
= TREE_OPERAND (tmp1
, 0);
11356 /* The only case we can still associate with two variables
11357 is if they cancel out. */
11359 || !operand_equal_p (tmp0
, tmp1
, 0))
11362 else if ((var0
&& minus_var1
11363 && ! operand_equal_p (var0
, minus_var1
, 0))
11364 || (minus_var0
&& var1
11365 && ! operand_equal_p (minus_var0
, var1
, 0)))
11369 /* Only do something if we found more than two objects. Otherwise,
11370 nothing has changed and we risk infinite recursion. */
11372 && ((var0
!= 0) + (var1
!= 0)
11373 + (minus_var0
!= 0) + (minus_var1
!= 0)
11374 + (con0
!= 0) + (con1
!= 0)
11375 + (minus_con0
!= 0) + (minus_con1
!= 0)
11376 + (lit0
!= 0) + (lit1
!= 0)
11377 + (minus_lit0
!= 0) + (minus_lit1
!= 0)) > 2)
11379 var0
= associate_trees (loc
, var0
, var1
, code
, atype
);
11380 minus_var0
= associate_trees (loc
, minus_var0
, minus_var1
,
11382 con0
= associate_trees (loc
, con0
, con1
, code
, atype
);
11383 minus_con0
= associate_trees (loc
, minus_con0
, minus_con1
,
11385 lit0
= associate_trees (loc
, lit0
, lit1
, code
, atype
);
11386 minus_lit0
= associate_trees (loc
, minus_lit0
, minus_lit1
,
11389 if (minus_var0
&& var0
)
11391 var0
= associate_trees (loc
, var0
, minus_var0
,
11392 MINUS_EXPR
, atype
);
11395 if (minus_con0
&& con0
)
11397 con0
= associate_trees (loc
, con0
, minus_con0
,
11398 MINUS_EXPR
, atype
);
11402 /* Preserve the MINUS_EXPR if the negative part of the literal is
11403 greater than the positive part. Otherwise, the multiplicative
11404 folding code (i.e extract_muldiv) may be fooled in case
11405 unsigned constants are subtracted, like in the following
11406 example: ((X*2 + 4) - 8U)/2. */
11407 if (minus_lit0
&& lit0
)
11409 if (TREE_CODE (lit0
) == INTEGER_CST
11410 && TREE_CODE (minus_lit0
) == INTEGER_CST
11411 && tree_int_cst_lt (lit0
, minus_lit0
)
11412 /* But avoid ending up with only negated parts. */
11415 minus_lit0
= associate_trees (loc
, minus_lit0
, lit0
,
11416 MINUS_EXPR
, atype
);
11421 lit0
= associate_trees (loc
, lit0
, minus_lit0
,
11422 MINUS_EXPR
, atype
);
11427 /* Don't introduce overflows through reassociation. */
11428 if ((lit0
&& TREE_OVERFLOW_P (lit0
))
11429 || (minus_lit0
&& TREE_OVERFLOW_P (minus_lit0
)))
11432 /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
11433 con0
= associate_trees (loc
, con0
, lit0
, code
, atype
);
11435 minus_con0
= associate_trees (loc
, minus_con0
, minus_lit0
,
11439 /* Eliminate minus_con0. */
11443 con0
= associate_trees (loc
, con0
, minus_con0
,
11444 MINUS_EXPR
, atype
);
11446 var0
= associate_trees (loc
, var0
, minus_con0
,
11447 MINUS_EXPR
, atype
);
11449 gcc_unreachable ();
11453 /* Eliminate minus_var0. */
11457 con0
= associate_trees (loc
, con0
, minus_var0
,
11458 MINUS_EXPR
, atype
);
11460 gcc_unreachable ();
11465 fold_convert_loc (loc
, type
, associate_trees (loc
, var0
, con0
,
11472 case POINTER_DIFF_EXPR
:
11474 /* Fold &a[i] - &a[j] to i-j. */
11475 if (TREE_CODE (arg0
) == ADDR_EXPR
11476 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == ARRAY_REF
11477 && TREE_CODE (arg1
) == ADDR_EXPR
11478 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == ARRAY_REF
)
11480 tree tem
= fold_addr_of_array_ref_difference (loc
, type
,
11481 TREE_OPERAND (arg0
, 0),
11482 TREE_OPERAND (arg1
, 0),
11484 == POINTER_DIFF_EXPR
);
11489 /* Further transformations are not for pointers. */
11490 if (code
== POINTER_DIFF_EXPR
)
11493 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
11494 if (TREE_CODE (arg0
) == NEGATE_EXPR
11495 && negate_expr_p (op1
)
11496 /* If arg0 is e.g. unsigned int and type is int, then this could
11497 introduce UB, because if A is INT_MIN at runtime, the original
11498 expression can be well defined while the latter is not.
11500 && !(ANY_INTEGRAL_TYPE_P (type
)
11501 && TYPE_OVERFLOW_UNDEFINED (type
)
11502 && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
11503 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))))
11504 return fold_build2_loc (loc
, MINUS_EXPR
, type
, negate_expr (op1
),
11505 fold_convert_loc (loc
, type
,
11506 TREE_OPERAND (arg0
, 0)));
11508 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
11509 __complex__ ( x, -y ). This is not the same for SNaNs or if
11510 signed zeros are involved. */
11511 if (!HONOR_SNANS (arg0
)
11512 && !HONOR_SIGNED_ZEROS (arg0
)
11513 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
11515 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
11516 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
11517 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
11518 bool arg0rz
= false, arg0iz
= false;
11519 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
11520 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
11522 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
11523 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
11524 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
11526 tree rp
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
11528 : build1 (REALPART_EXPR
, rtype
, arg1
));
11529 tree ip
= arg0i
? arg0i
11530 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
11531 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
11533 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
11535 tree rp
= arg0r
? arg0r
11536 : build1 (REALPART_EXPR
, rtype
, arg0
);
11537 tree ip
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
11539 : build1 (IMAGPART_EXPR
, rtype
, arg1
));
11540 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
11545 /* A - B -> A + (-B) if B is easily negatable. */
11546 if (negate_expr_p (op1
)
11547 && ! TYPE_OVERFLOW_SANITIZED (type
)
11548 && ((FLOAT_TYPE_P (type
)
11549 /* Avoid this transformation if B is a positive REAL_CST. */
11550 && (TREE_CODE (op1
) != REAL_CST
11551 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1
))))
11552 || INTEGRAL_TYPE_P (type
)))
11553 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
11554 fold_convert_loc (loc
, type
, arg0
),
11555 negate_expr (op1
));
11557 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
11558 one. Make sure the type is not saturating and has the signedness of
11559 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11560 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11561 if ((TREE_CODE (arg0
) == MULT_EXPR
11562 || TREE_CODE (arg1
) == MULT_EXPR
)
11563 && !TYPE_SATURATING (type
)
11564 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg0
))
11565 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg1
))
11566 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
11568 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
11576 if (! FLOAT_TYPE_P (type
))
11578 /* Transform x * -C into -x * C if x is easily negatable. */
11579 if (TREE_CODE (op1
) == INTEGER_CST
11580 && tree_int_cst_sgn (op1
) == -1
11581 && negate_expr_p (op0
)
11582 && negate_expr_p (op1
)
11583 && (tem
= negate_expr (op1
)) != op1
11584 && ! TREE_OVERFLOW (tem
))
11585 return fold_build2_loc (loc
, MULT_EXPR
, type
,
11586 fold_convert_loc (loc
, type
,
11587 negate_expr (op0
)), tem
);
11589 strict_overflow_p
= false;
11590 if (TREE_CODE (arg1
) == INTEGER_CST
11591 && (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
11592 &strict_overflow_p
)) != 0)
11594 if (strict_overflow_p
)
11595 fold_overflow_warning (("assuming signed overflow does not "
11596 "occur when simplifying "
11598 WARN_STRICT_OVERFLOW_MISC
);
11599 return fold_convert_loc (loc
, type
, tem
);
11602 /* Optimize z * conj(z) for integer complex numbers. */
11603 if (TREE_CODE (arg0
) == CONJ_EXPR
11604 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11605 return fold_mult_zconjz (loc
, type
, arg1
);
11606 if (TREE_CODE (arg1
) == CONJ_EXPR
11607 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11608 return fold_mult_zconjz (loc
, type
, arg0
);
11612 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11613 This is not the same for NaNs or if signed zeros are
11615 if (!HONOR_NANS (arg0
)
11616 && !HONOR_SIGNED_ZEROS (arg0
)
11617 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
))
11618 && TREE_CODE (arg1
) == COMPLEX_CST
11619 && real_zerop (TREE_REALPART (arg1
)))
11621 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
11622 if (real_onep (TREE_IMAGPART (arg1
)))
11624 fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
11625 negate_expr (fold_build1_loc (loc
, IMAGPART_EXPR
,
11627 fold_build1_loc (loc
, REALPART_EXPR
, rtype
, arg0
));
11628 else if (real_minus_onep (TREE_IMAGPART (arg1
)))
11630 fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
11631 fold_build1_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
),
11632 negate_expr (fold_build1_loc (loc
, REALPART_EXPR
,
11636 /* Optimize z * conj(z) for floating point complex numbers.
11637 Guarded by flag_unsafe_math_optimizations as non-finite
11638 imaginary components don't produce scalar results. */
11639 if (flag_unsafe_math_optimizations
11640 && TREE_CODE (arg0
) == CONJ_EXPR
11641 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11642 return fold_mult_zconjz (loc
, type
, arg1
);
11643 if (flag_unsafe_math_optimizations
11644 && TREE_CODE (arg1
) == CONJ_EXPR
11645 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11646 return fold_mult_zconjz (loc
, type
, arg0
);
11651 /* Canonicalize (X & C1) | C2. */
11652 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11653 && TREE_CODE (arg1
) == INTEGER_CST
11654 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11656 int width
= TYPE_PRECISION (type
), w
;
11657 wide_int c1
= wi::to_wide (TREE_OPERAND (arg0
, 1));
11658 wide_int c2
= wi::to_wide (arg1
);
11660 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11661 if ((c1
& c2
) == c1
)
11662 return omit_one_operand_loc (loc
, type
, arg1
,
11663 TREE_OPERAND (arg0
, 0));
11665 wide_int msk
= wi::mask (width
, false,
11666 TYPE_PRECISION (TREE_TYPE (arg1
)));
11668 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11669 if (wi::bit_and_not (msk
, c1
| c2
) == 0)
11671 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11672 return fold_build2_loc (loc
, BIT_IOR_EXPR
, type
, tem
, arg1
);
11675 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11676 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11677 mode which allows further optimizations. */
11680 wide_int c3
= wi::bit_and_not (c1
, c2
);
11681 for (w
= BITS_PER_UNIT
; w
<= width
; w
<<= 1)
11683 wide_int mask
= wi::mask (w
, false,
11684 TYPE_PRECISION (type
));
11685 if (((c1
| c2
) & mask
) == mask
11686 && wi::bit_and_not (c1
, mask
) == 0)
11695 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11696 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
, tem
,
11697 wide_int_to_tree (type
, c3
));
11698 return fold_build2_loc (loc
, BIT_IOR_EXPR
, type
, tem
, arg1
);
11702 /* See if this can be simplified into a rotate first. If that
11703 is unsuccessful continue in the association code. */
11707 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11708 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11709 && INTEGRAL_TYPE_P (type
)
11710 && integer_onep (TREE_OPERAND (arg0
, 1))
11711 && integer_onep (arg1
))
11712 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
,
11713 build_zero_cst (TREE_TYPE (arg0
)));
11715 /* See if this can be simplified into a rotate first. If that
11716 is unsuccessful continue in the association code. */
11720 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11721 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11722 && INTEGRAL_TYPE_P (type
)
11723 && integer_onep (TREE_OPERAND (arg0
, 1))
11724 && integer_onep (arg1
))
11727 tem
= TREE_OPERAND (arg0
, 0);
11728 tem2
= fold_convert_loc (loc
, TREE_TYPE (tem
), arg1
);
11729 tem2
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
),
11731 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem2
,
11732 build_zero_cst (TREE_TYPE (tem
)));
11734 /* Fold ~X & 1 as (X & 1) == 0. */
11735 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11736 && INTEGRAL_TYPE_P (type
)
11737 && integer_onep (arg1
))
11740 tem
= TREE_OPERAND (arg0
, 0);
11741 tem2
= fold_convert_loc (loc
, TREE_TYPE (tem
), arg1
);
11742 tem2
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
),
11744 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem2
,
11745 build_zero_cst (TREE_TYPE (tem
)));
11747 /* Fold !X & 1 as X == 0. */
11748 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
11749 && integer_onep (arg1
))
11751 tem
= TREE_OPERAND (arg0
, 0);
11752 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem
,
11753 build_zero_cst (TREE_TYPE (tem
)));
11756 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11757 multiple of 1 << CST. */
11758 if (TREE_CODE (arg1
) == INTEGER_CST
)
11760 wi::tree_to_wide_ref cst1
= wi::to_wide (arg1
);
11761 wide_int ncst1
= -cst1
;
11762 if ((cst1
& ncst1
) == ncst1
11763 && multiple_of_p (type
, arg0
,
11764 wide_int_to_tree (TREE_TYPE (arg1
), ncst1
)))
11765 return fold_convert_loc (loc
, type
, arg0
);
11768 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11770 if (TREE_CODE (arg1
) == INTEGER_CST
11771 && TREE_CODE (arg0
) == MULT_EXPR
11772 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11774 wi::tree_to_wide_ref warg1
= wi::to_wide (arg1
);
11776 = mask_with_tz (type
, warg1
, wi::to_wide (TREE_OPERAND (arg0
, 1)));
11779 return omit_two_operands_loc (loc
, type
, build_zero_cst (type
),
11781 else if (masked
!= warg1
)
11783 /* Avoid the transform if arg1 is a mask of some
11784 mode which allows further optimizations. */
11785 int pop
= wi::popcount (warg1
);
11786 if (!(pop
>= BITS_PER_UNIT
11788 && wi::mask (pop
, false, warg1
.get_precision ()) == warg1
))
11789 return fold_build2_loc (loc
, code
, type
, op0
,
11790 wide_int_to_tree (type
, masked
));
11794 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11795 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) == NOP_EXPR
11796 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0
, 0))))
11798 prec
= element_precision (TREE_TYPE (TREE_OPERAND (arg0
, 0)));
11800 wide_int mask
= wide_int::from (wi::to_wide (arg1
), prec
, UNSIGNED
);
11803 fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11809 /* Don't touch a floating-point divide by zero unless the mode
11810 of the constant can represent infinity. */
11811 if (TREE_CODE (arg1
) == REAL_CST
11812 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1
)))
11813 && real_zerop (arg1
))
11816 /* (-A) / (-B) -> A / B */
11817 if (TREE_CODE (arg0
) == NEGATE_EXPR
&& negate_expr_p (arg1
))
11818 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11819 TREE_OPERAND (arg0
, 0),
11820 negate_expr (arg1
));
11821 if (TREE_CODE (arg1
) == NEGATE_EXPR
&& negate_expr_p (arg0
))
11822 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11823 negate_expr (arg0
),
11824 TREE_OPERAND (arg1
, 0));
11827 case TRUNC_DIV_EXPR
:
11830 case FLOOR_DIV_EXPR
:
11831 /* Simplify A / (B << N) where A and B are positive and B is
11832 a power of 2, to A >> (N + log2(B)). */
11833 strict_overflow_p
= false;
11834 if (TREE_CODE (arg1
) == LSHIFT_EXPR
11835 && (TYPE_UNSIGNED (type
)
11836 || tree_expr_nonnegative_warnv_p (op0
, &strict_overflow_p
)))
11838 tree sval
= TREE_OPERAND (arg1
, 0);
11839 if (integer_pow2p (sval
) && tree_int_cst_sgn (sval
) > 0)
11841 tree sh_cnt
= TREE_OPERAND (arg1
, 1);
11842 tree pow2
= build_int_cst (TREE_TYPE (sh_cnt
),
11843 wi::exact_log2 (wi::to_wide (sval
)));
11845 if (strict_overflow_p
)
11846 fold_overflow_warning (("assuming signed overflow does not "
11847 "occur when simplifying A / (B << N)"),
11848 WARN_STRICT_OVERFLOW_MISC
);
11850 sh_cnt
= fold_build2_loc (loc
, PLUS_EXPR
, TREE_TYPE (sh_cnt
),
11852 return fold_build2_loc (loc
, RSHIFT_EXPR
, type
,
11853 fold_convert_loc (loc
, type
, arg0
), sh_cnt
);
11859 case ROUND_DIV_EXPR
:
11860 case CEIL_DIV_EXPR
:
11861 case EXACT_DIV_EXPR
:
11862 if (integer_zerop (arg1
))
11865 /* Convert -A / -B to A / B when the type is signed and overflow is
11867 if ((!ANY_INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
11868 && TREE_CODE (op0
) == NEGATE_EXPR
11869 && negate_expr_p (op1
))
11871 if (ANY_INTEGRAL_TYPE_P (type
))
11872 fold_overflow_warning (("assuming signed overflow does not occur "
11873 "when distributing negation across "
11875 WARN_STRICT_OVERFLOW_MISC
);
11876 return fold_build2_loc (loc
, code
, type
,
11877 fold_convert_loc (loc
, type
,
11878 TREE_OPERAND (arg0
, 0)),
11879 negate_expr (op1
));
11881 if ((!ANY_INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
11882 && TREE_CODE (arg1
) == NEGATE_EXPR
11883 && negate_expr_p (op0
))
11885 if (ANY_INTEGRAL_TYPE_P (type
))
11886 fold_overflow_warning (("assuming signed overflow does not occur "
11887 "when distributing negation across "
11889 WARN_STRICT_OVERFLOW_MISC
);
11890 return fold_build2_loc (loc
, code
, type
,
11892 fold_convert_loc (loc
, type
,
11893 TREE_OPERAND (arg1
, 0)));
11896 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11897 operation, EXACT_DIV_EXPR.
11899 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11900 At one time others generated faster code, it's not clear if they do
11901 after the last round to changes to the DIV code in expmed.cc. */
11902 if ((code
== CEIL_DIV_EXPR
|| code
== FLOOR_DIV_EXPR
)
11903 && multiple_of_p (type
, arg0
, arg1
))
11904 return fold_build2_loc (loc
, EXACT_DIV_EXPR
, type
,
11905 fold_convert (type
, arg0
),
11906 fold_convert (type
, arg1
));
11908 strict_overflow_p
= false;
11909 if (TREE_CODE (arg1
) == INTEGER_CST
11910 && (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
11911 &strict_overflow_p
)) != 0)
11913 if (strict_overflow_p
)
11914 fold_overflow_warning (("assuming signed overflow does not occur "
11915 "when simplifying division"),
11916 WARN_STRICT_OVERFLOW_MISC
);
11917 return fold_convert_loc (loc
, type
, tem
);
11922 case CEIL_MOD_EXPR
:
11923 case FLOOR_MOD_EXPR
:
11924 case ROUND_MOD_EXPR
:
11925 case TRUNC_MOD_EXPR
:
11926 strict_overflow_p
= false;
11927 if (TREE_CODE (arg1
) == INTEGER_CST
11928 && (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
11929 &strict_overflow_p
)) != 0)
11931 if (strict_overflow_p
)
11932 fold_overflow_warning (("assuming signed overflow does not occur "
11933 "when simplifying modulus"),
11934 WARN_STRICT_OVERFLOW_MISC
);
11935 return fold_convert_loc (loc
, type
, tem
);
11944 /* Since negative shift count is not well-defined,
11945 don't try to compute it in the compiler. */
11946 if (TREE_CODE (arg1
) == INTEGER_CST
&& tree_int_cst_sgn (arg1
) < 0)
11949 prec
= element_precision (type
);
11951 /* If we have a rotate of a bit operation with the rotate count and
11952 the second operand of the bit operation both constant,
11953 permute the two operations. */
11954 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
11955 && (TREE_CODE (arg0
) == BIT_AND_EXPR
11956 || TREE_CODE (arg0
) == BIT_IOR_EXPR
11957 || TREE_CODE (arg0
) == BIT_XOR_EXPR
)
11958 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11960 tree arg00
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11961 tree arg01
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11962 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
,
11963 fold_build2_loc (loc
, code
, type
,
11965 fold_build2_loc (loc
, code
, type
,
11969 /* Two consecutive rotates adding up to the some integer
11970 multiple of the precision of the type can be ignored. */
11971 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
11972 && TREE_CODE (arg0
) == RROTATE_EXPR
11973 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
11974 && wi::umod_trunc (wi::to_wide (arg1
)
11975 + wi::to_wide (TREE_OPERAND (arg0
, 1)),
11977 return fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11985 case TRUTH_ANDIF_EXPR
:
11986 /* Note that the operands of this must be ints
11987 and their values must be 0 or 1.
11988 ("true" is a fixed value perhaps depending on the language.) */
11989 /* If first arg is constant zero, return it. */
11990 if (integer_zerop (arg0
))
11991 return fold_convert_loc (loc
, type
, arg0
);
11993 case TRUTH_AND_EXPR
:
11994 /* If either arg is constant true, drop it. */
11995 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
11996 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
11997 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
)
11998 /* Preserve sequence points. */
11999 && (code
!= TRUTH_ANDIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
12000 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12001 /* If second arg is constant zero, result is zero, but first arg
12002 must be evaluated. */
12003 if (integer_zerop (arg1
))
12004 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
12005 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12006 case will be handled here. */
12007 if (integer_zerop (arg0
))
12008 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12010 /* !X && X is always false. */
12011 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
12012 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
12013 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg1
);
12014 /* X && !X is always false. */
12015 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
12016 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
12017 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
12019 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12020 means A >= Y && A != MAX, but in this case we know that
12023 if (!TREE_SIDE_EFFECTS (arg0
)
12024 && !TREE_SIDE_EFFECTS (arg1
))
12026 tem
= fold_to_nonsharp_ineq_using_bound (loc
, arg0
, arg1
);
12027 if (tem
&& !operand_equal_p (tem
, arg0
, 0))
12028 return fold_convert (type
,
12029 fold_build2_loc (loc
, code
, TREE_TYPE (arg1
),
12032 tem
= fold_to_nonsharp_ineq_using_bound (loc
, arg1
, arg0
);
12033 if (tem
&& !operand_equal_p (tem
, arg1
, 0))
12034 return fold_convert (type
,
12035 fold_build2_loc (loc
, code
, TREE_TYPE (arg0
),
12039 if ((tem
= fold_truth_andor (loc
, code
, type
, arg0
, arg1
, op0
, op1
))
12045 case TRUTH_ORIF_EXPR
:
12046 /* Note that the operands of this must be ints
12047 and their values must be 0 or true.
12048 ("true" is a fixed value perhaps depending on the language.) */
12049 /* If first arg is constant true, return it. */
12050 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
12051 return fold_convert_loc (loc
, type
, arg0
);
12053 case TRUTH_OR_EXPR
:
12054 /* If either arg is constant zero, drop it. */
12055 if (TREE_CODE (arg0
) == INTEGER_CST
&& integer_zerop (arg0
))
12056 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
12057 if (TREE_CODE (arg1
) == INTEGER_CST
&& integer_zerop (arg1
)
12058 /* Preserve sequence points. */
12059 && (code
!= TRUTH_ORIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
12060 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12061 /* If second arg is constant true, result is true, but we must
12062 evaluate first arg. */
12063 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
))
12064 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
12065 /* Likewise for first arg, but note this only occurs here for
12067 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
12068 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12070 /* !X || X is always true. */
12071 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
12072 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
12073 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg1
);
12074 /* X || !X is always true. */
12075 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
12076 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
12077 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
12079 /* (X && !Y) || (!X && Y) is X ^ Y */
12080 if (TREE_CODE (arg0
) == TRUTH_AND_EXPR
12081 && TREE_CODE (arg1
) == TRUTH_AND_EXPR
)
12083 tree a0
, a1
, l0
, l1
, n0
, n1
;
12085 a0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
12086 a1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
12088 l0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
12089 l1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
12091 n0
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, l0
);
12092 n1
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, l1
);
12094 if ((operand_equal_p (n0
, a0
, 0)
12095 && operand_equal_p (n1
, a1
, 0))
12096 || (operand_equal_p (n0
, a1
, 0)
12097 && operand_equal_p (n1
, a0
, 0)))
12098 return fold_build2_loc (loc
, TRUTH_XOR_EXPR
, type
, l0
, n1
);
12101 if ((tem
= fold_truth_andor (loc
, code
, type
, arg0
, arg1
, op0
, op1
))
12107 case TRUTH_XOR_EXPR
:
12108 /* If the second arg is constant zero, drop it. */
12109 if (integer_zerop (arg1
))
12110 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12111 /* If the second arg is constant true, this is a logical inversion. */
12112 if (integer_onep (arg1
))
12114 tem
= invert_truthvalue_loc (loc
, arg0
);
12115 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
12117 /* Identical arguments cancel to zero. */
12118 if (operand_equal_p (arg0
, arg1
, 0))
12119 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
12121 /* !X ^ X is always true. */
12122 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
12123 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
12124 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg1
);
12126 /* X ^ !X is always true. */
12127 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
12128 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
12129 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
12138 tem
= fold_comparison (loc
, code
, type
, op0
, op1
);
12139 if (tem
!= NULL_TREE
)
12142 /* bool_var != 1 becomes !bool_var. */
12143 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_onep (arg1
)
12144 && code
== NE_EXPR
)
12145 return fold_convert_loc (loc
, type
,
12146 fold_build1_loc (loc
, TRUTH_NOT_EXPR
,
12147 TREE_TYPE (arg0
), arg0
));
12149 /* bool_var == 0 becomes !bool_var. */
12150 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_zerop (arg1
)
12151 && code
== EQ_EXPR
)
12152 return fold_convert_loc (loc
, type
,
12153 fold_build1_loc (loc
, TRUTH_NOT_EXPR
,
12154 TREE_TYPE (arg0
), arg0
));
12156 /* !exp != 0 becomes !exp */
12157 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
&& integer_zerop (arg1
)
12158 && code
== NE_EXPR
)
12159 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12161 /* If this is an EQ or NE comparison with zero and ARG0 is
12162 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12163 two operations, but the latter can be done in one less insn
12164 on machines that have only two-operand insns or on which a
12165 constant cannot be the first operand. */
12166 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12167 && integer_zerop (arg1
))
12169 tree arg00
= TREE_OPERAND (arg0
, 0);
12170 tree arg01
= TREE_OPERAND (arg0
, 1);
12171 if (TREE_CODE (arg00
) == LSHIFT_EXPR
12172 && integer_onep (TREE_OPERAND (arg00
, 0)))
12174 tree tem
= fold_build2_loc (loc
, RSHIFT_EXPR
, TREE_TYPE (arg00
),
12175 arg01
, TREE_OPERAND (arg00
, 1));
12176 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
), tem
,
12177 build_one_cst (TREE_TYPE (arg0
)));
12178 return fold_build2_loc (loc
, code
, type
,
12179 fold_convert_loc (loc
, TREE_TYPE (arg1
),
12182 else if (TREE_CODE (arg01
) == LSHIFT_EXPR
12183 && integer_onep (TREE_OPERAND (arg01
, 0)))
12185 tree tem
= fold_build2_loc (loc
, RSHIFT_EXPR
, TREE_TYPE (arg01
),
12186 arg00
, TREE_OPERAND (arg01
, 1));
12187 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
), tem
,
12188 build_one_cst (TREE_TYPE (arg0
)));
12189 return fold_build2_loc (loc
, code
, type
,
12190 fold_convert_loc (loc
, TREE_TYPE (arg1
),
12195 /* If this is a comparison of a field, we may be able to simplify it. */
12196 if ((TREE_CODE (arg0
) == COMPONENT_REF
12197 || TREE_CODE (arg0
) == BIT_FIELD_REF
)
12198 /* Handle the constant case even without -O
12199 to make sure the warnings are given. */
12200 && (optimize
|| TREE_CODE (arg1
) == INTEGER_CST
))
12202 t1
= optimize_bit_field_compare (loc
, code
, type
, arg0
, arg1
);
12207 /* Optimize comparisons of strlen vs zero to a compare of the
12208 first character of the string vs zero. To wit,
12209 strlen(ptr) == 0 => *ptr == 0
12210 strlen(ptr) != 0 => *ptr != 0
12211 Other cases should reduce to one of these two (or a constant)
12212 due to the return value of strlen being unsigned. */
12213 if (TREE_CODE (arg0
) == CALL_EXPR
&& integer_zerop (arg1
))
12215 tree fndecl
= get_callee_fndecl (arg0
);
12218 && fndecl_built_in_p (fndecl
, BUILT_IN_STRLEN
)
12219 && call_expr_nargs (arg0
) == 1
12220 && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0
, 0)))
12224 = build_pointer_type (build_qualified_type (char_type_node
,
12226 tree ptr
= fold_convert_loc (loc
, ptrtype
,
12227 CALL_EXPR_ARG (arg0
, 0));
12228 tree iref
= build_fold_indirect_ref_loc (loc
, ptr
);
12229 return fold_build2_loc (loc
, code
, type
, iref
,
12230 build_int_cst (TREE_TYPE (iref
), 0));
12234 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12235 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12236 if (TREE_CODE (arg0
) == RSHIFT_EXPR
12237 && integer_zerop (arg1
)
12238 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12240 tree arg00
= TREE_OPERAND (arg0
, 0);
12241 tree arg01
= TREE_OPERAND (arg0
, 1);
12242 tree itype
= TREE_TYPE (arg00
);
12243 if (wi::to_wide (arg01
) == element_precision (itype
) - 1)
12245 if (TYPE_UNSIGNED (itype
))
12247 itype
= signed_type_for (itype
);
12248 arg00
= fold_convert_loc (loc
, itype
, arg00
);
12250 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
,
12251 type
, arg00
, build_zero_cst (itype
));
12255 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12256 (X & C) == 0 when C is a single bit. */
12257 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12258 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_NOT_EXPR
12259 && integer_zerop (arg1
)
12260 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
12262 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
12263 TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0),
12264 TREE_OPERAND (arg0
, 1));
12265 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
,
12267 fold_convert_loc (loc
, TREE_TYPE (arg0
),
12271 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12272 constant C is a power of two, i.e. a single bit. */
12273 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12274 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
12275 && integer_zerop (arg1
)
12276 && integer_pow2p (TREE_OPERAND (arg0
, 1))
12277 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
12278 TREE_OPERAND (arg0
, 1), OEP_ONLY_CONST
))
12280 tree arg00
= TREE_OPERAND (arg0
, 0);
12281 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
12282 arg00
, build_int_cst (TREE_TYPE (arg00
), 0));
12285 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12286 when is C is a power of two, i.e. a single bit. */
12287 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12288 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_XOR_EXPR
12289 && integer_zerop (arg1
)
12290 && integer_pow2p (TREE_OPERAND (arg0
, 1))
12291 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
12292 TREE_OPERAND (arg0
, 1), OEP_ONLY_CONST
))
12294 tree arg000
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
12295 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg000
),
12296 arg000
, TREE_OPERAND (arg0
, 1));
12297 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
12298 tem
, build_int_cst (TREE_TYPE (tem
), 0));
12301 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12302 && TREE_CODE (arg1
) == BIT_XOR_EXPR
)
12304 tree arg00
= TREE_OPERAND (arg0
, 0);
12305 tree arg01
= TREE_OPERAND (arg0
, 1);
12306 tree arg10
= TREE_OPERAND (arg1
, 0);
12307 tree arg11
= TREE_OPERAND (arg1
, 1);
12308 tree itype
= TREE_TYPE (arg0
);
12310 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12311 operand_equal_p guarantees no side-effects so we don't need
12312 to use omit_one_operand on Z. */
12313 if (operand_equal_p (arg01
, arg11
, 0))
12314 return fold_build2_loc (loc
, code
, type
, arg00
,
12315 fold_convert_loc (loc
, TREE_TYPE (arg00
),
12317 if (operand_equal_p (arg01
, arg10
, 0))
12318 return fold_build2_loc (loc
, code
, type
, arg00
,
12319 fold_convert_loc (loc
, TREE_TYPE (arg00
),
12321 if (operand_equal_p (arg00
, arg11
, 0))
12322 return fold_build2_loc (loc
, code
, type
, arg01
,
12323 fold_convert_loc (loc
, TREE_TYPE (arg01
),
12325 if (operand_equal_p (arg00
, arg10
, 0))
12326 return fold_build2_loc (loc
, code
, type
, arg01
,
12327 fold_convert_loc (loc
, TREE_TYPE (arg01
),
12330 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12331 if (TREE_CODE (arg01
) == INTEGER_CST
12332 && TREE_CODE (arg11
) == INTEGER_CST
)
12334 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg01
,
12335 fold_convert_loc (loc
, itype
, arg11
));
12336 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg00
, tem
);
12337 return fold_build2_loc (loc
, code
, type
, tem
,
12338 fold_convert_loc (loc
, itype
, arg10
));
12342 /* Attempt to simplify equality/inequality comparisons of complex
12343 values. Only lower the comparison if the result is known or
12344 can be simplified to a single scalar comparison. */
12345 if ((TREE_CODE (arg0
) == COMPLEX_EXPR
12346 || TREE_CODE (arg0
) == COMPLEX_CST
)
12347 && (TREE_CODE (arg1
) == COMPLEX_EXPR
12348 || TREE_CODE (arg1
) == COMPLEX_CST
))
12350 tree real0
, imag0
, real1
, imag1
;
12353 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
12355 real0
= TREE_OPERAND (arg0
, 0);
12356 imag0
= TREE_OPERAND (arg0
, 1);
12360 real0
= TREE_REALPART (arg0
);
12361 imag0
= TREE_IMAGPART (arg0
);
12364 if (TREE_CODE (arg1
) == COMPLEX_EXPR
)
12366 real1
= TREE_OPERAND (arg1
, 0);
12367 imag1
= TREE_OPERAND (arg1
, 1);
12371 real1
= TREE_REALPART (arg1
);
12372 imag1
= TREE_IMAGPART (arg1
);
12375 rcond
= fold_binary_loc (loc
, code
, type
, real0
, real1
);
12376 if (rcond
&& TREE_CODE (rcond
) == INTEGER_CST
)
12378 if (integer_zerop (rcond
))
12380 if (code
== EQ_EXPR
)
12381 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
12383 return fold_build2_loc (loc
, NE_EXPR
, type
, imag0
, imag1
);
12387 if (code
== NE_EXPR
)
12388 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
12390 return fold_build2_loc (loc
, EQ_EXPR
, type
, imag0
, imag1
);
12394 icond
= fold_binary_loc (loc
, code
, type
, imag0
, imag1
);
12395 if (icond
&& TREE_CODE (icond
) == INTEGER_CST
)
12397 if (integer_zerop (icond
))
12399 if (code
== EQ_EXPR
)
12400 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
12402 return fold_build2_loc (loc
, NE_EXPR
, type
, real0
, real1
);
12406 if (code
== NE_EXPR
)
12407 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
12409 return fold_build2_loc (loc
, EQ_EXPR
, type
, real0
, real1
);
12420 tem
= fold_comparison (loc
, code
, type
, op0
, op1
);
12421 if (tem
!= NULL_TREE
)
12424 /* Transform comparisons of the form X +- C CMP X. */
12425 if ((TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
12426 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
12427 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
12428 && !HONOR_SNANS (arg0
))
12430 tree arg01
= TREE_OPERAND (arg0
, 1);
12431 enum tree_code code0
= TREE_CODE (arg0
);
12432 int is_positive
= REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01
)) ? -1 : 1;
12434 /* (X - c) > X becomes false. */
12435 if (code
== GT_EXPR
12436 && ((code0
== MINUS_EXPR
&& is_positive
>= 0)
12437 || (code0
== PLUS_EXPR
&& is_positive
<= 0)))
12438 return constant_boolean_node (0, type
);
12440 /* Likewise (X + c) < X becomes false. */
12441 if (code
== LT_EXPR
12442 && ((code0
== PLUS_EXPR
&& is_positive
>= 0)
12443 || (code0
== MINUS_EXPR
&& is_positive
<= 0)))
12444 return constant_boolean_node (0, type
);
12446 /* Convert (X - c) <= X to true. */
12447 if (!HONOR_NANS (arg1
)
12449 && ((code0
== MINUS_EXPR
&& is_positive
>= 0)
12450 || (code0
== PLUS_EXPR
&& is_positive
<= 0)))
12451 return constant_boolean_node (1, type
);
12453 /* Convert (X + c) >= X to true. */
12454 if (!HONOR_NANS (arg1
)
12456 && ((code0
== PLUS_EXPR
&& is_positive
>= 0)
12457 || (code0
== MINUS_EXPR
&& is_positive
<= 0)))
12458 return constant_boolean_node (1, type
);
12461 /* If we are comparing an ABS_EXPR with a constant, we can
12462 convert all the cases into explicit comparisons, but they may
12463 well not be faster than doing the ABS and one comparison.
12464 But ABS (X) <= C is a range comparison, which becomes a subtraction
12465 and a comparison, and is probably faster. */
12466 if (code
== LE_EXPR
12467 && TREE_CODE (arg1
) == INTEGER_CST
12468 && TREE_CODE (arg0
) == ABS_EXPR
12469 && ! TREE_SIDE_EFFECTS (arg0
)
12470 && (tem
= negate_expr (arg1
)) != 0
12471 && TREE_CODE (tem
) == INTEGER_CST
12472 && !TREE_OVERFLOW (tem
))
12473 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
12474 build2 (GE_EXPR
, type
,
12475 TREE_OPERAND (arg0
, 0), tem
),
12476 build2 (LE_EXPR
, type
,
12477 TREE_OPERAND (arg0
, 0), arg1
));
12479 /* Convert ABS_EXPR<x> >= 0 to true. */
12480 strict_overflow_p
= false;
12481 if (code
== GE_EXPR
12482 && (integer_zerop (arg1
)
12483 || (! HONOR_NANS (arg0
)
12484 && real_zerop (arg1
)))
12485 && tree_expr_nonnegative_warnv_p (arg0
, &strict_overflow_p
))
12487 if (strict_overflow_p
)
12488 fold_overflow_warning (("assuming signed overflow does not occur "
12489 "when simplifying comparison of "
12490 "absolute value and zero"),
12491 WARN_STRICT_OVERFLOW_CONDITIONAL
);
12492 return omit_one_operand_loc (loc
, type
,
12493 constant_boolean_node (true, type
),
12497 /* Convert ABS_EXPR<x> < 0 to false. */
12498 strict_overflow_p
= false;
12499 if (code
== LT_EXPR
12500 && (integer_zerop (arg1
) || real_zerop (arg1
))
12501 && tree_expr_nonnegative_warnv_p (arg0
, &strict_overflow_p
))
12503 if (strict_overflow_p
)
12504 fold_overflow_warning (("assuming signed overflow does not occur "
12505 "when simplifying comparison of "
12506 "absolute value and zero"),
12507 WARN_STRICT_OVERFLOW_CONDITIONAL
);
12508 return omit_one_operand_loc (loc
, type
,
12509 constant_boolean_node (false, type
),
12513 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12514 and similarly for >= into !=. */
12515 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
12516 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
12517 && TREE_CODE (arg1
) == LSHIFT_EXPR
12518 && integer_onep (TREE_OPERAND (arg1
, 0)))
12519 return build2_loc (loc
, code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
12520 build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
12521 TREE_OPERAND (arg1
, 1)),
12522 build_zero_cst (TREE_TYPE (arg0
)));
12524 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
12525 otherwise Y might be >= # of bits in X's type and thus e.g.
12526 (unsigned char) (1 << Y) for Y 15 might be 0.
12527 If the cast is widening, then 1 << Y should have unsigned type,
12528 otherwise if Y is number of bits in the signed shift type minus 1,
12529 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
12530 31 might be 0xffffffff80000000. */
12531 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
12532 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
12533 || VECTOR_INTEGER_TYPE_P (TREE_TYPE (arg0
)))
12534 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
12535 && CONVERT_EXPR_P (arg1
)
12536 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == LSHIFT_EXPR
12537 && (element_precision (TREE_TYPE (arg1
))
12538 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1
, 0))))
12539 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1
, 0)))
12540 || (element_precision (TREE_TYPE (arg1
))
12541 == element_precision (TREE_TYPE (TREE_OPERAND (arg1
, 0)))))
12542 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0)))
12544 tem
= build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
12545 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1));
12546 return build2_loc (loc
, code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
12547 fold_convert_loc (loc
, TREE_TYPE (arg0
), tem
),
12548 build_zero_cst (TREE_TYPE (arg0
)));
12553 case UNORDERED_EXPR
:
12561 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12563 tree targ0
= strip_float_extensions (arg0
);
12564 tree targ1
= strip_float_extensions (arg1
);
12565 tree newtype
= TREE_TYPE (targ0
);
12567 if (element_precision (TREE_TYPE (targ1
)) > element_precision (newtype
))
12568 newtype
= TREE_TYPE (targ1
);
12570 if (element_precision (newtype
) < element_precision (TREE_TYPE (arg0
)))
12571 return fold_build2_loc (loc
, code
, type
,
12572 fold_convert_loc (loc
, newtype
, targ0
),
12573 fold_convert_loc (loc
, newtype
, targ1
));
12578 case COMPOUND_EXPR
:
12579 /* When pedantic, a compound expression can be neither an lvalue
12580 nor an integer constant expression. */
12581 if (TREE_SIDE_EFFECTS (arg0
) || TREE_CONSTANT (arg1
))
12583 /* Don't let (0, 0) be null pointer constant. */
12584 tem
= integer_zerop (arg1
) ? build1_loc (loc
, NOP_EXPR
, type
, arg1
)
12585 : fold_convert_loc (loc
, type
, arg1
);
12590 } /* switch (code) */
12593 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
12594 ((A & N) + B) & M -> (A + B) & M
12595 Similarly if (N & M) == 0,
12596 ((A | N) + B) & M -> (A + B) & M
12597 and for - instead of + (or unary - instead of +)
12598 and/or ^ instead of |.
12599 If B is constant and (B & M) == 0, fold into A & M.
12601 This function is a helper for match.pd patterns. Return non-NULL
12602 type in which the simplified operation should be performed only
12603 if any optimization is possible.
12605 ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
12606 then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
12607 Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
12610 fold_bit_and_mask (tree type
, tree arg1
, enum tree_code code
,
12611 tree arg00
, enum tree_code code00
, tree arg000
, tree arg001
,
12612 tree arg01
, enum tree_code code01
, tree arg010
, tree arg011
,
12615 gcc_assert (TREE_CODE (arg1
) == INTEGER_CST
);
12616 gcc_assert (code
== PLUS_EXPR
|| code
== MINUS_EXPR
|| code
== NEGATE_EXPR
);
12617 wi::tree_to_wide_ref cst1
= wi::to_wide (arg1
);
12619 || (cst1
& (cst1
+ 1)) != 0
12620 || !INTEGRAL_TYPE_P (type
)
12621 || (!TYPE_OVERFLOW_WRAPS (type
)
12622 && TREE_CODE (type
) != INTEGER_TYPE
)
12623 || (wi::max_value (type
) & cst1
) != cst1
)
12626 enum tree_code codes
[2] = { code00
, code01
};
12627 tree arg0xx
[4] = { arg000
, arg001
, arg010
, arg011
};
12631 /* Now we know that arg0 is (C + D) or (C - D) or -C and
12632 arg1 (M) is == (1LL << cst) - 1.
12633 Store C into PMOP[0] and D into PMOP[1]. */
12636 which
= code
!= NEGATE_EXPR
;
12638 for (; which
>= 0; which
--)
12639 switch (codes
[which
])
12644 gcc_assert (TREE_CODE (arg0xx
[2 * which
+ 1]) == INTEGER_CST
);
12645 cst0
= wi::to_wide (arg0xx
[2 * which
+ 1]) & cst1
;
12646 if (codes
[which
] == BIT_AND_EXPR
)
12651 else if (cst0
!= 0)
12653 /* If C or D is of the form (A & N) where
12654 (N & M) == M, or of the form (A | N) or
12655 (A ^ N) where (N & M) == 0, replace it with A. */
12656 pmop
[which
] = arg0xx
[2 * which
];
12659 if (TREE_CODE (pmop
[which
]) != INTEGER_CST
)
12661 /* If C or D is a N where (N & M) == 0, it can be
12662 omitted (replaced with 0). */
12663 if ((code
== PLUS_EXPR
12664 || (code
== MINUS_EXPR
&& which
== 0))
12665 && (cst1
& wi::to_wide (pmop
[which
])) == 0)
12666 pmop
[which
] = build_int_cst (type
, 0);
12667 /* Similarly, with C - N where (-N & M) == 0. */
12668 if (code
== MINUS_EXPR
12670 && (cst1
& -wi::to_wide (pmop
[which
])) == 0)
12671 pmop
[which
] = build_int_cst (type
, 0);
12674 gcc_unreachable ();
12677 /* Only build anything new if we optimized one or both arguments above. */
12678 if (pmop
[0] == arg00
&& pmop
[1] == arg01
)
12681 if (TYPE_OVERFLOW_WRAPS (type
))
12684 return unsigned_type_for (type
);
12687 /* Used by contains_label_[p1]. */
12689 struct contains_label_data
12691 hash_set
<tree
> *pset
;
12692 bool inside_switch_p
;
12695 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
12696 a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
12697 return NULL_TREE. Do not check the subtrees of GOTO_EXPR. */
12700 contains_label_1 (tree
*tp
, int *walk_subtrees
, void *data
)
12702 contains_label_data
*d
= (contains_label_data
*) data
;
12703 switch (TREE_CODE (*tp
))
12708 case CASE_LABEL_EXPR
:
12709 if (!d
->inside_switch_p
)
12714 if (!d
->inside_switch_p
)
12716 if (walk_tree (&SWITCH_COND (*tp
), contains_label_1
, data
, d
->pset
))
12718 d
->inside_switch_p
= true;
12719 if (walk_tree (&SWITCH_BODY (*tp
), contains_label_1
, data
, d
->pset
))
12721 d
->inside_switch_p
= false;
12722 *walk_subtrees
= 0;
12727 *walk_subtrees
= 0;
12735 /* Return whether the sub-tree ST contains a label which is accessible from
12736 outside the sub-tree. */
12739 contains_label_p (tree st
)
12741 hash_set
<tree
> pset
;
12742 contains_label_data data
= { &pset
, false };
12743 return walk_tree (&st
, contains_label_1
, &data
, &pset
) != NULL_TREE
;
12746 /* Fold a ternary expression of code CODE and type TYPE with operands
12747 OP0, OP1, and OP2. Return the folded expression if folding is
12748 successful. Otherwise, return NULL_TREE. */
12751 fold_ternary_loc (location_t loc
, enum tree_code code
, tree type
,
12752 tree op0
, tree op1
, tree op2
)
12755 tree arg0
= NULL_TREE
, arg1
= NULL_TREE
, arg2
= NULL_TREE
;
12756 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
12758 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
12759 && TREE_CODE_LENGTH (code
) == 3);
12761 /* If this is a commutative operation, and OP0 is a constant, move it
12762 to OP1 to reduce the number of tests below. */
12763 if (commutative_ternary_tree_code (code
)
12764 && tree_swap_operands_p (op0
, op1
))
12765 return fold_build3_loc (loc
, code
, type
, op1
, op0
, op2
);
12767 tem
= generic_simplify (loc
, code
, type
, op0
, op1
, op2
);
12771 /* Strip any conversions that don't change the mode. This is safe
12772 for every expression, except for a comparison expression because
12773 its signedness is derived from its operands. So, in the latter
12774 case, only strip conversions that don't change the signedness.
12776 Note that this is done as an internal manipulation within the
12777 constant folder, in order to find the simplest representation of
12778 the arguments so that their form can be studied. In any cases,
12779 the appropriate type conversions should be put back in the tree
12780 that will get out of the constant folder. */
12801 case COMPONENT_REF
:
12802 if (TREE_CODE (arg0
) == CONSTRUCTOR
12803 && ! type_contains_placeholder_p (TREE_TYPE (arg0
)))
12805 unsigned HOST_WIDE_INT idx
;
12807 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0
), idx
, field
, value
)
12814 case VEC_COND_EXPR
:
12815 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12816 so all simple results must be passed through pedantic_non_lvalue. */
12817 if (TREE_CODE (arg0
) == INTEGER_CST
)
12819 tree unused_op
= integer_zerop (arg0
) ? op1
: op2
;
12820 tem
= integer_zerop (arg0
) ? op2
: op1
;
12821 /* Only optimize constant conditions when the selected branch
12822 has the same type as the COND_EXPR. This avoids optimizing
12823 away "c ? x : throw", where the throw has a void type.
12824 Avoid throwing away that operand which contains label. */
12825 if ((!TREE_SIDE_EFFECTS (unused_op
)
12826 || !contains_label_p (unused_op
))
12827 && (! VOID_TYPE_P (TREE_TYPE (tem
))
12828 || VOID_TYPE_P (type
)))
12829 return protected_set_expr_location_unshare (tem
, loc
);
12832 else if (TREE_CODE (arg0
) == VECTOR_CST
)
12834 unsigned HOST_WIDE_INT nelts
;
12835 if ((TREE_CODE (arg1
) == VECTOR_CST
12836 || TREE_CODE (arg1
) == CONSTRUCTOR
)
12837 && (TREE_CODE (arg2
) == VECTOR_CST
12838 || TREE_CODE (arg2
) == CONSTRUCTOR
)
12839 && TYPE_VECTOR_SUBPARTS (type
).is_constant (&nelts
))
12841 vec_perm_builder
sel (nelts
, nelts
, 1);
12842 for (unsigned int i
= 0; i
< nelts
; i
++)
12844 tree val
= VECTOR_CST_ELT (arg0
, i
);
12845 if (integer_all_onesp (val
))
12846 sel
.quick_push (i
);
12847 else if (integer_zerop (val
))
12848 sel
.quick_push (nelts
+ i
);
12849 else /* Currently unreachable. */
12852 vec_perm_indices
indices (sel
, 2, nelts
);
12853 tree t
= fold_vec_perm (type
, arg1
, arg2
, indices
);
12854 if (t
!= NULL_TREE
)
12859 /* If we have A op B ? A : C, we may be able to convert this to a
12860 simpler expression, depending on the operation and the values
12861 of B and C. Signed zeros prevent all of these transformations,
12862 for reasons given above each one.
12864 Also try swapping the arguments and inverting the conditional. */
12865 if (COMPARISON_CLASS_P (arg0
)
12866 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0), op1
)
12867 && !HONOR_SIGNED_ZEROS (op1
))
12869 tem
= fold_cond_expr_with_comparison (loc
, type
, TREE_CODE (arg0
),
12870 TREE_OPERAND (arg0
, 0),
12871 TREE_OPERAND (arg0
, 1),
12877 if (COMPARISON_CLASS_P (arg0
)
12878 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0), op2
)
12879 && !HONOR_SIGNED_ZEROS (op2
))
12881 enum tree_code comp_code
= TREE_CODE (arg0
);
12882 tree arg00
= TREE_OPERAND (arg0
, 0);
12883 tree arg01
= TREE_OPERAND (arg0
, 1);
12884 comp_code
= invert_tree_comparison (comp_code
, HONOR_NANS (arg00
));
12885 if (comp_code
!= ERROR_MARK
)
12886 tem
= fold_cond_expr_with_comparison (loc
, type
, comp_code
,
12894 /* If the second operand is simpler than the third, swap them
12895 since that produces better jump optimization results. */
12896 if (truth_value_p (TREE_CODE (arg0
))
12897 && tree_swap_operands_p (op1
, op2
))
12899 location_t loc0
= expr_location_or (arg0
, loc
);
12900 /* See if this can be inverted. If it can't, possibly because
12901 it was a floating-point inequality comparison, don't do
12903 tem
= fold_invert_truthvalue (loc0
, arg0
);
12905 return fold_build3_loc (loc
, code
, type
, tem
, op2
, op1
);
12908 /* Convert A ? 1 : 0 to simply A. */
12909 if ((code
== VEC_COND_EXPR
? integer_all_onesp (op1
)
12910 : (integer_onep (op1
)
12911 && !VECTOR_TYPE_P (type
)))
12912 && integer_zerop (op2
)
12913 /* If we try to convert OP0 to our type, the
12914 call to fold will try to move the conversion inside
12915 a COND, which will recurse. In that case, the COND_EXPR
12916 is probably the best choice, so leave it alone. */
12917 && type
== TREE_TYPE (arg0
))
12918 return protected_set_expr_location_unshare (arg0
, loc
);
12920 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12921 over COND_EXPR in cases such as floating point comparisons. */
12922 if (integer_zerop (op1
)
12923 && code
== COND_EXPR
12924 && integer_onep (op2
)
12925 && !VECTOR_TYPE_P (type
)
12926 && truth_value_p (TREE_CODE (arg0
)))
12927 return fold_convert_loc (loc
, type
,
12928 invert_truthvalue_loc (loc
, arg0
));
12930 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12931 if (TREE_CODE (arg0
) == LT_EXPR
12932 && integer_zerop (TREE_OPERAND (arg0
, 1))
12933 && integer_zerop (op2
)
12934 && (tem
= sign_bit_p (TREE_OPERAND (arg0
, 0), arg1
)))
12936 /* sign_bit_p looks through both zero and sign extensions,
12937 but for this optimization only sign extensions are
12939 tree tem2
= TREE_OPERAND (arg0
, 0);
12940 while (tem
!= tem2
)
12942 if (TREE_CODE (tem2
) != NOP_EXPR
12943 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2
, 0))))
12948 tem2
= TREE_OPERAND (tem2
, 0);
12950 /* sign_bit_p only checks ARG1 bits within A's precision.
12951 If <sign bit of A> has wider type than A, bits outside
12952 of A's precision in <sign bit of A> need to be checked.
12953 If they are all 0, this optimization needs to be done
12954 in unsigned A's type, if they are all 1 in signed A's type,
12955 otherwise this can't be done. */
12957 && TYPE_PRECISION (TREE_TYPE (tem
))
12958 < TYPE_PRECISION (TREE_TYPE (arg1
))
12959 && TYPE_PRECISION (TREE_TYPE (tem
))
12960 < TYPE_PRECISION (type
))
12962 int inner_width
, outer_width
;
12965 inner_width
= TYPE_PRECISION (TREE_TYPE (tem
));
12966 outer_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
12967 if (outer_width
> TYPE_PRECISION (type
))
12968 outer_width
= TYPE_PRECISION (type
);
12970 wide_int mask
= wi::shifted_mask
12971 (inner_width
, outer_width
- inner_width
, false,
12972 TYPE_PRECISION (TREE_TYPE (arg1
)));
12974 wide_int common
= mask
& wi::to_wide (arg1
);
12975 if (common
== mask
)
12977 tem_type
= signed_type_for (TREE_TYPE (tem
));
12978 tem
= fold_convert_loc (loc
, tem_type
, tem
);
12980 else if (common
== 0)
12982 tem_type
= unsigned_type_for (TREE_TYPE (tem
));
12983 tem
= fold_convert_loc (loc
, tem_type
, tem
);
12991 fold_convert_loc (loc
, type
,
12992 fold_build2_loc (loc
, BIT_AND_EXPR
,
12993 TREE_TYPE (tem
), tem
,
12994 fold_convert_loc (loc
,
12999 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13000 already handled above. */
13001 if (TREE_CODE (arg0
) == BIT_AND_EXPR
13002 && integer_onep (TREE_OPERAND (arg0
, 1))
13003 && integer_zerop (op2
)
13004 && integer_pow2p (arg1
))
13006 tree tem
= TREE_OPERAND (arg0
, 0);
13008 if (TREE_CODE (tem
) == RSHIFT_EXPR
13009 && tree_fits_uhwi_p (TREE_OPERAND (tem
, 1))
13010 && (unsigned HOST_WIDE_INT
) tree_log2 (arg1
)
13011 == tree_to_uhwi (TREE_OPERAND (tem
, 1)))
13012 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
13013 fold_convert_loc (loc
, type
,
13014 TREE_OPERAND (tem
, 0)),
13018 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13019 is probably obsolete because the first operand should be a
13020 truth value (that's why we have the two cases above), but let's
13021 leave it in until we can confirm this for all front-ends. */
13022 if (integer_zerop (op2
)
13023 && TREE_CODE (arg0
) == NE_EXPR
13024 && integer_zerop (TREE_OPERAND (arg0
, 1))
13025 && integer_pow2p (arg1
)
13026 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
13027 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
13028 arg1
, OEP_ONLY_CONST
)
13029 /* operand_equal_p compares just value, not precision, so e.g.
13030 arg1 could be 8-bit -128 and be power of two, but BIT_AND_EXPR
13031 second operand 32-bit -128, which is not a power of two (or vice
13033 && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1)))
13034 return fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
13036 /* Disable the transformations below for vectors, since
13037 fold_binary_op_with_conditional_arg may undo them immediately,
13038 yielding an infinite loop. */
13039 if (code
== VEC_COND_EXPR
)
13042 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13043 if (integer_zerop (op2
)
13044 && truth_value_p (TREE_CODE (arg0
))
13045 && truth_value_p (TREE_CODE (arg1
))
13046 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
13047 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
? BIT_AND_EXPR
13048 : TRUTH_ANDIF_EXPR
,
13049 type
, fold_convert_loc (loc
, type
, arg0
), op1
);
13051 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13052 if (code
== VEC_COND_EXPR
? integer_all_onesp (op2
) : integer_onep (op2
)
13053 && truth_value_p (TREE_CODE (arg0
))
13054 && truth_value_p (TREE_CODE (arg1
))
13055 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
13057 location_t loc0
= expr_location_or (arg0
, loc
);
13058 /* Only perform transformation if ARG0 is easily inverted. */
13059 tem
= fold_invert_truthvalue (loc0
, arg0
);
13061 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
13064 type
, fold_convert_loc (loc
, type
, tem
),
13068 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13069 if (integer_zerop (arg1
)
13070 && truth_value_p (TREE_CODE (arg0
))
13071 && truth_value_p (TREE_CODE (op2
))
13072 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
13074 location_t loc0
= expr_location_or (arg0
, loc
);
13075 /* Only perform transformation if ARG0 is easily inverted. */
13076 tem
= fold_invert_truthvalue (loc0
, arg0
);
13078 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
13079 ? BIT_AND_EXPR
: TRUTH_ANDIF_EXPR
,
13080 type
, fold_convert_loc (loc
, type
, tem
),
13084 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13085 if (code
== VEC_COND_EXPR
? integer_all_onesp (arg1
) : integer_onep (arg1
)
13086 && truth_value_p (TREE_CODE (arg0
))
13087 && truth_value_p (TREE_CODE (op2
))
13088 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
13089 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
13090 ? BIT_IOR_EXPR
: TRUTH_ORIF_EXPR
,
13091 type
, fold_convert_loc (loc
, type
, arg0
), op2
);
13096 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13097 of fold_ternary on them. */
13098 gcc_unreachable ();
13100 case BIT_FIELD_REF
:
13101 if (TREE_CODE (arg0
) == VECTOR_CST
13102 && (type
== TREE_TYPE (TREE_TYPE (arg0
))
13103 || (VECTOR_TYPE_P (type
)
13104 && TREE_TYPE (type
) == TREE_TYPE (TREE_TYPE (arg0
))))
13105 && tree_fits_uhwi_p (op1
)
13106 && tree_fits_uhwi_p (op2
))
13108 tree eltype
= TREE_TYPE (TREE_TYPE (arg0
));
13109 unsigned HOST_WIDE_INT width
13110 = (TREE_CODE (eltype
) == BOOLEAN_TYPE
13111 ? TYPE_PRECISION (eltype
) : tree_to_uhwi (TYPE_SIZE (eltype
)));
13112 unsigned HOST_WIDE_INT n
= tree_to_uhwi (arg1
);
13113 unsigned HOST_WIDE_INT idx
= tree_to_uhwi (op2
);
13116 && (idx
% width
) == 0
13117 && (n
% width
) == 0
13118 && known_le ((idx
+ n
) / width
,
13119 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
))))
13124 if (TREE_CODE (arg0
) == VECTOR_CST
)
13128 tem
= VECTOR_CST_ELT (arg0
, idx
);
13129 if (VECTOR_TYPE_P (type
))
13130 tem
= fold_build1 (VIEW_CONVERT_EXPR
, type
, tem
);
13134 tree_vector_builder
vals (type
, n
, 1);
13135 for (unsigned i
= 0; i
< n
; ++i
)
13136 vals
.quick_push (VECTOR_CST_ELT (arg0
, idx
+ i
));
13137 return vals
.build ();
13142 /* On constants we can use native encode/interpret to constant
13143 fold (nearly) all BIT_FIELD_REFs. */
13144 if (CONSTANT_CLASS_P (arg0
)
13145 && can_native_interpret_type_p (type
)
13146 && BITS_PER_UNIT
== 8
13147 && tree_fits_uhwi_p (op1
)
13148 && tree_fits_uhwi_p (op2
))
13150 unsigned HOST_WIDE_INT bitpos
= tree_to_uhwi (op2
);
13151 unsigned HOST_WIDE_INT bitsize
= tree_to_uhwi (op1
);
13152 /* Limit us to a reasonable amount of work. To relax the
13153 other limitations we need bit-shifting of the buffer
13154 and rounding up the size. */
13155 if (bitpos
% BITS_PER_UNIT
== 0
13156 && bitsize
% BITS_PER_UNIT
== 0
13157 && bitsize
<= MAX_BITSIZE_MODE_ANY_MODE
)
13159 unsigned char b
[MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
];
13160 unsigned HOST_WIDE_INT len
13161 = native_encode_expr (arg0
, b
, bitsize
/ BITS_PER_UNIT
,
13162 bitpos
/ BITS_PER_UNIT
);
13164 && len
* BITS_PER_UNIT
>= bitsize
)
13166 tree v
= native_interpret_expr (type
, b
,
13167 bitsize
/ BITS_PER_UNIT
);
13176 case VEC_PERM_EXPR
:
13177 /* Perform constant folding of BIT_INSERT_EXPR. */
13178 if (TREE_CODE (arg2
) == VECTOR_CST
13179 && TREE_CODE (op0
) == VECTOR_CST
13180 && TREE_CODE (op1
) == VECTOR_CST
)
13182 /* Build a vector of integers from the tree mask. */
13183 vec_perm_builder builder
;
13184 if (!tree_to_vec_perm_builder (&builder
, arg2
))
13187 /* Create a vec_perm_indices for the integer vector. */
13188 poly_uint64 nelts
= TYPE_VECTOR_SUBPARTS (type
);
13189 bool single_arg
= (op0
== op1
);
13190 vec_perm_indices
sel (builder
, single_arg
? 1 : 2, nelts
);
13191 return fold_vec_perm (type
, op0
, op1
, sel
);
13195 case BIT_INSERT_EXPR
:
13196 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
13197 if (TREE_CODE (arg0
) == INTEGER_CST
13198 && TREE_CODE (arg1
) == INTEGER_CST
)
13200 unsigned HOST_WIDE_INT bitpos
= tree_to_uhwi (op2
);
13201 unsigned bitsize
= TYPE_PRECISION (TREE_TYPE (arg1
));
13202 wide_int tem
= (wi::to_wide (arg0
)
13203 & wi::shifted_mask (bitpos
, bitsize
, true,
13204 TYPE_PRECISION (type
)));
13206 = wi::lshift (wi::zext (wi::to_wide (arg1
, TYPE_PRECISION (type
)),
13208 return wide_int_to_tree (type
, wi::bit_or (tem
, tem2
));
13210 else if (TREE_CODE (arg0
) == VECTOR_CST
13211 && CONSTANT_CLASS_P (arg1
)
13212 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0
)),
13215 unsigned HOST_WIDE_INT bitpos
= tree_to_uhwi (op2
);
13216 unsigned HOST_WIDE_INT elsize
13217 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1
)));
13218 if (bitpos
% elsize
== 0)
13220 unsigned k
= bitpos
/ elsize
;
13221 unsigned HOST_WIDE_INT nelts
;
13222 if (operand_equal_p (VECTOR_CST_ELT (arg0
, k
), arg1
, 0))
13224 else if (VECTOR_CST_NELTS (arg0
).is_constant (&nelts
))
13226 tree_vector_builder
elts (type
, nelts
, 1);
13227 elts
.quick_grow (nelts
);
13228 for (unsigned HOST_WIDE_INT i
= 0; i
< nelts
; ++i
)
13229 elts
[i
] = (i
== k
? arg1
: VECTOR_CST_ELT (arg0
, i
));
13230 return elts
.build ();
13238 } /* switch (code) */
13241 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
13242 of an array (or vector). *CTOR_IDX if non-NULL is updated with the
13243 constructor element index of the value returned. If the element is
13244 not found NULL_TREE is returned and *CTOR_IDX is updated to
13245 the index of the element after the ACCESS_INDEX position (which
13246 may be outside of the CTOR array). */
13249 get_array_ctor_element_at_index (tree ctor
, offset_int access_index
,
13250 unsigned *ctor_idx
)
13252 tree index_type
= NULL_TREE
;
13253 signop index_sgn
= UNSIGNED
;
13254 offset_int low_bound
= 0;
13256 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
)
13258 tree domain_type
= TYPE_DOMAIN (TREE_TYPE (ctor
));
13259 if (domain_type
&& TYPE_MIN_VALUE (domain_type
))
13261 /* Static constructors for variably sized objects makes no sense. */
13262 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type
)) == INTEGER_CST
);
13263 index_type
= TREE_TYPE (TYPE_MIN_VALUE (domain_type
));
13264 /* ??? When it is obvious that the range is signed, treat it so. */
13265 if (TYPE_UNSIGNED (index_type
)
13266 && TYPE_MAX_VALUE (domain_type
)
13267 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type
),
13268 TYPE_MIN_VALUE (domain_type
)))
13270 index_sgn
= SIGNED
;
13272 = offset_int::from (wi::to_wide (TYPE_MIN_VALUE (domain_type
)),
13277 index_sgn
= TYPE_SIGN (index_type
);
13278 low_bound
= wi::to_offset (TYPE_MIN_VALUE (domain_type
));
13284 access_index
= wi::ext (access_index
, TYPE_PRECISION (index_type
),
13287 offset_int index
= low_bound
;
13289 index
= wi::ext (index
, TYPE_PRECISION (index_type
), index_sgn
);
13291 offset_int max_index
= index
;
13294 bool first_p
= true;
13296 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
, cval
)
13298 /* Array constructor might explicitly set index, or specify a range,
13299 or leave index NULL meaning that it is next index after previous
13303 if (TREE_CODE (cfield
) == INTEGER_CST
)
13305 = offset_int::from (wi::to_wide (cfield
), index_sgn
);
13308 gcc_assert (TREE_CODE (cfield
) == RANGE_EXPR
);
13309 index
= offset_int::from (wi::to_wide (TREE_OPERAND (cfield
, 0)),
13312 = offset_int::from (wi::to_wide (TREE_OPERAND (cfield
, 1)),
13314 gcc_checking_assert (wi::le_p (index
, max_index
, index_sgn
));
13319 index
= max_index
+ 1;
13321 index
= wi::ext (index
, TYPE_PRECISION (index_type
), index_sgn
);
13322 gcc_checking_assert (wi::gt_p (index
, max_index
, index_sgn
));
13328 /* Do we have match? */
13329 if (wi::cmp (access_index
, index
, index_sgn
) >= 0)
13331 if (wi::cmp (access_index
, max_index
, index_sgn
) <= 0)
13338 else if (in_gimple_form
)
13339 /* We're past the element we search for. Note during parsing
13340 the elements might not be sorted.
13341 ??? We should use a binary search and a flag on the
13342 CONSTRUCTOR as to whether elements are sorted in declaration
13351 /* Perform constant folding and related simplification of EXPR.
13352 The related simplifications include x*1 => x, x*0 => 0, etc.,
13353 and application of the associative law.
13354 NOP_EXPR conversions may be removed freely (as long as we
13355 are careful not to change the type of the overall expression).
13356 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13357 but we can constant-fold them if they have constant operands. */
13359 #ifdef ENABLE_FOLD_CHECKING
13360 # define fold(x) fold_1 (x)
13361 static tree
fold_1 (tree
);
13367 const tree t
= expr
;
13368 enum tree_code code
= TREE_CODE (t
);
13369 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
13371 location_t loc
= EXPR_LOCATION (expr
);
13373 /* Return right away if a constant. */
13374 if (kind
== tcc_constant
)
13377 /* CALL_EXPR-like objects with variable numbers of operands are
13378 treated specially. */
13379 if (kind
== tcc_vl_exp
)
13381 if (code
== CALL_EXPR
)
13383 tem
= fold_call_expr (loc
, expr
, false);
13384 return tem
? tem
: expr
;
13389 if (IS_EXPR_CODE_CLASS (kind
))
13391 tree type
= TREE_TYPE (t
);
13392 tree op0
, op1
, op2
;
13394 switch (TREE_CODE_LENGTH (code
))
13397 op0
= TREE_OPERAND (t
, 0);
13398 tem
= fold_unary_loc (loc
, code
, type
, op0
);
13399 return tem
? tem
: expr
;
13401 op0
= TREE_OPERAND (t
, 0);
13402 op1
= TREE_OPERAND (t
, 1);
13403 tem
= fold_binary_loc (loc
, code
, type
, op0
, op1
);
13404 return tem
? tem
: expr
;
13406 op0
= TREE_OPERAND (t
, 0);
13407 op1
= TREE_OPERAND (t
, 1);
13408 op2
= TREE_OPERAND (t
, 2);
13409 tem
= fold_ternary_loc (loc
, code
, type
, op0
, op1
, op2
);
13410 return tem
? tem
: expr
;
13420 tree op0
= TREE_OPERAND (t
, 0);
13421 tree op1
= TREE_OPERAND (t
, 1);
13423 if (TREE_CODE (op1
) == INTEGER_CST
13424 && TREE_CODE (op0
) == CONSTRUCTOR
13425 && ! type_contains_placeholder_p (TREE_TYPE (op0
)))
13427 tree val
= get_array_ctor_element_at_index (op0
,
13428 wi::to_offset (op1
));
13436 /* Return a VECTOR_CST if possible. */
13439 tree type
= TREE_TYPE (t
);
13440 if (TREE_CODE (type
) != VECTOR_TYPE
)
13445 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t
), i
, val
)
13446 if (! CONSTANT_CLASS_P (val
))
13449 return build_vector_from_ctor (type
, CONSTRUCTOR_ELTS (t
));
13453 return fold (DECL_INITIAL (t
));
13457 } /* switch (code) */
13460 #ifdef ENABLE_FOLD_CHECKING
13463 static void fold_checksum_tree (const_tree
, struct md5_ctx
*,
13464 hash_table
<nofree_ptr_hash
<const tree_node
> > *);
13465 static void fold_check_failed (const_tree
, const_tree
);
13466 void print_fold_checksum (const_tree
);
13468 /* When --enable-checking=fold, compute a digest of expr before
13469 and after actual fold call to see if fold did not accidentally
13470 change original expr. */
13476 struct md5_ctx ctx
;
13477 unsigned char checksum_before
[16], checksum_after
[16];
13478 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
13480 md5_init_ctx (&ctx
);
13481 fold_checksum_tree (expr
, &ctx
, &ht
);
13482 md5_finish_ctx (&ctx
, checksum_before
);
13485 ret
= fold_1 (expr
);
13487 md5_init_ctx (&ctx
);
13488 fold_checksum_tree (expr
, &ctx
, &ht
);
13489 md5_finish_ctx (&ctx
, checksum_after
);
13491 if (memcmp (checksum_before
, checksum_after
, 16))
13492 fold_check_failed (expr
, ret
);
13498 print_fold_checksum (const_tree expr
)
13500 struct md5_ctx ctx
;
13501 unsigned char checksum
[16], cnt
;
13502 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
13504 md5_init_ctx (&ctx
);
13505 fold_checksum_tree (expr
, &ctx
, &ht
);
13506 md5_finish_ctx (&ctx
, checksum
);
13507 for (cnt
= 0; cnt
< 16; ++cnt
)
13508 fprintf (stderr
, "%02x", checksum
[cnt
]);
13509 putc ('\n', stderr
);
13513 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED
, const_tree ret ATTRIBUTE_UNUSED
)
13515 internal_error ("fold check: original tree changed by fold");
13519 fold_checksum_tree (const_tree expr
, struct md5_ctx
*ctx
,
13520 hash_table
<nofree_ptr_hash
<const tree_node
> > *ht
)
13522 const tree_node
**slot
;
13523 enum tree_code code
;
13524 union tree_node
*buf
;
13530 slot
= ht
->find_slot (expr
, INSERT
);
13534 code
= TREE_CODE (expr
);
13535 if (TREE_CODE_CLASS (code
) == tcc_declaration
13536 && HAS_DECL_ASSEMBLER_NAME_P (expr
))
13538 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
13539 size_t sz
= tree_size (expr
);
13540 buf
= XALLOCAVAR (union tree_node
, sz
);
13541 memcpy ((char *) buf
, expr
, sz
);
13542 SET_DECL_ASSEMBLER_NAME ((tree
) buf
, NULL
);
13543 buf
->decl_with_vis
.symtab_node
= NULL
;
13544 buf
->base
.nowarning_flag
= 0;
13547 else if (TREE_CODE_CLASS (code
) == tcc_type
13548 && (TYPE_POINTER_TO (expr
)
13549 || TYPE_REFERENCE_TO (expr
)
13550 || TYPE_CACHED_VALUES_P (expr
)
13551 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr
)
13552 || TYPE_NEXT_VARIANT (expr
)
13553 || TYPE_ALIAS_SET_KNOWN_P (expr
)))
13555 /* Allow these fields to be modified. */
13557 size_t sz
= tree_size (expr
);
13558 buf
= XALLOCAVAR (union tree_node
, sz
);
13559 memcpy ((char *) buf
, expr
, sz
);
13560 expr
= tmp
= (tree
) buf
;
13561 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp
) = 0;
13562 TYPE_POINTER_TO (tmp
) = NULL
;
13563 TYPE_REFERENCE_TO (tmp
) = NULL
;
13564 TYPE_NEXT_VARIANT (tmp
) = NULL
;
13565 TYPE_ALIAS_SET (tmp
) = -1;
13566 if (TYPE_CACHED_VALUES_P (tmp
))
13568 TYPE_CACHED_VALUES_P (tmp
) = 0;
13569 TYPE_CACHED_VALUES (tmp
) = NULL
;
13572 else if (warning_suppressed_p (expr
) && (DECL_P (expr
) || EXPR_P (expr
)))
13574 /* Allow the no-warning bit to be set. Perhaps we shouldn't allow
13575 that and change builtins.cc etc. instead - see PR89543. */
13576 size_t sz
= tree_size (expr
);
13577 buf
= XALLOCAVAR (union tree_node
, sz
);
13578 memcpy ((char *) buf
, expr
, sz
);
13579 buf
->base
.nowarning_flag
= 0;
13582 md5_process_bytes (expr
, tree_size (expr
), ctx
);
13583 if (CODE_CONTAINS_STRUCT (code
, TS_TYPED
))
13584 fold_checksum_tree (TREE_TYPE (expr
), ctx
, ht
);
13585 if (TREE_CODE_CLASS (code
) != tcc_type
13586 && TREE_CODE_CLASS (code
) != tcc_declaration
13587 && code
!= TREE_LIST
13588 && code
!= SSA_NAME
13589 && CODE_CONTAINS_STRUCT (code
, TS_COMMON
))
13590 fold_checksum_tree (TREE_CHAIN (expr
), ctx
, ht
);
13591 switch (TREE_CODE_CLASS (code
))
13597 md5_process_bytes (TREE_STRING_POINTER (expr
),
13598 TREE_STRING_LENGTH (expr
), ctx
);
13601 fold_checksum_tree (TREE_REALPART (expr
), ctx
, ht
);
13602 fold_checksum_tree (TREE_IMAGPART (expr
), ctx
, ht
);
13605 len
= vector_cst_encoded_nelts (expr
);
13606 for (i
= 0; i
< len
; ++i
)
13607 fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr
, i
), ctx
, ht
);
13613 case tcc_exceptional
:
13617 fold_checksum_tree (TREE_PURPOSE (expr
), ctx
, ht
);
13618 fold_checksum_tree (TREE_VALUE (expr
), ctx
, ht
);
13619 expr
= TREE_CHAIN (expr
);
13620 goto recursive_label
;
13623 for (i
= 0; i
< TREE_VEC_LENGTH (expr
); ++i
)
13624 fold_checksum_tree (TREE_VEC_ELT (expr
, i
), ctx
, ht
);
13630 case tcc_expression
:
13631 case tcc_reference
:
13632 case tcc_comparison
:
13635 case tcc_statement
:
13637 len
= TREE_OPERAND_LENGTH (expr
);
13638 for (i
= 0; i
< len
; ++i
)
13639 fold_checksum_tree (TREE_OPERAND (expr
, i
), ctx
, ht
);
13641 case tcc_declaration
:
13642 fold_checksum_tree (DECL_NAME (expr
), ctx
, ht
);
13643 fold_checksum_tree (DECL_CONTEXT (expr
), ctx
, ht
);
13644 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_COMMON
))
13646 fold_checksum_tree (DECL_SIZE (expr
), ctx
, ht
);
13647 fold_checksum_tree (DECL_SIZE_UNIT (expr
), ctx
, ht
);
13648 fold_checksum_tree (DECL_INITIAL (expr
), ctx
, ht
);
13649 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr
), ctx
, ht
);
13650 fold_checksum_tree (DECL_ATTRIBUTES (expr
), ctx
, ht
);
13653 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_NON_COMMON
))
13655 if (TREE_CODE (expr
) == FUNCTION_DECL
)
13657 fold_checksum_tree (DECL_VINDEX (expr
), ctx
, ht
);
13658 fold_checksum_tree (DECL_ARGUMENTS (expr
), ctx
, ht
);
13660 fold_checksum_tree (DECL_RESULT_FLD (expr
), ctx
, ht
);
13664 if (TREE_CODE (expr
) == ENUMERAL_TYPE
)
13665 fold_checksum_tree (TYPE_VALUES (expr
), ctx
, ht
);
13666 fold_checksum_tree (TYPE_SIZE (expr
), ctx
, ht
);
13667 fold_checksum_tree (TYPE_SIZE_UNIT (expr
), ctx
, ht
);
13668 fold_checksum_tree (TYPE_ATTRIBUTES (expr
), ctx
, ht
);
13669 fold_checksum_tree (TYPE_NAME (expr
), ctx
, ht
);
13670 if (INTEGRAL_TYPE_P (expr
)
13671 || SCALAR_FLOAT_TYPE_P (expr
))
13673 fold_checksum_tree (TYPE_MIN_VALUE (expr
), ctx
, ht
);
13674 fold_checksum_tree (TYPE_MAX_VALUE (expr
), ctx
, ht
);
13676 fold_checksum_tree (TYPE_MAIN_VARIANT (expr
), ctx
, ht
);
13677 if (RECORD_OR_UNION_TYPE_P (expr
))
13678 fold_checksum_tree (TYPE_BINFO (expr
), ctx
, ht
);
13679 fold_checksum_tree (TYPE_CONTEXT (expr
), ctx
, ht
);
13686 /* Helper function for outputting the checksum of a tree T. When
13687 debugging with gdb, you can "define mynext" to be "next" followed
13688 by "call debug_fold_checksum (op0)", then just trace down till the
13691 DEBUG_FUNCTION
void
13692 debug_fold_checksum (const_tree t
)
13695 unsigned char checksum
[16];
13696 struct md5_ctx ctx
;
13697 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
13699 md5_init_ctx (&ctx
);
13700 fold_checksum_tree (t
, &ctx
, &ht
);
13701 md5_finish_ctx (&ctx
, checksum
);
13704 for (i
= 0; i
< 16; i
++)
13705 fprintf (stderr
, "%d ", checksum
[i
]);
13707 fprintf (stderr
, "\n");
13712 /* Fold a unary tree expression with code CODE of type TYPE with an
13713 operand OP0. LOC is the location of the resulting expression.
13714 Return a folded expression if successful. Otherwise, return a tree
13715 expression with code CODE of type TYPE with an operand OP0. */
13718 fold_build1_loc (location_t loc
,
13719 enum tree_code code
, tree type
, tree op0 MEM_STAT_DECL
)
13722 #ifdef ENABLE_FOLD_CHECKING
13723 unsigned char checksum_before
[16], checksum_after
[16];
13724 struct md5_ctx ctx
;
13725 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
13727 md5_init_ctx (&ctx
);
13728 fold_checksum_tree (op0
, &ctx
, &ht
);
13729 md5_finish_ctx (&ctx
, checksum_before
);
13733 tem
= fold_unary_loc (loc
, code
, type
, op0
);
13735 tem
= build1_loc (loc
, code
, type
, op0 PASS_MEM_STAT
);
13737 #ifdef ENABLE_FOLD_CHECKING
13738 md5_init_ctx (&ctx
);
13739 fold_checksum_tree (op0
, &ctx
, &ht
);
13740 md5_finish_ctx (&ctx
, checksum_after
);
13742 if (memcmp (checksum_before
, checksum_after
, 16))
13743 fold_check_failed (op0
, tem
);
13748 /* Fold a binary tree expression with code CODE of type TYPE with
13749 operands OP0 and OP1. LOC is the location of the resulting
13750 expression. Return a folded expression if successful. Otherwise,
13751 return a tree expression with code CODE of type TYPE with operands
13755 fold_build2_loc (location_t loc
,
13756 enum tree_code code
, tree type
, tree op0
, tree op1
13760 #ifdef ENABLE_FOLD_CHECKING
13761 unsigned char checksum_before_op0
[16],
13762 checksum_before_op1
[16],
13763 checksum_after_op0
[16],
13764 checksum_after_op1
[16];
13765 struct md5_ctx ctx
;
13766 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
13768 md5_init_ctx (&ctx
);
13769 fold_checksum_tree (op0
, &ctx
, &ht
);
13770 md5_finish_ctx (&ctx
, checksum_before_op0
);
13773 md5_init_ctx (&ctx
);
13774 fold_checksum_tree (op1
, &ctx
, &ht
);
13775 md5_finish_ctx (&ctx
, checksum_before_op1
);
13779 tem
= fold_binary_loc (loc
, code
, type
, op0
, op1
);
13781 tem
= build2_loc (loc
, code
, type
, op0
, op1 PASS_MEM_STAT
);
13783 #ifdef ENABLE_FOLD_CHECKING
13784 md5_init_ctx (&ctx
);
13785 fold_checksum_tree (op0
, &ctx
, &ht
);
13786 md5_finish_ctx (&ctx
, checksum_after_op0
);
13789 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
13790 fold_check_failed (op0
, tem
);
13792 md5_init_ctx (&ctx
);
13793 fold_checksum_tree (op1
, &ctx
, &ht
);
13794 md5_finish_ctx (&ctx
, checksum_after_op1
);
13796 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
13797 fold_check_failed (op1
, tem
);
13802 /* Fold a ternary tree expression with code CODE of type TYPE with
13803 operands OP0, OP1, and OP2. Return a folded expression if
13804 successful. Otherwise, return a tree expression with code CODE of
13805 type TYPE with operands OP0, OP1, and OP2. */
13808 fold_build3_loc (location_t loc
, enum tree_code code
, tree type
,
13809 tree op0
, tree op1
, tree op2 MEM_STAT_DECL
)
13812 #ifdef ENABLE_FOLD_CHECKING
13813 unsigned char checksum_before_op0
[16],
13814 checksum_before_op1
[16],
13815 checksum_before_op2
[16],
13816 checksum_after_op0
[16],
13817 checksum_after_op1
[16],
13818 checksum_after_op2
[16];
13819 struct md5_ctx ctx
;
13820 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
13822 md5_init_ctx (&ctx
);
13823 fold_checksum_tree (op0
, &ctx
, &ht
);
13824 md5_finish_ctx (&ctx
, checksum_before_op0
);
13827 md5_init_ctx (&ctx
);
13828 fold_checksum_tree (op1
, &ctx
, &ht
);
13829 md5_finish_ctx (&ctx
, checksum_before_op1
);
13832 md5_init_ctx (&ctx
);
13833 fold_checksum_tree (op2
, &ctx
, &ht
);
13834 md5_finish_ctx (&ctx
, checksum_before_op2
);
13838 gcc_assert (TREE_CODE_CLASS (code
) != tcc_vl_exp
);
13839 tem
= fold_ternary_loc (loc
, code
, type
, op0
, op1
, op2
);
13841 tem
= build3_loc (loc
, code
, type
, op0
, op1
, op2 PASS_MEM_STAT
);
13843 #ifdef ENABLE_FOLD_CHECKING
13844 md5_init_ctx (&ctx
);
13845 fold_checksum_tree (op0
, &ctx
, &ht
);
13846 md5_finish_ctx (&ctx
, checksum_after_op0
);
13849 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
13850 fold_check_failed (op0
, tem
);
13852 md5_init_ctx (&ctx
);
13853 fold_checksum_tree (op1
, &ctx
, &ht
);
13854 md5_finish_ctx (&ctx
, checksum_after_op1
);
13857 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
13858 fold_check_failed (op1
, tem
);
13860 md5_init_ctx (&ctx
);
13861 fold_checksum_tree (op2
, &ctx
, &ht
);
13862 md5_finish_ctx (&ctx
, checksum_after_op2
);
13864 if (memcmp (checksum_before_op2
, checksum_after_op2
, 16))
13865 fold_check_failed (op2
, tem
);
13870 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
13871 arguments in ARGARRAY, and a null static chain.
13872 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13873 of type TYPE from the given operands as constructed by build_call_array. */
13876 fold_build_call_array_loc (location_t loc
, tree type
, tree fn
,
13877 int nargs
, tree
*argarray
)
13880 #ifdef ENABLE_FOLD_CHECKING
13881 unsigned char checksum_before_fn
[16],
13882 checksum_before_arglist
[16],
13883 checksum_after_fn
[16],
13884 checksum_after_arglist
[16];
13885 struct md5_ctx ctx
;
13886 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
13889 md5_init_ctx (&ctx
);
13890 fold_checksum_tree (fn
, &ctx
, &ht
);
13891 md5_finish_ctx (&ctx
, checksum_before_fn
);
13894 md5_init_ctx (&ctx
);
13895 for (i
= 0; i
< nargs
; i
++)
13896 fold_checksum_tree (argarray
[i
], &ctx
, &ht
);
13897 md5_finish_ctx (&ctx
, checksum_before_arglist
);
13901 tem
= fold_builtin_call_array (loc
, type
, fn
, nargs
, argarray
);
13903 tem
= build_call_array_loc (loc
, type
, fn
, nargs
, argarray
);
13905 #ifdef ENABLE_FOLD_CHECKING
13906 md5_init_ctx (&ctx
);
13907 fold_checksum_tree (fn
, &ctx
, &ht
);
13908 md5_finish_ctx (&ctx
, checksum_after_fn
);
13911 if (memcmp (checksum_before_fn
, checksum_after_fn
, 16))
13912 fold_check_failed (fn
, tem
);
13914 md5_init_ctx (&ctx
);
13915 for (i
= 0; i
< nargs
; i
++)
13916 fold_checksum_tree (argarray
[i
], &ctx
, &ht
);
13917 md5_finish_ctx (&ctx
, checksum_after_arglist
);
13919 if (memcmp (checksum_before_arglist
, checksum_after_arglist
, 16))
13920 fold_check_failed (NULL_TREE
, tem
);
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;
13949 fold_init (tree expr
)
13954 result
= fold (expr
);
13961 fold_build1_initializer_loc (location_t loc
, enum tree_code code
,
13962 tree type
, tree op
)
13967 result
= fold_build1_loc (loc
, code
, type
, op
);
13974 fold_build2_initializer_loc (location_t loc
, enum tree_code code
,
13975 tree type
, tree op0
, tree op1
)
13980 result
= fold_build2_loc (loc
, code
, type
, op0
, op1
);
13987 fold_build_call_array_initializer_loc (location_t loc
, tree type
, tree fn
,
13988 int nargs
, tree
*argarray
)
13993 result
= fold_build_call_array_loc (loc
, type
, fn
, nargs
, argarray
);
14000 fold_binary_initializer_loc (location_t loc
, tree_code code
, tree type
,
14001 tree lhs
, tree rhs
)
14006 result
= fold_binary_loc (loc
, code
, type
, lhs
, rhs
);
14012 #undef START_FOLD_INIT
14013 #undef END_FOLD_INIT
14015 /* Determine if first argument is a multiple of second argument. Return
14016 false if it is not, or we cannot easily determined it to be.
14018 An example of the sort of thing we care about (at this point; this routine
14019 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14020 fold cases do now) is discovering that
14022 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14028 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14030 This code also handles discovering that
14032 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14034 is a multiple of 8 so we don't have to worry about dealing with a
14035 possible remainder.
14037 Note that we *look* inside a SAVE_EXPR only to determine how it was
14038 calculated; it is not safe for fold to do much of anything else with the
14039 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14040 at run time. For example, the latter example above *cannot* be implemented
14041 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14042 evaluation time of the original SAVE_EXPR is not necessarily the same at
14043 the time the new expression is evaluated. The only optimization of this
14044 sort that would be valid is changing
14046 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14050 SAVE_EXPR (I) * SAVE_EXPR (J)
14052 (where the same SAVE_EXPR (J) is used in the original and the
14053 transformed version).
14055 NOWRAP specifies whether all outer operations in TYPE should
14056 be considered not wrapping. Any type conversion within TOP acts
14057 as a barrier and we will fall back to NOWRAP being false.
14058 NOWRAP is mostly used to treat expressions in TYPE_SIZE and friends
14059 as not wrapping even though they are generally using unsigned arithmetic. */
14062 multiple_of_p (tree type
, const_tree top
, const_tree bottom
, bool nowrap
)
14067 if (operand_equal_p (top
, bottom
, 0))
14070 if (TREE_CODE (type
) != INTEGER_TYPE
)
14073 switch (TREE_CODE (top
))
14076 /* Bitwise and provides a power of two multiple. If the mask is
14077 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14078 if (!integer_pow2p (bottom
))
14080 return (multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
, nowrap
)
14081 || multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
, nowrap
));
14084 /* If the multiplication can wrap we cannot recurse further unless
14085 the bottom is a power of two which is where wrapping does not
14088 && !TYPE_OVERFLOW_UNDEFINED (type
)
14089 && !integer_pow2p (bottom
))
14091 if (TREE_CODE (bottom
) == INTEGER_CST
)
14093 op1
= TREE_OPERAND (top
, 0);
14094 op2
= TREE_OPERAND (top
, 1);
14095 if (TREE_CODE (op1
) == INTEGER_CST
)
14096 std::swap (op1
, op2
);
14097 if (TREE_CODE (op2
) == INTEGER_CST
)
14099 if (multiple_of_p (type
, op2
, bottom
, nowrap
))
14101 /* Handle multiple_of_p ((x * 2 + 2) * 4, 8). */
14102 if (multiple_of_p (type
, bottom
, op2
, nowrap
))
14104 widest_int w
= wi::sdiv_trunc (wi::to_widest (bottom
),
14105 wi::to_widest (op2
));
14106 if (wi::fits_to_tree_p (w
, TREE_TYPE (bottom
)))
14108 op2
= wide_int_to_tree (TREE_TYPE (bottom
), w
);
14109 return multiple_of_p (type
, op1
, op2
, nowrap
);
14112 return multiple_of_p (type
, op1
, bottom
, nowrap
);
14115 return (multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
, nowrap
)
14116 || multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
, nowrap
));
14119 /* Handle X << CST as X * (1 << CST) and only process the constant. */
14120 if (TREE_CODE (TREE_OPERAND (top
, 1)) == INTEGER_CST
)
14122 op1
= TREE_OPERAND (top
, 1);
14123 if (wi::to_widest (op1
) < TYPE_PRECISION (type
))
14126 = wi::one (TYPE_PRECISION (type
)) << wi::to_wide (op1
);
14127 return multiple_of_p (type
,
14128 wide_int_to_tree (type
, mul_op
), bottom
,
14136 /* If the addition or subtraction can wrap we cannot recurse further
14137 unless bottom is a power of two which is where wrapping does not
14140 && !TYPE_OVERFLOW_UNDEFINED (type
)
14141 && !integer_pow2p (bottom
))
14144 /* Handle cases like op0 + 0xfffffffd as op0 - 3 if the expression has
14145 unsigned type. For example, (X / 3) + 0xfffffffd is multiple of 3,
14146 but 0xfffffffd is not. */
14147 op1
= TREE_OPERAND (top
, 1);
14148 if (TREE_CODE (top
) == PLUS_EXPR
14150 && TYPE_UNSIGNED (type
)
14151 && TREE_CODE (op1
) == INTEGER_CST
&& tree_int_cst_sign_bit (op1
))
14152 op1
= fold_build1 (NEGATE_EXPR
, type
, op1
);
14154 /* It is impossible to prove if op0 +- op1 is multiple of bottom
14155 precisely, so be conservative here checking if both op0 and op1
14156 are multiple of bottom. Note we check the second operand first
14157 since it's usually simpler. */
14158 return (multiple_of_p (type
, op1
, bottom
, nowrap
)
14159 && multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
, nowrap
));
14162 /* Can't handle conversions from non-integral or wider integral type. */
14163 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top
, 0))) != INTEGER_TYPE
)
14164 || (TYPE_PRECISION (type
)
14165 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top
, 0)))))
14167 /* NOWRAP only extends to operations in the outermost type so
14168 make sure to strip it off here. */
14169 return multiple_of_p (TREE_TYPE (TREE_OPERAND (top
, 0)),
14170 TREE_OPERAND (top
, 0), bottom
, false);
14173 return multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
, nowrap
);
14176 return (multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
, nowrap
)
14177 && multiple_of_p (type
, TREE_OPERAND (top
, 2), bottom
, nowrap
));
14180 if (TREE_CODE (bottom
) != INTEGER_CST
|| integer_zerop (bottom
))
14182 return wi::multiple_of_p (wi::to_widest (top
), wi::to_widest (bottom
),
14186 if (TREE_CODE (bottom
) == INTEGER_CST
14187 && (stmt
= SSA_NAME_DEF_STMT (top
)) != NULL
14188 && gimple_code (stmt
) == GIMPLE_ASSIGN
)
14190 enum tree_code code
= gimple_assign_rhs_code (stmt
);
14192 /* Check for special cases to see if top is defined as multiple
14195 top = (X & ~(bottom - 1) ; bottom is power of 2
14201 if (code
== BIT_AND_EXPR
14202 && (op2
= gimple_assign_rhs2 (stmt
)) != NULL_TREE
14203 && TREE_CODE (op2
) == INTEGER_CST
14204 && integer_pow2p (bottom
)
14205 && wi::multiple_of_p (wi::to_widest (op2
),
14206 wi::to_widest (bottom
), UNSIGNED
))
14209 op1
= gimple_assign_rhs1 (stmt
);
14210 if (code
== MINUS_EXPR
14211 && (op2
= gimple_assign_rhs2 (stmt
)) != NULL_TREE
14212 && TREE_CODE (op2
) == SSA_NAME
14213 && (stmt
= SSA_NAME_DEF_STMT (op2
)) != NULL
14214 && gimple_code (stmt
) == GIMPLE_ASSIGN
14215 && (code
= gimple_assign_rhs_code (stmt
)) == TRUNC_MOD_EXPR
14216 && operand_equal_p (op1
, gimple_assign_rhs1 (stmt
), 0)
14217 && operand_equal_p (bottom
, gimple_assign_rhs2 (stmt
), 0))
14224 if (POLY_INT_CST_P (top
) && poly_int_tree_p (bottom
))
14225 return multiple_p (wi::to_poly_widest (top
),
14226 wi::to_poly_widest (bottom
));
14232 /* Return true if expression X cannot be (or contain) a NaN or infinity.
14233 This function returns true for integer expressions, and returns
14234 false if uncertain. */
14237 tree_expr_finite_p (const_tree x
)
14239 machine_mode mode
= element_mode (x
);
14240 if (!HONOR_NANS (mode
) && !HONOR_INFINITIES (mode
))
14242 switch (TREE_CODE (x
))
14245 return real_isfinite (TREE_REAL_CST_PTR (x
));
14247 return tree_expr_finite_p (TREE_REALPART (x
))
14248 && tree_expr_finite_p (TREE_IMAGPART (x
));
14253 case NON_LVALUE_EXPR
:
14256 return tree_expr_finite_p (TREE_OPERAND (x
, 0));
14259 return tree_expr_finite_p (TREE_OPERAND (x
, 0))
14260 && tree_expr_finite_p (TREE_OPERAND (x
, 1));
14262 return tree_expr_finite_p (TREE_OPERAND (x
, 1))
14263 && tree_expr_finite_p (TREE_OPERAND (x
, 2));
14265 switch (get_call_combined_fn (x
))
14269 return tree_expr_finite_p (CALL_EXPR_ARG (x
, 0));
14274 return tree_expr_finite_p (CALL_EXPR_ARG (x
, 0))
14275 && tree_expr_finite_p (CALL_EXPR_ARG (x
, 1));
14285 /* Return true if expression X evaluates to an infinity.
14286 This function returns false for integer expressions. */
14289 tree_expr_infinite_p (const_tree x
)
14291 if (!HONOR_INFINITIES (x
))
14293 switch (TREE_CODE (x
))
14296 return real_isinf (TREE_REAL_CST_PTR (x
));
14299 case NON_LVALUE_EXPR
:
14301 return tree_expr_infinite_p (TREE_OPERAND (x
, 0));
14303 return tree_expr_infinite_p (TREE_OPERAND (x
, 1))
14304 && tree_expr_infinite_p (TREE_OPERAND (x
, 2));
14310 /* Return true if expression X could evaluate to an infinity.
14311 This function returns false for integer expressions, and returns
14312 true if uncertain. */
14315 tree_expr_maybe_infinite_p (const_tree x
)
14317 if (!HONOR_INFINITIES (x
))
14319 switch (TREE_CODE (x
))
14322 return real_isinf (TREE_REAL_CST_PTR (x
));
14327 return tree_expr_maybe_infinite_p (TREE_OPERAND (x
, 0));
14329 return tree_expr_maybe_infinite_p (TREE_OPERAND (x
, 1))
14330 || tree_expr_maybe_infinite_p (TREE_OPERAND (x
, 2));
14336 /* Return true if expression X evaluates to a signaling NaN.
14337 This function returns false for integer expressions. */
14340 tree_expr_signaling_nan_p (const_tree x
)
14342 if (!HONOR_SNANS (x
))
14344 switch (TREE_CODE (x
))
14347 return real_issignaling_nan (TREE_REAL_CST_PTR (x
));
14348 case NON_LVALUE_EXPR
:
14350 return tree_expr_signaling_nan_p (TREE_OPERAND (x
, 0));
14352 return tree_expr_signaling_nan_p (TREE_OPERAND (x
, 1))
14353 && tree_expr_signaling_nan_p (TREE_OPERAND (x
, 2));
14359 /* Return true if expression X could evaluate to a signaling NaN.
14360 This function returns false for integer expressions, and returns
14361 true if uncertain. */
14364 tree_expr_maybe_signaling_nan_p (const_tree x
)
14366 if (!HONOR_SNANS (x
))
14368 switch (TREE_CODE (x
))
14371 return real_issignaling_nan (TREE_REAL_CST_PTR (x
));
14377 case NON_LVALUE_EXPR
:
14379 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x
, 0));
14382 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x
, 0))
14383 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x
, 1));
14385 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x
, 1))
14386 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x
, 2));
14388 switch (get_call_combined_fn (x
))
14392 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x
, 0));
14397 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x
, 0))
14398 || tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x
, 1));
14407 /* Return true if expression X evaluates to a NaN.
14408 This function returns false for integer expressions. */
14411 tree_expr_nan_p (const_tree x
)
14413 if (!HONOR_NANS (x
))
14415 switch (TREE_CODE (x
))
14418 return real_isnan (TREE_REAL_CST_PTR (x
));
14419 case NON_LVALUE_EXPR
:
14421 return tree_expr_nan_p (TREE_OPERAND (x
, 0));
14423 return tree_expr_nan_p (TREE_OPERAND (x
, 1))
14424 && tree_expr_nan_p (TREE_OPERAND (x
, 2));
14430 /* Return true if expression X could evaluate to a NaN.
14431 This function returns false for integer expressions, and returns
14432 true if uncertain. */
14435 tree_expr_maybe_nan_p (const_tree x
)
14437 if (!HONOR_NANS (x
))
14439 switch (TREE_CODE (x
))
14442 return real_isnan (TREE_REAL_CST_PTR (x
));
14448 return !tree_expr_finite_p (TREE_OPERAND (x
, 0))
14449 || !tree_expr_finite_p (TREE_OPERAND (x
, 1));
14453 case NON_LVALUE_EXPR
:
14455 return tree_expr_maybe_nan_p (TREE_OPERAND (x
, 0));
14458 return tree_expr_maybe_nan_p (TREE_OPERAND (x
, 0))
14459 || tree_expr_maybe_nan_p (TREE_OPERAND (x
, 1));
14461 return tree_expr_maybe_nan_p (TREE_OPERAND (x
, 1))
14462 || tree_expr_maybe_nan_p (TREE_OPERAND (x
, 2));
14464 switch (get_call_combined_fn (x
))
14468 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x
, 0));
14473 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x
, 0))
14474 || tree_expr_maybe_nan_p (CALL_EXPR_ARG (x
, 1));
14483 /* Return true if expression X could evaluate to -0.0.
14484 This function returns true if uncertain. */
14487 tree_expr_maybe_real_minus_zero_p (const_tree x
)
14489 if (!HONOR_SIGNED_ZEROS (x
))
14491 switch (TREE_CODE (x
))
14494 return REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (x
));
14499 case NON_LVALUE_EXPR
:
14501 return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x
, 0));
14503 return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x
, 1))
14504 || tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x
, 2));
14506 switch (get_call_combined_fn (x
))
14517 /* Ideally !(tree_expr_nonzero_p (X) || tree_expr_nonnegative_p (X))
14518 * but currently those predicates require tree and not const_tree. */
/* Poison direct calls within this section: recursive queries must go
   through RECURSE so the depth limit is threaded along.  */
#define tree_expr_nonnegative_warnv_p(X, Y) \
  _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0

#define RECURSE(X) \
  ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
14528 /* Return true if CODE or TYPE is known to be non-negative. */
14531 tree_simple_nonnegative_warnv_p (enum tree_code code
, tree type
)
14533 if (!VECTOR_TYPE_P (type
)
14534 && (TYPE_PRECISION (type
) != 1 || TYPE_UNSIGNED (type
))
14535 && truth_value_p (code
))
14536 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14537 have a signed:1 type (where the value is -1 and 0). */
14542 /* Return true if (CODE OP0) is known to be non-negative. If the return
14543 value is based on the assumption that signed overflow is undefined,
14544 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14545 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14548 tree_unary_nonnegative_warnv_p (enum tree_code code
, tree type
, tree op0
,
14549 bool *strict_overflow_p
, int depth
)
14551 if (TYPE_UNSIGNED (type
))
14557 /* We can't return 1 if flag_wrapv is set because
14558 ABS_EXPR<INT_MIN> = INT_MIN. */
14559 if (!ANY_INTEGRAL_TYPE_P (type
))
14561 if (TYPE_OVERFLOW_UNDEFINED (type
))
14563 *strict_overflow_p
= true;
14568 case NON_LVALUE_EXPR
:
14570 case FIX_TRUNC_EXPR
:
14571 return RECURSE (op0
);
14575 tree inner_type
= TREE_TYPE (op0
);
14576 tree outer_type
= type
;
14578 if (SCALAR_FLOAT_TYPE_P (outer_type
))
14580 if (SCALAR_FLOAT_TYPE_P (inner_type
))
14581 return RECURSE (op0
);
14582 if (INTEGRAL_TYPE_P (inner_type
))
14584 if (TYPE_UNSIGNED (inner_type
))
14586 return RECURSE (op0
);
14589 else if (INTEGRAL_TYPE_P (outer_type
))
14591 if (SCALAR_FLOAT_TYPE_P (inner_type
))
14592 return RECURSE (op0
);
14593 if (INTEGRAL_TYPE_P (inner_type
))
14594 return TYPE_PRECISION (inner_type
) < TYPE_PRECISION (outer_type
)
14595 && TYPE_UNSIGNED (inner_type
);
14601 return tree_simple_nonnegative_warnv_p (code
, type
);
14604 /* We don't know sign of `t', so be conservative and return false. */
14608 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14609 value is based on the assumption that signed overflow is undefined,
14610 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14611 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
/* NOTE(review): extraction-garbled text — statements are split across lines,
   original fold-const line numbers (e.g. "14614") are fused into the content,
   and intermediate lines are missing.  Tokens kept byte-identical.
   Purpose (per visible code): returns whether (CODE TYPE OP0 OP1) is known
   non-negative; sets *strict_overflow_p when that relies on undefined
   signed overflow.  TODO: restore from pristine fold-const.cc.  */
14614 tree_binary_nonnegative_warnv_p (enum tree_code code
, tree type
, tree op0
,
14615 tree op1
, bool *strict_overflow_p
,
14618 if (TYPE_UNSIGNED (type
))
14623 case POINTER_PLUS_EXPR
:
14625 if (FLOAT_TYPE_P (type
))
14626 return RECURSE (op0
) && RECURSE (op1
);
14628 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14629 both unsigned and at least 2 bits shorter than the result. */
14630 if (TREE_CODE (type
) == INTEGER_TYPE
14631 && TREE_CODE (op0
) == NOP_EXPR
14632 && TREE_CODE (op1
) == NOP_EXPR
)
14634 tree inner1
= TREE_TYPE (TREE_OPERAND (op0
, 0));
14635 tree inner2
= TREE_TYPE (TREE_OPERAND (op1
, 0));
14636 if (TREE_CODE (inner1
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner1
)
14637 && TREE_CODE (inner2
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner2
))
14639 unsigned int prec
= MAX (TYPE_PRECISION (inner1
),
14640 TYPE_PRECISION (inner2
)) + 1;
14641 return prec
< TYPE_PRECISION (type
);
14647 if (FLOAT_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
14649 /* x * x is always non-negative for floating point x
14650 or without overflow. */
14651 if (operand_equal_p (op0
, op1
, 0)
14652 || (RECURSE (op0
) && RECURSE (op1
)))
14654 if (ANY_INTEGRAL_TYPE_P (type
)
14655 && TYPE_OVERFLOW_UNDEFINED (type
))
14656 *strict_overflow_p
= true;
14661 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14662 both unsigned and their total bits is shorter than the result. */
14663 if (TREE_CODE (type
) == INTEGER_TYPE
14664 && (TREE_CODE (op0
) == NOP_EXPR
|| TREE_CODE (op0
) == INTEGER_CST
)
14665 && (TREE_CODE (op1
) == NOP_EXPR
|| TREE_CODE (op1
) == INTEGER_CST
))
14667 tree inner0
= (TREE_CODE (op0
) == NOP_EXPR
)
14668 ? TREE_TYPE (TREE_OPERAND (op0
, 0))
14670 tree inner1
= (TREE_CODE (op1
) == NOP_EXPR
)
14671 ? TREE_TYPE (TREE_OPERAND (op1
, 0))
14674 bool unsigned0
= TYPE_UNSIGNED (inner0
);
14675 bool unsigned1
= TYPE_UNSIGNED (inner1
);
14677 if (TREE_CODE (op0
) == INTEGER_CST
)
14678 unsigned0
= unsigned0
|| tree_int_cst_sgn (op0
) >= 0;
14680 if (TREE_CODE (op1
) == INTEGER_CST
)
14681 unsigned1
= unsigned1
|| tree_int_cst_sgn (op1
) >= 0;
14683 if (TREE_CODE (inner0
) == INTEGER_TYPE
&& unsigned0
14684 && TREE_CODE (inner1
) == INTEGER_TYPE
&& unsigned1
)
14686 unsigned int precision0
= (TREE_CODE (op0
) == INTEGER_CST
)
14687 ? tree_int_cst_min_precision (op0
, UNSIGNED
)
14688 : TYPE_PRECISION (inner0
);
14690 unsigned int precision1
= (TREE_CODE (op1
) == INTEGER_CST
)
14691 ? tree_int_cst_min_precision (op1
, UNSIGNED
)
14692 : TYPE_PRECISION (inner1
);
14694 return precision0
+ precision1
< TYPE_PRECISION (type
);
14700 return RECURSE (op0
) || RECURSE (op1
);
14703 /* Usually RECURSE (op0) || RECURSE (op1) but NaNs complicate
14705 if (tree_expr_maybe_nan_p (op0
) || tree_expr_maybe_nan_p (op1
))
14706 return RECURSE (op0
) && RECURSE (op1
);
14707 return RECURSE (op0
) || RECURSE (op1
);
14713 case TRUNC_DIV_EXPR
:
14714 case CEIL_DIV_EXPR
:
14715 case FLOOR_DIV_EXPR
:
14716 case ROUND_DIV_EXPR
:
14717 return RECURSE (op0
) && RECURSE (op1
);
14719 case TRUNC_MOD_EXPR
:
14720 return RECURSE (op0
);
14722 case FLOOR_MOD_EXPR
:
14723 return RECURSE (op1
);
14725 case CEIL_MOD_EXPR
:
14726 case ROUND_MOD_EXPR
:
14728 return tree_simple_nonnegative_warnv_p (code
, type
);
14731 /* We don't know sign of `t', so be conservative and return false. */
/* NOTE(review): extraction-garbled text — split statements, fused original
   line numbers, elided lines.  Tokens kept byte-identical.  Dispatches on
   TREE_CODE (t): constants checked directly, SSA names bounded by
   param_max_ssa_name_query_depth to avoid quadratic recursion.  */
14735 /* Return true if T is known to be non-negative. If the return
14736 value is based on the assumption that signed overflow is undefined,
14737 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14738 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14741 tree_single_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
, int depth
)
14743 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
14746 switch (TREE_CODE (t
))
14749 return tree_int_cst_sgn (t
) >= 0;
14752 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t
));
14755 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t
));
14758 return RECURSE (TREE_OPERAND (t
, 1)) && RECURSE (TREE_OPERAND (t
, 2));
14761 /* Limit the depth of recursion to avoid quadratic behavior.
14762 This is expected to catch almost all occurrences in practice.
14763 If this code misses important cases that unbounded recursion
14764 would not, passes that need this information could be revised
14765 to provide it through dataflow propagation. */
14766 return (!name_registered_for_update_p (t
)
14767 && depth
< param_max_ssa_name_query_depth
14768 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t
),
14769 strict_overflow_p
, depth
));
14772 return tree_simple_nonnegative_warnv_p (TREE_CODE (t
), TREE_TYPE (t
));
/* NOTE(review): extraction-garbled text — split statements, fused original
   line numbers, elided lines (the switch head and many case labels are
   missing).  Tokens kept byte-identical.  Classifies built-in calls (bswap,
   sqrt-like, rounding, copysign, pow, ...) by which arguments must be
   non-negative for the result to be non-negative.  */
14776 /* Return true if T is known to be non-negative. If the return
14777 value is based on the assumption that signed overflow is undefined,
14778 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14779 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14782 tree_call_nonnegative_warnv_p (tree type
, combined_fn fn
, tree arg0
, tree arg1
,
14783 bool *strict_overflow_p
, int depth
)
14814 case CFN_BUILT_IN_BSWAP16
:
14815 case CFN_BUILT_IN_BSWAP32
:
14816 case CFN_BUILT_IN_BSWAP64
:
14817 case CFN_BUILT_IN_BSWAP128
:
14823 /* sqrt(-0.0) is -0.0. */
14824 if (!HONOR_SIGNED_ZEROS (type
))
14826 return RECURSE (arg0
);
14858 CASE_CFN_LLRINT_FN
:
14860 CASE_CFN_LLROUND_FN
:
14864 CASE_CFN_LROUND_FN
:
14867 CASE_CFN_NEARBYINT
:
14868 CASE_CFN_NEARBYINT_FN
:
14873 CASE_CFN_ROUNDEVEN
:
14874 CASE_CFN_ROUNDEVEN_FN
:
14877 CASE_CFN_SCALBLN_FN
:
14879 CASE_CFN_SCALBN_FN
:
14881 CASE_CFN_SIGNIFICAND
:
14888 /* True if the 1st argument is nonnegative. */
14889 return RECURSE (arg0
);
14893 /* Usually RECURSE (arg0) || RECURSE (arg1) but NaNs complicate
14894 things. In the presence of sNaNs, we're only guaranteed to be
14895 non-negative if both operands are non-negative. In the presence
14896 of qNaNs, we're non-negative if either operand is non-negative
14897 and can't be a qNaN, or if both operands are non-negative. */
14898 if (tree_expr_maybe_signaling_nan_p (arg0
) ||
14899 tree_expr_maybe_signaling_nan_p (arg1
))
14900 return RECURSE (arg0
) && RECURSE (arg1
);
14901 return RECURSE (arg0
) ? (!tree_expr_maybe_nan_p (arg0
)
14904 && !tree_expr_maybe_nan_p (arg1
));
14908 /* True if the 1st AND 2nd arguments are nonnegative. */
14909 return RECURSE (arg0
) && RECURSE (arg1
);
14912 CASE_CFN_COPYSIGN_FN
:
14913 /* True if the 2nd argument is nonnegative. */
14914 return RECURSE (arg1
);
14917 /* True if the 1st argument is nonnegative or the second
14918 argument is an even integer. */
14919 if (TREE_CODE (arg1
) == INTEGER_CST
14920 && (TREE_INT_CST_LOW (arg1
) & 1) == 0)
14922 return RECURSE (arg0
);
14926 /* True if the 1st argument is nonnegative or the second
14927 argument is an even integer valued real. */
14928 if (TREE_CODE (arg1
) == REAL_CST
)
14933 c
= TREE_REAL_CST (arg1
);
14934 n
= real_to_integer (&c
);
14937 REAL_VALUE_TYPE cint
;
14938 real_from_integer (&cint
, VOIDmode
, n
, SIGNED
);
14939 if (real_identical (&c
, &cint
))
14943 return RECURSE (arg0
);
14948 return tree_simple_nonnegative_warnv_p (CALL_EXPR
, type
);
/* NOTE(review): extraction-garbled text — split statements, fused original
   line numbers, elided lines.  Tokens kept byte-identical.  Handles codes
   not covered elsewhere: TARGET_EXPR (looks through the initializer for a
   final MODIFY_EXPR of the slot), CALL_EXPR (delegates to
   tree_call_nonnegative_warnv_p), COMPOUND_EXPR and friends.  */
14951 /* Return true if T is known to be non-negative. If the return
14952 value is based on the assumption that signed overflow is undefined,
14953 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14954 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14957 tree_invalid_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
, int depth
)
14959 enum tree_code code
= TREE_CODE (t
);
14960 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
14967 tree temp
= TARGET_EXPR_SLOT (t
);
14968 t
= TARGET_EXPR_INITIAL (t
);
14970 /* If the initializer is non-void, then it's a normal expression
14971 that will be assigned to the slot. */
14972 if (!VOID_TYPE_P (TREE_TYPE (t
)))
14973 return RECURSE (t
);
14975 /* Otherwise, the initializer sets the slot in some way. One common
14976 way is an assignment statement at the end of the initializer. */
14979 if (TREE_CODE (t
) == BIND_EXPR
)
14980 t
= expr_last (BIND_EXPR_BODY (t
));
14981 else if (TREE_CODE (t
) == TRY_FINALLY_EXPR
14982 || TREE_CODE (t
) == TRY_CATCH_EXPR
)
14983 t
= expr_last (TREE_OPERAND (t
, 0));
14984 else if (TREE_CODE (t
) == STATEMENT_LIST
)
14989 if (TREE_CODE (t
) == MODIFY_EXPR
14990 && TREE_OPERAND (t
, 0) == temp
)
14991 return RECURSE (TREE_OPERAND (t
, 1));
14998 tree arg0
= call_expr_nargs (t
) > 0 ? CALL_EXPR_ARG (t
, 0) : NULL_TREE
;
14999 tree arg1
= call_expr_nargs (t
) > 1 ? CALL_EXPR_ARG (t
, 1) : NULL_TREE
;
15001 return tree_call_nonnegative_warnv_p (TREE_TYPE (t
),
15002 get_call_combined_fn (t
),
15005 strict_overflow_p
, depth
);
15007 case COMPOUND_EXPR
:
15009 return RECURSE (TREE_OPERAND (t
, 1));
15012 return RECURSE (expr_last (TREE_OPERAND (t
, 1)));
15015 return RECURSE (TREE_OPERAND (t
, 0));
15018 return tree_simple_nonnegative_warnv_p (TREE_CODE (t
), TREE_TYPE (t
));
/* End of the RECURSE-guarded region: drop the poisoning macro so the real
   tree_expr_nonnegative_warnv_p can be defined below.  */
15023 #undef tree_expr_nonnegative_warnv_p
/* NOTE(review): extraction-garbled text — split statements, fused original
   line numbers, elided lines.  Tokens kept byte-identical.  Top-level
   dispatcher: routes by TREE_CODE_CLASS to the binary/unary/single/invalid
   helpers above.  */
15025 /* Return true if T is known to be non-negative. If the return
15026 value is based on the assumption that signed overflow is undefined,
15027 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15028 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
15031 tree_expr_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
, int depth
)
15033 enum tree_code code
;
15034 if (t
== error_mark_node
)
15037 code
= TREE_CODE (t
);
15038 switch (TREE_CODE_CLASS (code
))
15041 case tcc_comparison
:
15042 return tree_binary_nonnegative_warnv_p (TREE_CODE (t
),
15044 TREE_OPERAND (t
, 0),
15045 TREE_OPERAND (t
, 1),
15046 strict_overflow_p
, depth
);
15049 return tree_unary_nonnegative_warnv_p (TREE_CODE (t
),
15051 TREE_OPERAND (t
, 0),
15052 strict_overflow_p
, depth
);
15055 case tcc_declaration
:
15056 case tcc_reference
:
15057 return tree_single_nonnegative_warnv_p (t
, strict_overflow_p
, depth
);
15065 case TRUTH_AND_EXPR
:
15066 case TRUTH_OR_EXPR
:
15067 case TRUTH_XOR_EXPR
:
15068 return tree_binary_nonnegative_warnv_p (TREE_CODE (t
),
15070 TREE_OPERAND (t
, 0),
15071 TREE_OPERAND (t
, 1),
15072 strict_overflow_p
, depth
);
15073 case TRUTH_NOT_EXPR
:
15074 return tree_unary_nonnegative_warnv_p (TREE_CODE (t
),
15076 TREE_OPERAND (t
, 0),
15077 strict_overflow_p
, depth
);
15083 case WITH_SIZE_EXPR
:
15085 return tree_single_nonnegative_warnv_p (t
, strict_overflow_p
, depth
);
15088 return tree_invalid_nonnegative_warnv_p (t
, strict_overflow_p
, depth
);
/* NOTE(review): extraction-garbled text — split statements, fused original
   line numbers, elided lines.  Tokens kept byte-identical.  Public wrapper:
   queries the warnv variant and emits the -Wstrict-overflow diagnostic when
   the answer relied on undefined signed overflow.  */
15092 /* Return true if `t' is known to be non-negative. Handle warnings
15093 about undefined signed overflow. */
15096 tree_expr_nonnegative_p (tree t
)
15098 bool ret
, strict_overflow_p
;
15100 strict_overflow_p
= false;
15101 ret
= tree_expr_nonnegative_warnv_p (t
, &strict_overflow_p
);
15102 if (strict_overflow_p
)
15103 fold_overflow_warning (("assuming signed overflow does not occur when "
15104 "determining that expression is always "
15106 WARN_STRICT_OVERFLOW_MISC
);
/* NOTE(review): extraction-garbled text — split statements, fused original
   line numbers, elided lines (switch head and several case labels missing).
   Tokens kept byte-identical.  Unary codes: looks through conversions and
   NON_LVALUE_EXPR when precision is not reduced.  */
15111 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15112 For floating point we further ensure that T is not denormal.
15113 Similar logic is present in nonzero_address in rtlanal.h.
15115 If the return value is based on the assumption that signed overflow
15116 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15117 change *STRICT_OVERFLOW_P. */
15120 tree_unary_nonzero_warnv_p (enum tree_code code
, tree type
, tree op0
,
15121 bool *strict_overflow_p
)
15126 return tree_expr_nonzero_warnv_p (op0
,
15127 strict_overflow_p
);
15131 tree inner_type
= TREE_TYPE (op0
);
15132 tree outer_type
= type
;
15134 return (TYPE_PRECISION (outer_type
) >= TYPE_PRECISION (inner_type
)
15135 && tree_expr_nonzero_warnv_p (op0
,
15136 strict_overflow_p
));
15140 case NON_LVALUE_EXPR
:
15141 return tree_expr_nonzero_warnv_p (op0
,
15142 strict_overflow_p
);
/* NOTE(review): extraction-garbled text — split statements, fused original
   line numbers, elided lines.  Tokens kept byte-identical.  Binary codes:
   plus of non-negatives, multiply under undefined overflow, MIN/MAX and
   bit-or style combinations of nonzero operands.  */
15151 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15152 For floating point we further ensure that T is not denormal.
15153 Similar logic is present in nonzero_address in rtlanal.h.
15155 If the return value is based on the assumption that signed overflow
15156 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15157 change *STRICT_OVERFLOW_P. */
15160 tree_binary_nonzero_warnv_p (enum tree_code code
,
15163 tree op1
, bool *strict_overflow_p
)
15165 bool sub_strict_overflow_p
;
15168 case POINTER_PLUS_EXPR
:
15170 if (ANY_INTEGRAL_TYPE_P (type
) && TYPE_OVERFLOW_UNDEFINED (type
))
15172 /* With the presence of negative values it is hard
15173 to say something. */
15174 sub_strict_overflow_p
= false;
15175 if (!tree_expr_nonnegative_warnv_p (op0
,
15176 &sub_strict_overflow_p
)
15177 || !tree_expr_nonnegative_warnv_p (op1
,
15178 &sub_strict_overflow_p
))
15180 /* One of operands must be positive and the other non-negative. */
15181 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15182 overflows, on a twos-complement machine the sum of two
15183 nonnegative numbers can never be zero. */
15184 return (tree_expr_nonzero_warnv_p (op0
,
15186 || tree_expr_nonzero_warnv_p (op1
,
15187 strict_overflow_p
));
15192 if (TYPE_OVERFLOW_UNDEFINED (type
))
15194 if (tree_expr_nonzero_warnv_p (op0
,
15196 && tree_expr_nonzero_warnv_p (op1
,
15197 strict_overflow_p
))
15199 *strict_overflow_p
= true;
15206 sub_strict_overflow_p
= false;
15207 if (tree_expr_nonzero_warnv_p (op0
,
15208 &sub_strict_overflow_p
)
15209 && tree_expr_nonzero_warnv_p (op1
,
15210 &sub_strict_overflow_p
))
15212 if (sub_strict_overflow_p
)
15213 *strict_overflow_p
= true;
15218 sub_strict_overflow_p
= false;
15219 if (tree_expr_nonzero_warnv_p (op0
,
15220 &sub_strict_overflow_p
))
15222 if (sub_strict_overflow_p
)
15223 *strict_overflow_p
= true;
15225 /* When both operands are nonzero, then MAX must be too. */
15226 if (tree_expr_nonzero_warnv_p (op1
,
15227 strict_overflow_p
))
15230 /* MAX where operand 0 is positive is positive. */
15231 return tree_expr_nonnegative_warnv_p (op0
,
15232 strict_overflow_p
);
15234 /* MAX where operand 1 is positive is positive. */
15235 else if (tree_expr_nonzero_warnv_p (op1
,
15236 &sub_strict_overflow_p
)
15237 && tree_expr_nonnegative_warnv_p (op1
,
15238 &sub_strict_overflow_p
))
15240 if (sub_strict_overflow_p
)
15241 *strict_overflow_p
= true;
15247 return (tree_expr_nonzero_warnv_p (op1
,
15249 || tree_expr_nonzero_warnv_p (op0
,
15250 strict_overflow_p
));
/* NOTE(review): extraction-garbled text — split statements, fused original
   line numbers, elided lines.  Tokens kept byte-identical.  Single-operand
   cases: literal constants, ADDR_EXPR bases via maybe_nonzero_address,
   COND_EXPR arms, and integral SSA ranges via expr_not_equal_to.  */
15259 /* Return true when T is an address and is known to be nonzero.
15260 For floating point we further ensure that T is not denormal.
15261 Similar logic is present in nonzero_address in rtlanal.h.
15263 If the return value is based on the assumption that signed overflow
15264 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15265 change *STRICT_OVERFLOW_P. */
15268 tree_single_nonzero_warnv_p (tree t
, bool *strict_overflow_p
)
15270 bool sub_strict_overflow_p
;
15271 switch (TREE_CODE (t
))
15274 return !integer_zerop (t
);
15278 tree base
= TREE_OPERAND (t
, 0);
15280 if (!DECL_P (base
))
15281 base
= get_base_address (base
);
15283 if (base
&& TREE_CODE (base
) == TARGET_EXPR
)
15284 base
= TARGET_EXPR_SLOT (base
);
15289 /* For objects in symbol table check if we know they are non-zero.
15290 Don't do anything for variables and functions before symtab is built;
15291 it is quite possible that they will be declared weak later. */
15292 int nonzero_addr
= maybe_nonzero_address (base
);
15293 if (nonzero_addr
>= 0)
15294 return nonzero_addr
;
15296 /* Constants are never weak. */
15297 if (CONSTANT_CLASS_P (base
))
15304 sub_strict_overflow_p
= false;
15305 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 1),
15306 &sub_strict_overflow_p
)
15307 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 2),
15308 &sub_strict_overflow_p
))
15310 if (sub_strict_overflow_p
)
15311 *strict_overflow_p
= true;
15317 if (!INTEGRAL_TYPE_P (TREE_TYPE (t
)))
15319 return expr_not_equal_to (t
, wi::zero (TYPE_PRECISION (TREE_TYPE (t
))));
/* Poison direct recursive calls to integer_valued_real_p inside the helpers
   below; RECURSE re-enters with depth + 1 so the depth limit is honored.  */
15327 #define integer_valued_real_p(X) \
15328 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
15330 #define RECURSE(X) \
15331 ((integer_valued_real_p) (X, depth + 1))
/* NOTE(review): extraction-garbled text — split statements, fused original
   line numbers, elided lines (switch head / case labels missing).  Tokens
   kept byte-identical.  */
15333 /* Return true if the floating point result of (CODE OP0) has an
15334 integer value. We also allow +Inf, -Inf and NaN to be considered
15335 integer values. Return false for signaling NaN.
15337 DEPTH is the current nesting depth of the query. */
15340 integer_valued_real_unary_p (tree_code code
, tree op0
, int depth
)
15348 return RECURSE (op0
);
15352 tree type
= TREE_TYPE (op0
);
15353 if (TREE_CODE (type
) == INTEGER_TYPE
)
15355 if (SCALAR_FLOAT_TYPE_P (type
))
15356 return RECURSE (op0
);
/* NOTE(review): extraction-garbled text — tokens kept byte-identical;
   the switch head and case labels for the arithmetic codes are elided.  */
15366 /* Return true if the floating point result of (CODE OP0 OP1) has an
15367 integer value. We also allow +Inf, -Inf and NaN to be considered
15368 integer values. Return false for signaling NaN.
15370 DEPTH is the current nesting depth of the query. */
15373 integer_valued_real_binary_p (tree_code code
, tree op0
, tree op1
, int depth
)
15382 return RECURSE (op0
) && RECURSE (op1
);
/* NOTE(review): extraction-garbled text — tokens kept byte-identical;
   many case labels elided.  Rounding built-ins (nearbyint, roundeven, ...)
   always yield integer values; fmin/fmax-style cases recurse on both args.  */
15390 /* Return true if the floating point result of calling FNDECL with arguments
15391 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
15392 considered integer values. Return false for signaling NaN. If FNDECL
15393 takes fewer than 2 arguments, the remaining ARGn are null.
15395 DEPTH is the current nesting depth of the query. */
15398 integer_valued_real_call_p (combined_fn fn
, tree arg0
, tree arg1
, int depth
)
15406 CASE_CFN_NEARBYINT
:
15407 CASE_CFN_NEARBYINT_FN
:
15412 CASE_CFN_ROUNDEVEN
:
15413 CASE_CFN_ROUNDEVEN_FN
:
15422 return RECURSE (arg0
) && RECURSE (arg1
);
/* NOTE(review): extraction-garbled text — tokens kept byte-identical; the
   final gimple_stmt_integer_valued_real_p call is visibly cut short (its
   trailing "depth" argument and close parens are among the elided lines).  */
15430 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
15431 has an integer value. We also allow +Inf, -Inf and NaN to be
15432 considered integer values. Return false for signaling NaN.
15434 DEPTH is the current nesting depth of the query. */
15437 integer_valued_real_single_p (tree t
, int depth
)
15439 switch (TREE_CODE (t
))
15442 return real_isinteger (TREE_REAL_CST_PTR (t
), TYPE_MODE (TREE_TYPE (t
)));
15445 return RECURSE (TREE_OPERAND (t
, 1)) && RECURSE (TREE_OPERAND (t
, 2));
15448 /* Limit the depth of recursion to avoid quadratic behavior.
15449 This is expected to catch almost all occurrences in practice.
15450 If this code misses important cases that unbounded recursion
15451 would not, passes that need this information could be revised
15452 to provide it through dataflow propagation. */
15453 return (!name_registered_for_update_p (t
)
15454 && depth
< param_max_ssa_name_query_depth
15455 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t
),
/* NOTE(review): extraction-garbled text — tokens kept byte-identical;
   case labels besides COMPOUND_EXPR are elided.  */
15464 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
15465 has an integer value. We also allow +Inf, -Inf and NaN to be
15466 considered integer values. Return false for signaling NaN.
15468 DEPTH is the current nesting depth of the query. */
15471 integer_valued_real_invalid_p (tree t
, int depth
)
15473 switch (TREE_CODE (t
))
15475 case COMPOUND_EXPR
:
15478 return RECURSE (TREE_OPERAND (t
, 1));
15481 return RECURSE (TREE_OPERAND (t
, 0));
/* End of the RECURSE-guarded region: unpoison the name so the real
   integer_valued_real_p can be defined below.  */
15490 #undef integer_valued_real_p
/* NOTE(review): extraction-garbled text — split statements, fused original
   line numbers, elided lines.  Tokens kept byte-identical.  Top-level
   dispatcher by TREE_CODE_CLASS to the unary/binary/single/call/invalid
   helpers, after stripping location wrappers.  */
15492 /* Return true if the floating point expression T has an integer value.
15493 We also allow +Inf, -Inf and NaN to be considered integer values.
15494 Return false for signaling NaN.
15496 DEPTH is the current nesting depth of the query. */
15499 integer_valued_real_p (tree t
, int depth
)
15501 if (t
== error_mark_node
)
15504 STRIP_ANY_LOCATION_WRAPPER (t
);
15506 tree_code code
= TREE_CODE (t
);
15507 switch (TREE_CODE_CLASS (code
))
15510 case tcc_comparison
:
15511 return integer_valued_real_binary_p (code
, TREE_OPERAND (t
, 0),
15512 TREE_OPERAND (t
, 1), depth
);
15515 return integer_valued_real_unary_p (code
, TREE_OPERAND (t
, 0), depth
);
15518 case tcc_declaration
:
15519 case tcc_reference
:
15520 return integer_valued_real_single_p (t
, depth
);
15530 return integer_valued_real_single_p (t
, depth
);
15534 tree arg0
= (call_expr_nargs (t
) > 0
15535 ? CALL_EXPR_ARG (t
, 0)
15537 tree arg1
= (call_expr_nargs (t
) > 1
15538 ? CALL_EXPR_ARG (t
, 1)
15540 return integer_valued_real_call_p (get_call_combined_fn (t
),
15541 arg0
, arg1
, depth
);
15545 return integer_valued_real_invalid_p (t
, depth
);
/* NOTE(review): extraction-garbled text — tokens kept byte-identical.
   Thin wrapper over fold_binary that only keeps TREE_CONSTANT results.  */
15549 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15550 attempt to fold the expression to a constant without modifying TYPE,
15553 If the expression could be simplified to a constant, then return
15554 the constant. If the expression would not be simplified to a
15555 constant, then return NULL_TREE. */
15558 fold_binary_to_constant (enum tree_code code
, tree type
, tree op0
, tree op1
)
15560 tree tem
= fold_binary (code
, type
, op0
, op1
);
15561 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
/* NOTE(review): extraction-garbled text — tokens kept byte-identical.
   Thin wrapper over fold_unary that only keeps TREE_CONSTANT results.  */
15564 /* Given the components of a unary expression CODE, TYPE and OP0,
15565 attempt to fold the expression to a constant without modifying
15568 If the expression could be simplified to a constant, then return
15569 the constant. If the expression would not be simplified to a
15570 constant, then return NULL_TREE. */
15573 fold_unary_to_constant (enum tree_code code
, tree type
, tree op0
)
15575 tree tem
= fold_unary (code
, type
, op0
);
15576 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
/* NOTE(review): extraction-garbled text — split statements, fused original
   line numbers, elided lines (the ARRAY_REF else-branch head and final
   guard's first condition are missing).  Tokens kept byte-identical.
   Reduces *"str" / "str"[i] to the addressed character constant when the
   index is a known in-bounds constant and the element mode is 1 byte.  */
15579 /* If EXP represents referencing an element in a constant string
15580 (either via pointer arithmetic or array indexing), return the
15581 tree representing the value accessed, otherwise return NULL. */
15584 fold_read_from_constant_string (tree exp
)
15586 if ((INDIRECT_REF_P (exp
)
15587 || TREE_CODE (exp
) == ARRAY_REF
)
15588 && TREE_CODE (TREE_TYPE (exp
)) == INTEGER_TYPE
)
15590 tree exp1
= TREE_OPERAND (exp
, 0);
15593 location_t loc
= EXPR_LOCATION (exp
);
15595 if (INDIRECT_REF_P (exp
))
15596 string
= string_constant (exp1
, &index
, NULL
, NULL
);
15599 tree low_bound
= array_ref_low_bound (exp
);
15600 index
= fold_convert_loc (loc
, sizetype
, TREE_OPERAND (exp
, 1));
15602 /* Optimize the special-case of a zero lower bound.
15604 We convert the low_bound to sizetype to avoid some problems
15605 with constant folding. (E.g. suppose the lower bound is 1,
15606 and its mode is QI. Without the conversion,l (ARRAY
15607 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15608 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15609 if (! integer_zerop (low_bound
))
15610 index
= size_diffop_loc (loc
, index
,
15611 fold_convert_loc (loc
, sizetype
, low_bound
));
15616 scalar_int_mode char_mode
;
15618 && TYPE_MODE (TREE_TYPE (exp
)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string
)))
15619 && TREE_CODE (string
) == STRING_CST
15620 && tree_fits_uhwi_p (index
)
15621 && compare_tree_int (index
, TREE_STRING_LENGTH (string
)) < 0
15622 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string
))),
15624 && GET_MODE_SIZE (char_mode
) == 1)
15625 return build_int_cst_type (TREE_TYPE (exp
),
15626 (TREE_STRING_POINTER (string
)
15627 [TREE_INT_CST_LOW (index
)]));
/* NOTE(review): extraction-garbled text — tokens kept byte-identical.
   Extracts element IDX from a VECTOR_CST or vector CONSTRUCTOR when IDX is
   a known constant in range; missing trailing CONSTRUCTOR elements fold to
   zero.  */
15632 /* Folds a read from vector element at IDX of vector ARG. */
15635 fold_read_from_vector (tree arg
, poly_uint64 idx
)
15637 unsigned HOST_WIDE_INT i
;
15638 if (known_lt (idx
, TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg
)))
15639 && known_ge (idx
, 0u)
15640 && idx
.is_constant (&i
))
15642 if (TREE_CODE (arg
) == VECTOR_CST
)
15643 return VECTOR_CST_ELT (arg
, i
);
15644 else if (TREE_CODE (arg
) == CONSTRUCTOR
)
15646 if (CONSTRUCTOR_NELTS (arg
)
15647 && VECTOR_TYPE_P (TREE_TYPE (CONSTRUCTOR_ELT (arg
, 0)->value
)))
15649 if (i
>= CONSTRUCTOR_NELTS (arg
))
15650 return build_zero_cst (TREE_TYPE (TREE_TYPE (arg
)));
15651 return CONSTRUCTOR_ELT (arg
, i
)->value
;
/* NOTE(review): extraction-garbled text — tokens kept byte-identical;
   switch case labels elided.  Negates a REAL_CST, FIXED_CST or
   (poly-)integer constant, propagating TREE_OVERFLOW.  */
15657 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15658 an integer constant, real, or fixed-point constant.
15660 TYPE is the type of the result. */
15663 fold_negate_const (tree arg0
, tree type
)
15665 tree t
= NULL_TREE
;
15667 switch (TREE_CODE (arg0
))
15670 t
= build_real (type
, real_value_negate (&TREE_REAL_CST (arg0
)));
15675 FIXED_VALUE_TYPE f
;
15676 bool overflow_p
= fixed_arithmetic (&f
, NEGATE_EXPR
,
15677 &(TREE_FIXED_CST (arg0
)), NULL
,
15678 TYPE_SATURATING (type
));
15679 t
= build_fixed (type
, f
);
15680 /* Propagate overflow flags. */
15681 if (overflow_p
| TREE_OVERFLOW (arg0
))
15682 TREE_OVERFLOW (t
) = 1;
15687 if (poly_int_tree_p (arg0
))
15689 wi::overflow_type overflow
;
15690 poly_wide_int res
= wi::neg (wi::to_poly_wide (arg0
), &overflow
);
15691 t
= force_fit_type (type
, res
, 1,
15692 (overflow
&& ! TYPE_UNSIGNED (type
))
15693 || TREE_OVERFLOW (arg0
));
15697 gcc_unreachable ();
/* NOTE(review): extraction-garbled text — tokens kept byte-identical;
   case labels elided.  Absolute value of an INTEGER_CST (negating when
   negative, tracking overflow) or REAL_CST.  */
15703 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15704 an integer constant or real constant.
15706 TYPE is the type of the result. */
15709 fold_abs_const (tree arg0
, tree type
)
15711 tree t
= NULL_TREE
;
15713 switch (TREE_CODE (arg0
))
15717 /* If the value is unsigned or non-negative, then the absolute value
15718 is the same as the ordinary value. */
15719 wide_int val
= wi::to_wide (arg0
);
15720 wi::overflow_type overflow
= wi::OVF_NONE
;
15721 if (!wi::neg_p (val
, TYPE_SIGN (TREE_TYPE (arg0
))))
15724 /* If the value is negative, then the absolute value is
15727 val
= wi::neg (val
, &overflow
);
15729 /* Force to the destination type, set TREE_OVERFLOW for signed
15731 t
= force_fit_type (type
, val
, 1, overflow
| TREE_OVERFLOW (arg0
));
15736 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0
)))
15737 t
= build_real (type
, real_value_negate (&TREE_REAL_CST (arg0
)));
15743 gcc_unreachable ();
/* NOTE(review): extraction-garbled text — tokens kept byte-identical.
   Bitwise-not of an INTEGER_CST via wide-int, preserving TREE_OVERFLOW.  */
15749 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15750 constant. TYPE is the type of the result. */
15753 fold_not_const (const_tree arg0
, tree type
)
15755 gcc_assert (TREE_CODE (arg0
) == INTEGER_CST
);
15757 return force_fit_type (type
, ~wi::to_wide (arg0
), 0, TREE_OVERFLOW (arg0
));
/* NOTE(review): extraction-garbled text — split statements, fused original
   line numbers, elided lines (NaN switch body, several returns).  Tokens
   kept byte-identical.  Evaluates CODE over constant operands: reals
   (NaN-aware), fixed-point, complex (element-wise EQ/NE), vectors
   (element-wise, or scalar-boolean EQ/NE), then integers after
   canonicalizing to LT/EQ by swapping/inverting.  */
15760 /* Given CODE, a relational operator, the target type, TYPE and two
15761 constant operands OP0 and OP1, return the result of the
15762 relational operation. If the result is not a compile time
15763 constant, then return NULL_TREE. */
15766 fold_relational_const (enum tree_code code
, tree type
, tree op0
, tree op1
)
15768 int result
, invert
;
15770 /* From here on, the only cases we handle are when the result is
15771 known to be a constant. */
15773 if (TREE_CODE (op0
) == REAL_CST
&& TREE_CODE (op1
) == REAL_CST
)
15775 const REAL_VALUE_TYPE
*c0
= TREE_REAL_CST_PTR (op0
);
15776 const REAL_VALUE_TYPE
*c1
= TREE_REAL_CST_PTR (op1
);
15778 /* Handle the cases where either operand is a NaN. */
15779 if (real_isnan (c0
) || real_isnan (c1
))
15789 case UNORDERED_EXPR
:
15803 if (flag_trapping_math
)
15809 gcc_unreachable ();
15812 return constant_boolean_node (result
, type
);
15815 return constant_boolean_node (real_compare (code
, c0
, c1
), type
);
15818 if (TREE_CODE (op0
) == FIXED_CST
&& TREE_CODE (op1
) == FIXED_CST
)
15820 const FIXED_VALUE_TYPE
*c0
= TREE_FIXED_CST_PTR (op0
);
15821 const FIXED_VALUE_TYPE
*c1
= TREE_FIXED_CST_PTR (op1
);
15822 return constant_boolean_node (fixed_compare (code
, c0
, c1
), type
);
15825 /* Handle equality/inequality of complex constants. */
15826 if (TREE_CODE (op0
) == COMPLEX_CST
&& TREE_CODE (op1
) == COMPLEX_CST
)
15828 tree rcond
= fold_relational_const (code
, type
,
15829 TREE_REALPART (op0
),
15830 TREE_REALPART (op1
));
15831 tree icond
= fold_relational_const (code
, type
,
15832 TREE_IMAGPART (op0
),
15833 TREE_IMAGPART (op1
));
15834 if (code
== EQ_EXPR
)
15835 return fold_build2 (TRUTH_ANDIF_EXPR
, type
, rcond
, icond
);
15836 else if (code
== NE_EXPR
)
15837 return fold_build2 (TRUTH_ORIF_EXPR
, type
, rcond
, icond
);
15842 if (TREE_CODE (op0
) == VECTOR_CST
&& TREE_CODE (op1
) == VECTOR_CST
)
15844 if (!VECTOR_TYPE_P (type
))
15846 /* Have vector comparison with scalar boolean result. */
15847 gcc_assert ((code
== EQ_EXPR
|| code
== NE_EXPR
)
15848 && known_eq (VECTOR_CST_NELTS (op0
),
15849 VECTOR_CST_NELTS (op1
)));
15850 unsigned HOST_WIDE_INT nunits
;
15851 if (!VECTOR_CST_NELTS (op0
).is_constant (&nunits
))
15853 for (unsigned i
= 0; i
< nunits
; i
++)
15855 tree elem0
= VECTOR_CST_ELT (op0
, i
);
15856 tree elem1
= VECTOR_CST_ELT (op1
, i
);
15857 tree tmp
= fold_relational_const (EQ_EXPR
, type
, elem0
, elem1
);
15858 if (tmp
== NULL_TREE
)
15860 if (integer_zerop (tmp
))
15861 return constant_boolean_node (code
== NE_EXPR
, type
);
15863 return constant_boolean_node (code
== EQ_EXPR
, type
);
15865 tree_vector_builder elts
;
15866 if (!elts
.new_binary_operation (type
, op0
, op1
, false))
15868 unsigned int count
= elts
.encoded_nelts ();
15869 for (unsigned i
= 0; i
< count
; i
++)
15871 tree elem_type
= TREE_TYPE (type
);
15872 tree elem0
= VECTOR_CST_ELT (op0
, i
);
15873 tree elem1
= VECTOR_CST_ELT (op1
, i
);
15875 tree tem
= fold_relational_const (code
, elem_type
,
15878 if (tem
== NULL_TREE
)
15881 elts
.quick_push (build_int_cst (elem_type
,
15882 integer_zerop (tem
) ? 0 : -1));
15885 return elts
.build ();
15888 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15890 To compute GT, swap the arguments and do LT.
15891 To compute GE, do LT and invert the result.
15892 To compute LE, swap the arguments, do LT and invert the result.
15893 To compute NE, do EQ and invert the result.
15895 Therefore, the code below must handle only EQ and LT. */
15897 if (code
== LE_EXPR
|| code
== GT_EXPR
)
15899 std::swap (op0
, op1
);
15900 code
= swap_tree_comparison (code
);
15903 /* Note that it is safe to invert for real values here because we
15904 have already handled the one case that it matters. */
15907 if (code
== NE_EXPR
|| code
== GE_EXPR
)
15910 code
= invert_tree_comparison (code
, false);
15913 /* Compute a result for LT or EQ if args permit;
15914 Otherwise return T. */
15915 if (TREE_CODE (op0
) == INTEGER_CST
&& TREE_CODE (op1
) == INTEGER_CST
)
15917 if (code
== EQ_EXPR
)
15918 result
= tree_int_cst_equal (op0
, op1
);
15920 result
= tree_int_cst_lt (op0
, op1
);
15927 return constant_boolean_node (result
, type
);
/* NOTE(review): extraction-garbled text — tokens kept byte-identical;
   the early "return expr" lines are elided.  Wraps EXPR in a
   CLEANUP_POINT_EXPR only when it has side effects that require one.  */
15930 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15931 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15935 fold_build_cleanup_point_expr (tree type
, tree expr
)
15937 /* If the expression does not have side effects then we don't have to wrap
15938 it with a cleanup point expression. */
15939 if (!TREE_SIDE_EFFECTS (expr
))
15942 /* If the expression is a return, check to see if the expression inside the
15943 return has no side effects or the right hand side of the modify expression
15944 inside the return. If either don't have side effects set we don't need to
15945 wrap the expression in a cleanup point expression. Note we don't check the
15946 left hand side of the modify because it should always be a return decl. */
15947 if (TREE_CODE (expr
) == RETURN_EXPR
)
15949 tree op
= TREE_OPERAND (expr
, 0);
15950 if (!op
|| !TREE_SIDE_EFFECTS (op
))
15952 op
= TREE_OPERAND (op
, 1);
15953 if (!TREE_SIDE_EFFECTS (op
))
15957 return build1_loc (EXPR_LOCATION (expr
), CLEANUP_POINT_EXPR
, type
, expr
);
15960 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15961 of an indirection through OP0, or NULL_TREE if no simplification is
15965 fold_indirect_ref_1 (location_t loc
, tree type
, tree op0
)
15969 poly_uint64 const_op01
;
15972 subtype
= TREE_TYPE (sub
);
15973 if (!POINTER_TYPE_P (subtype
)
15974 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0
)))
15977 if (TREE_CODE (sub
) == ADDR_EXPR
)
15979 tree op
= TREE_OPERAND (sub
, 0);
15980 tree optype
= TREE_TYPE (op
);
15982 /* *&CONST_DECL -> to the value of the const decl. */
15983 if (TREE_CODE (op
) == CONST_DECL
)
15984 return DECL_INITIAL (op
);
15985 /* *&p => p; make sure to handle *&"str"[cst] here. */
15986 if (type
== optype
)
15988 tree fop
= fold_read_from_constant_string (op
);
15994 /* *(foo *)&fooarray => fooarray[0] */
15995 else if (TREE_CODE (optype
) == ARRAY_TYPE
15996 && type
== TREE_TYPE (optype
)
15997 && (!in_gimple_form
15998 || TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
))
16000 tree type_domain
= TYPE_DOMAIN (optype
);
16001 tree min_val
= size_zero_node
;
16002 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
16003 min_val
= TYPE_MIN_VALUE (type_domain
);
16005 && TREE_CODE (min_val
) != INTEGER_CST
)
16007 return build4_loc (loc
, ARRAY_REF
, type
, op
, min_val
,
16008 NULL_TREE
, NULL_TREE
);
16010 /* *(foo *)&complexfoo => __real__ complexfoo */
16011 else if (TREE_CODE (optype
) == COMPLEX_TYPE
16012 && type
== TREE_TYPE (optype
))
16013 return fold_build1_loc (loc
, REALPART_EXPR
, type
, op
);
16014 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16015 else if (VECTOR_TYPE_P (optype
)
16016 && type
== TREE_TYPE (optype
))
16018 tree part_width
= TYPE_SIZE (type
);
16019 tree index
= bitsize_int (0);
16020 return fold_build3_loc (loc
, BIT_FIELD_REF
, type
, op
, part_width
,
16025 if (TREE_CODE (sub
) == POINTER_PLUS_EXPR
16026 && poly_int_tree_p (TREE_OPERAND (sub
, 1), &const_op01
))
16028 tree op00
= TREE_OPERAND (sub
, 0);
16029 tree op01
= TREE_OPERAND (sub
, 1);
16032 if (TREE_CODE (op00
) == ADDR_EXPR
)
16035 op00
= TREE_OPERAND (op00
, 0);
16036 op00type
= TREE_TYPE (op00
);
16038 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16039 if (VECTOR_TYPE_P (op00type
)
16040 && type
== TREE_TYPE (op00type
)
16041 /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
16042 but we want to treat offsets with MSB set as negative.
16043 For the code below negative offsets are invalid and
16044 TYPE_SIZE of the element is something unsigned, so
16045 check whether op01 fits into poly_int64, which implies
16046 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
16047 then just use poly_uint64 because we want to treat the
16048 value as unsigned. */
16049 && tree_fits_poly_int64_p (op01
))
16051 tree part_width
= TYPE_SIZE (type
);
16052 poly_uint64 max_offset
16053 = (tree_to_uhwi (part_width
) / BITS_PER_UNIT
16054 * TYPE_VECTOR_SUBPARTS (op00type
));
16055 if (known_lt (const_op01
, max_offset
))
16057 tree index
= bitsize_int (const_op01
* BITS_PER_UNIT
);
16058 return fold_build3_loc (loc
,
16059 BIT_FIELD_REF
, type
, op00
,
16060 part_width
, index
);
16063 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16064 else if (TREE_CODE (op00type
) == COMPLEX_TYPE
16065 && type
== TREE_TYPE (op00type
))
16067 if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type
)),
16069 return fold_build1_loc (loc
, IMAGPART_EXPR
, type
, op00
);
16071 /* ((foo *)&fooarray)[1] => fooarray[1] */
16072 else if (TREE_CODE (op00type
) == ARRAY_TYPE
16073 && type
== TREE_TYPE (op00type
))
16075 tree type_domain
= TYPE_DOMAIN (op00type
);
16076 tree min_val
= size_zero_node
;
16077 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
16078 min_val
= TYPE_MIN_VALUE (type_domain
);
16079 poly_uint64 type_size
, index
;
16080 if (poly_int_tree_p (min_val
)
16081 && poly_int_tree_p (TYPE_SIZE_UNIT (type
), &type_size
)
16082 && multiple_p (const_op01
, type_size
, &index
))
16084 poly_offset_int off
= index
+ wi::to_poly_offset (min_val
);
16085 op01
= wide_int_to_tree (sizetype
, off
);
16086 return build4_loc (loc
, ARRAY_REF
, type
, op00
, op01
,
16087 NULL_TREE
, NULL_TREE
);
16093 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16094 if (TREE_CODE (TREE_TYPE (subtype
)) == ARRAY_TYPE
16095 && type
== TREE_TYPE (TREE_TYPE (subtype
))
16096 && (!in_gimple_form
16097 || TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
))
16100 tree min_val
= size_zero_node
;
16101 sub
= build_fold_indirect_ref_loc (loc
, sub
);
16102 type_domain
= TYPE_DOMAIN (TREE_TYPE (sub
));
16103 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
16104 min_val
= TYPE_MIN_VALUE (type_domain
);
16106 && TREE_CODE (min_val
) != INTEGER_CST
)
16108 return build4_loc (loc
, ARRAY_REF
, type
, sub
, min_val
, NULL_TREE
,
16115 /* Builds an expression for an indirection through T, simplifying some
16119 build_fold_indirect_ref_loc (location_t loc
, tree t
)
16121 tree type
= TREE_TYPE (TREE_TYPE (t
));
16122 tree sub
= fold_indirect_ref_1 (loc
, type
, t
);
16127 return build1_loc (loc
, INDIRECT_REF
, type
, t
);
16130 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16133 fold_indirect_ref_loc (location_t loc
, tree t
)
16135 tree sub
= fold_indirect_ref_1 (loc
, TREE_TYPE (t
), TREE_OPERAND (t
, 0));
16143 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16144 whose result is ignored. The type of the returned tree need not be
16145 the same as the original expression. */
16148 fold_ignored_result (tree t
)
16150 if (!TREE_SIDE_EFFECTS (t
))
16151 return integer_zero_node
;
16154 switch (TREE_CODE_CLASS (TREE_CODE (t
)))
16157 t
= TREE_OPERAND (t
, 0);
16161 case tcc_comparison
:
16162 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
16163 t
= TREE_OPERAND (t
, 0);
16164 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 0)))
16165 t
= TREE_OPERAND (t
, 1);
16170 case tcc_expression
:
16171 switch (TREE_CODE (t
))
16173 case COMPOUND_EXPR
:
16174 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
16176 t
= TREE_OPERAND (t
, 0);
16180 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1))
16181 || TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 2)))
16183 t
= TREE_OPERAND (t
, 0);
16196 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16199 round_up_loc (location_t loc
, tree value
, unsigned int divisor
)
16201 tree div
= NULL_TREE
;
16206 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16207 have to do anything. Only do this when we are not given a const,
16208 because in that case, this check is more expensive than just
16210 if (TREE_CODE (value
) != INTEGER_CST
)
16212 div
= build_int_cst (TREE_TYPE (value
), divisor
);
16214 if (multiple_of_p (TREE_TYPE (value
), value
, div
))
16218 /* If divisor is a power of two, simplify this to bit manipulation. */
16219 if (pow2_or_zerop (divisor
))
16221 if (TREE_CODE (value
) == INTEGER_CST
)
16223 wide_int val
= wi::to_wide (value
);
16226 if ((val
& (divisor
- 1)) == 0)
16229 overflow_p
= TREE_OVERFLOW (value
);
16230 val
+= divisor
- 1;
16231 val
&= (int) -divisor
;
16235 return force_fit_type (TREE_TYPE (value
), val
, -1, overflow_p
);
16241 t
= build_int_cst (TREE_TYPE (value
), divisor
- 1);
16242 value
= size_binop_loc (loc
, PLUS_EXPR
, value
, t
);
16243 t
= build_int_cst (TREE_TYPE (value
), - (int) divisor
);
16244 value
= size_binop_loc (loc
, BIT_AND_EXPR
, value
, t
);
16250 div
= build_int_cst (TREE_TYPE (value
), divisor
);
16251 value
= size_binop_loc (loc
, CEIL_DIV_EXPR
, value
, div
);
16252 value
= size_binop_loc (loc
, MULT_EXPR
, value
, div
);
16258 /* Likewise, but round down. */
16261 round_down_loc (location_t loc
, tree value
, int divisor
)
16263 tree div
= NULL_TREE
;
16265 gcc_assert (divisor
> 0);
16269 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16270 have to do anything. Only do this when we are not given a const,
16271 because in that case, this check is more expensive than just
16273 if (TREE_CODE (value
) != INTEGER_CST
)
16275 div
= build_int_cst (TREE_TYPE (value
), divisor
);
16277 if (multiple_of_p (TREE_TYPE (value
), value
, div
))
16281 /* If divisor is a power of two, simplify this to bit manipulation. */
16282 if (pow2_or_zerop (divisor
))
16286 t
= build_int_cst (TREE_TYPE (value
), -divisor
);
16287 value
= size_binop_loc (loc
, BIT_AND_EXPR
, value
, t
);
16292 div
= build_int_cst (TREE_TYPE (value
), divisor
);
16293 value
= size_binop_loc (loc
, FLOOR_DIV_EXPR
, value
, div
);
16294 value
= size_binop_loc (loc
, MULT_EXPR
, value
, div
);
16300 /* Returns the pointer to the base of the object addressed by EXP and
16301 extracts the information about the offset of the access, storing it
16302 to PBITPOS and POFFSET. */
16305 split_address_to_core_and_offset (tree exp
,
16306 poly_int64_pod
*pbitpos
, tree
*poffset
)
16310 int unsignedp
, reversep
, volatilep
;
16311 poly_int64 bitsize
;
16312 location_t loc
= EXPR_LOCATION (exp
);
16314 if (TREE_CODE (exp
) == SSA_NAME
)
16315 if (gassign
*def
= dyn_cast
<gassign
*> (SSA_NAME_DEF_STMT (exp
)))
16316 if (gimple_assign_rhs_code (def
) == ADDR_EXPR
)
16317 exp
= gimple_assign_rhs1 (def
);
16319 if (TREE_CODE (exp
) == ADDR_EXPR
)
16321 core
= get_inner_reference (TREE_OPERAND (exp
, 0), &bitsize
, pbitpos
,
16322 poffset
, &mode
, &unsignedp
, &reversep
,
16324 core
= build_fold_addr_expr_loc (loc
, core
);
16326 else if (TREE_CODE (exp
) == POINTER_PLUS_EXPR
)
16328 core
= TREE_OPERAND (exp
, 0);
16331 *poffset
= TREE_OPERAND (exp
, 1);
16332 if (poly_int_tree_p (*poffset
))
16334 poly_offset_int tem
16335 = wi::sext (wi::to_poly_offset (*poffset
),
16336 TYPE_PRECISION (TREE_TYPE (*poffset
)));
16337 tem
<<= LOG2_BITS_PER_UNIT
;
16338 if (tem
.to_shwi (pbitpos
))
16339 *poffset
= NULL_TREE
;
16346 *poffset
= NULL_TREE
;
16352 /* Returns true if addresses of E1 and E2 differ by a constant, false
16353 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16356 ptr_difference_const (tree e1
, tree e2
, poly_int64_pod
*diff
)
16359 poly_int64 bitpos1
, bitpos2
;
16360 tree toffset1
, toffset2
, tdiff
, type
;
16362 core1
= split_address_to_core_and_offset (e1
, &bitpos1
, &toffset1
);
16363 core2
= split_address_to_core_and_offset (e2
, &bitpos2
, &toffset2
);
16365 poly_int64 bytepos1
, bytepos2
;
16366 if (!multiple_p (bitpos1
, BITS_PER_UNIT
, &bytepos1
)
16367 || !multiple_p (bitpos2
, BITS_PER_UNIT
, &bytepos2
)
16368 || !operand_equal_p (core1
, core2
, 0))
16371 if (toffset1
&& toffset2
)
16373 type
= TREE_TYPE (toffset1
);
16374 if (type
!= TREE_TYPE (toffset2
))
16375 toffset2
= fold_convert (type
, toffset2
);
16377 tdiff
= fold_build2 (MINUS_EXPR
, type
, toffset1
, toffset2
);
16378 if (!cst_and_fits_in_hwi (tdiff
))
16381 *diff
= int_cst_value (tdiff
);
16383 else if (toffset1
|| toffset2
)
16385 /* If only one of the offsets is non-constant, the difference cannot
16392 *diff
+= bytepos1
- bytepos2
;
16396 /* Return OFF converted to a pointer offset type suitable as offset for
16397 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
16399 convert_to_ptrofftype_loc (location_t loc
, tree off
)
16401 if (ptrofftype_p (TREE_TYPE (off
)))
16403 return fold_convert_loc (loc
, sizetype
, off
);
16406 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16408 fold_build_pointer_plus_loc (location_t loc
, tree ptr
, tree off
)
16410 return fold_build2_loc (loc
, POINTER_PLUS_EXPR
, TREE_TYPE (ptr
),
16411 ptr
, convert_to_ptrofftype_loc (loc
, off
));
16414 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16416 fold_build_pointer_plus_hwi_loc (location_t loc
, tree ptr
, HOST_WIDE_INT off
)
16418 return fold_build2_loc (loc
, POINTER_PLUS_EXPR
, TREE_TYPE (ptr
),
16419 ptr
, size_int (off
));
16422 /* Return a pointer to a NUL-terminated string containing the sequence
16423 of bytes corresponding to the representation of the object referred to
16424 by SRC (or a subsequence of such bytes within it if SRC is a reference
16425 to an initialized constant array plus some constant offset).
16426 Set *STRSIZE the number of bytes in the constant sequence including
16427 the terminating NUL byte. *STRSIZE is equal to sizeof(A) - OFFSET
16428 where A is the array that stores the constant sequence that SRC points
16429 to and OFFSET is the byte offset of SRC from the beginning of A. SRC
16430 need not point to a string or even an array of characters but may point
16431 to an object of any type. */
16434 getbyterep (tree src
, unsigned HOST_WIDE_INT
*strsize
)
16436 /* The offset into the array A storing the string, and A's byte size. */
16444 src
= byte_representation (src
, &offset_node
, &mem_size
, NULL
);
16446 src
= string_constant (src
, &offset_node
, &mem_size
, NULL
);
16450 unsigned HOST_WIDE_INT offset
= 0;
16451 if (offset_node
!= NULL_TREE
)
16453 if (!tree_fits_uhwi_p (offset_node
))
16456 offset
= tree_to_uhwi (offset_node
);
16459 if (!tree_fits_uhwi_p (mem_size
))
16462 /* ARRAY_SIZE is the byte size of the array the constant sequence
16463 is stored in and equal to sizeof A. INIT_BYTES is the number
16464 of bytes in the constant sequence used to initialize the array,
16465 including any embedded NULs as well as the terminating NUL (for
16466 strings), but not including any trailing zeros/NULs past
16467 the terminating one appended implicitly to a string literal to
16468 zero out the remainder of the array it's stored in. For example,
16470 const char a[7] = "abc\0d";
16471 n = strlen (a + 1);
16472 ARRAY_SIZE is 7, INIT_BYTES is 6, and OFFSET is 1. For a valid
16473 (i.e., nul-terminated) string with no embedded nuls, INIT_BYTES
16474 is equal to strlen (A) + 1. */
16475 const unsigned HOST_WIDE_INT array_size
= tree_to_uhwi (mem_size
);
16476 unsigned HOST_WIDE_INT init_bytes
= TREE_STRING_LENGTH (src
);
16477 const char *string
= TREE_STRING_POINTER (src
);
16479 /* Ideally this would turn into a gcc_checking_assert over time. */
16480 if (init_bytes
> array_size
)
16481 init_bytes
= array_size
;
16483 if (init_bytes
== 0 || offset
>= array_size
)
16488 /* Compute and store the number of characters from the beginning
16489 of the substring at OFFSET to the end, including the terminating
16490 nul. Offsets past the initial length refer to null strings. */
16491 if (offset
< init_bytes
)
16492 *strsize
= init_bytes
- offset
;
16498 tree eltype
= TREE_TYPE (TREE_TYPE (src
));
16499 /* Support only properly NUL-terminated single byte strings. */
16500 if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype
)) != 1)
16502 if (string
[init_bytes
- 1] != '\0')
16506 return offset
< init_bytes
? string
+ offset
: "";
16509 /* Return a pointer to a NUL-terminated string corresponding to
16510 the expression STR referencing a constant string, possibly
16511 involving a constant offset. Return null if STR either doesn't
16512 reference a constant string or if it involves a nonconstant
16516 c_getstr (tree str
)
16518 return getbyterep (str
, NULL
);
16521 /* Given a tree T, compute which bits in T may be nonzero. */
16524 tree_nonzero_bits (const_tree t
)
16526 switch (TREE_CODE (t
))
16529 return wi::to_wide (t
);
16531 return get_nonzero_bits (t
);
16532 case NON_LVALUE_EXPR
:
16534 return tree_nonzero_bits (TREE_OPERAND (t
, 0));
16536 return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t
, 0)),
16537 tree_nonzero_bits (TREE_OPERAND (t
, 1)));
16540 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t
, 0)),
16541 tree_nonzero_bits (TREE_OPERAND (t
, 1)));
16543 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t
, 1)),
16544 tree_nonzero_bits (TREE_OPERAND (t
, 2)));
16546 return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t
, 0)),
16547 TYPE_PRECISION (TREE_TYPE (t
)),
16548 TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t
, 0))));
16550 if (INTEGRAL_TYPE_P (TREE_TYPE (t
)))
16552 wide_int nzbits1
= tree_nonzero_bits (TREE_OPERAND (t
, 0));
16553 wide_int nzbits2
= tree_nonzero_bits (TREE_OPERAND (t
, 1));
16554 if (wi::bit_and (nzbits1
, nzbits2
) == 0)
16555 return wi::bit_or (nzbits1
, nzbits2
);
16559 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
16561 tree type
= TREE_TYPE (t
);
16562 wide_int nzbits
= tree_nonzero_bits (TREE_OPERAND (t
, 0));
16563 wide_int arg1
= wi::to_wide (TREE_OPERAND (t
, 1),
16564 TYPE_PRECISION (type
));
16565 return wi::neg_p (arg1
)
16566 ? wi::rshift (nzbits
, -arg1
, TYPE_SIGN (type
))
16567 : wi::lshift (nzbits
, arg1
);
16571 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
16573 tree type
= TREE_TYPE (t
);
16574 wide_int nzbits
= tree_nonzero_bits (TREE_OPERAND (t
, 0));
16575 wide_int arg1
= wi::to_wide (TREE_OPERAND (t
, 1),
16576 TYPE_PRECISION (type
));
16577 return wi::neg_p (arg1
)
16578 ? wi::lshift (nzbits
, -arg1
)
16579 : wi::rshift (nzbits
, arg1
, TYPE_SIGN (type
));
16586 return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t
)));
16589 /* Helper function for address compare simplifications in match.pd.
16590 OP0 and OP1 are ADDR_EXPR operands being compared by CODE.
16591 TYPE is the type of comparison operands.
16592 BASE0, BASE1, OFF0 and OFF1 are set by the function.
16593 GENERIC is true if GENERIC folding and false for GIMPLE folding.
16594 Returns 0 if OP0 is known to be unequal to OP1 regardless of OFF{0,1},
16595 1 if bases are known to be equal and OP0 cmp OP1 depends on OFF0 cmp OFF1,
16596 and 2 if unknown. */
16599 address_compare (tree_code code
, tree type
, tree op0
, tree op1
,
16600 tree
&base0
, tree
&base1
, poly_int64
&off0
, poly_int64
&off1
,
16603 if (TREE_CODE (op0
) == SSA_NAME
)
16604 op0
= gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op0
));
16605 if (TREE_CODE (op1
) == SSA_NAME
)
16606 op1
= gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op1
));
16607 gcc_checking_assert (TREE_CODE (op0
) == ADDR_EXPR
);
16608 gcc_checking_assert (TREE_CODE (op1
) == ADDR_EXPR
);
16609 base0
= get_addr_base_and_unit_offset (TREE_OPERAND (op0
, 0), &off0
);
16610 base1
= get_addr_base_and_unit_offset (TREE_OPERAND (op1
, 0), &off1
);
16611 if (base0
&& TREE_CODE (base0
) == MEM_REF
)
16613 off0
+= mem_ref_offset (base0
).force_shwi ();
16614 base0
= TREE_OPERAND (base0
, 0);
16616 if (base1
&& TREE_CODE (base1
) == MEM_REF
)
16618 off1
+= mem_ref_offset (base1
).force_shwi ();
16619 base1
= TREE_OPERAND (base1
, 0);
16621 if (base0
== NULL_TREE
|| base1
== NULL_TREE
)
16625 /* Punt in GENERIC on variables with value expressions;
16626 the value expressions might point to fields/elements
16627 of other vars etc. */
16629 && ((VAR_P (base0
) && DECL_HAS_VALUE_EXPR_P (base0
))
16630 || (VAR_P (base1
) && DECL_HAS_VALUE_EXPR_P (base1
))))
16632 else if (decl_in_symtab_p (base0
) && decl_in_symtab_p (base1
))
16634 symtab_node
*node0
= symtab_node::get_create (base0
);
16635 symtab_node
*node1
= symtab_node::get_create (base1
);
16636 equal
= node0
->equal_address_to (node1
);
16638 else if ((DECL_P (base0
)
16639 || TREE_CODE (base0
) == SSA_NAME
16640 || TREE_CODE (base0
) == STRING_CST
)
16642 || TREE_CODE (base1
) == SSA_NAME
16643 || TREE_CODE (base1
) == STRING_CST
))
16644 equal
= (base0
== base1
);
16645 /* Assume different STRING_CSTs with the same content will be
16648 && TREE_CODE (base0
) == STRING_CST
16649 && TREE_CODE (base1
) == STRING_CST
16650 && TREE_STRING_LENGTH (base0
) == TREE_STRING_LENGTH (base1
)
16651 && memcmp (TREE_STRING_POINTER (base0
), TREE_STRING_POINTER (base1
),
16652 TREE_STRING_LENGTH (base0
)) == 0)
16656 if (code
== EQ_EXPR
16658 /* If the offsets are equal we can ignore overflow. */
16659 || known_eq (off0
, off1
)
16660 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0
))
16661 /* Or if we compare using pointers to decls or strings. */
16662 || (POINTER_TYPE_P (type
)
16663 && (DECL_P (base0
) || TREE_CODE (base0
) == STRING_CST
)))
16669 if (code
!= EQ_EXPR
&& code
!= NE_EXPR
)
16672 /* At this point we know (or assume) the two pointers point at
16673 different objects. */
16674 HOST_WIDE_INT ioff0
= -1, ioff1
= -1;
16675 off0
.is_constant (&ioff0
);
16676 off1
.is_constant (&ioff1
);
16677 /* Punt on non-zero offsets from functions. */
16678 if ((TREE_CODE (base0
) == FUNCTION_DECL
&& ioff0
)
16679 || (TREE_CODE (base1
) == FUNCTION_DECL
&& ioff1
))
16681 /* Or if the bases are neither decls nor string literals. */
16682 if (!DECL_P (base0
) && TREE_CODE (base0
) != STRING_CST
)
16684 if (!DECL_P (base1
) && TREE_CODE (base1
) != STRING_CST
)
16686 /* For initializers, assume addresses of different functions are
16688 if (folding_initializer
16689 && TREE_CODE (base0
) == FUNCTION_DECL
16690 && TREE_CODE (base1
) == FUNCTION_DECL
)
16693 /* Compute whether one address points to the start of one
16694 object and another one to the end of another one. */
16695 poly_int64 size0
= 0, size1
= 0;
16696 if (TREE_CODE (base0
) == STRING_CST
)
16698 if (ioff0
< 0 || ioff0
> TREE_STRING_LENGTH (base0
))
16701 size0
= TREE_STRING_LENGTH (base0
);
16703 else if (TREE_CODE (base0
) == FUNCTION_DECL
)
16707 tree sz0
= DECL_SIZE_UNIT (base0
);
16708 if (!tree_fits_poly_int64_p (sz0
))
16711 size0
= tree_to_poly_int64 (sz0
);
16713 if (TREE_CODE (base1
) == STRING_CST
)
16715 if (ioff1
< 0 || ioff1
> TREE_STRING_LENGTH (base1
))
16718 size1
= TREE_STRING_LENGTH (base1
);
16720 else if (TREE_CODE (base1
) == FUNCTION_DECL
)
16724 tree sz1
= DECL_SIZE_UNIT (base1
);
16725 if (!tree_fits_poly_int64_p (sz1
))
16728 size1
= tree_to_poly_int64 (sz1
);
16732 /* If one offset is pointing (or could be) to the beginning of one
16733 object and the other is pointing to one past the last byte of the
16734 other object, punt. */
16735 if (maybe_eq (off0
, 0) && maybe_eq (off1
, size1
))
16737 else if (maybe_eq (off1
, 0) && maybe_eq (off0
, size0
))
16739 /* If both offsets are the same, there are some cases we know that are
16740 ok. Either if we know they aren't zero, or if we know both sizes
16743 && known_eq (off0
, off1
)
16744 && (known_ne (off0
, 0)
16745 || (known_ne (size0
, 0) && known_ne (size1
, 0))))
16749 /* At this point, equal is 2 if either one or both pointers are out of
16750 bounds of their object, or one points to start of its object and the
16751 other points to end of its object. This is unspecified behavior
16752 e.g. in C++. Otherwise equal is 0. */
16753 if (folding_cxx_constexpr
&& equal
)
16756 /* When both pointers point to string literals, even when equal is 0,
16757 due to tail merging of string literals the pointers might be the same. */
16758 if (TREE_CODE (base0
) == STRING_CST
&& TREE_CODE (base1
) == STRING_CST
)
16762 || ioff0
> TREE_STRING_LENGTH (base0
)
16763 || ioff1
> TREE_STRING_LENGTH (base1
))
16766 /* If the bytes in the string literals starting at the pointers
16767 differ, the pointers need to be different. */
16768 if (memcmp (TREE_STRING_POINTER (base0
) + ioff0
,
16769 TREE_STRING_POINTER (base1
) + ioff1
,
16770 MIN (TREE_STRING_LENGTH (base0
) - ioff0
,
16771 TREE_STRING_LENGTH (base1
) - ioff1
)) == 0)
16773 HOST_WIDE_INT ioffmin
= MIN (ioff0
, ioff1
);
16774 if (memcmp (TREE_STRING_POINTER (base0
) + ioff0
- ioffmin
,
16775 TREE_STRING_POINTER (base1
) + ioff1
- ioffmin
,
16777 /* If even the bytes in the string literal before the
16778 pointers are the same, the string literals could be
16785 if (folding_cxx_constexpr
)
16788 /* If this is a pointer comparison, ignore for now even
16789 valid equalities where one pointer is the offset zero
16790 of one object and the other to one past end of another one. */
16791 if (!INTEGRAL_TYPE_P (type
))
16794 /* Assume that string literals can't be adjacent to variables
16795 (automatic or global). */
16796 if (TREE_CODE (base0
) == STRING_CST
|| TREE_CODE (base1
) == STRING_CST
)
16799 /* Assume that automatic variables can't be adjacent to global
16801 if (is_global_var (base0
) != is_global_var (base1
))
16807 /* Return the single non-zero element of a CONSTRUCTOR or NULL_TREE. */
16809 ctor_single_nonzero_element (const_tree t
)
16811 unsigned HOST_WIDE_INT idx
;
16812 constructor_elt
*ce
;
16813 tree elt
= NULL_TREE
;
16815 if (TREE_CODE (t
) != CONSTRUCTOR
)
16817 for (idx
= 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t
), idx
, &ce
); idx
++)
16818 if (!integer_zerop (ce
->value
) && !real_zerop (ce
->value
))
16829 namespace selftest
{
16831 /* Helper functions for writing tests of folding trees. */
16833 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT. */
16836 assert_binop_folds_to_const (tree lhs
, enum tree_code code
, tree rhs
,
16839 ASSERT_EQ (constant
, fold_build2 (code
, TREE_TYPE (lhs
), lhs
, rhs
));
16842 /* Verify that the binary op (LHS CODE RHS) folds to an NON_LVALUE_EXPR
16843 wrapping WRAPPED_EXPR. */
16846 assert_binop_folds_to_nonlvalue (tree lhs
, enum tree_code code
, tree rhs
,
16849 tree result
= fold_build2 (code
, TREE_TYPE (lhs
), lhs
, rhs
);
16850 ASSERT_NE (wrapped_expr
, result
);
16851 ASSERT_EQ (NON_LVALUE_EXPR
, TREE_CODE (result
));
16852 ASSERT_EQ (wrapped_expr
, TREE_OPERAND (result
, 0));
16855 /* Verify that various arithmetic binary operations are folded
16859 test_arithmetic_folding ()
16861 tree type
= integer_type_node
;
16862 tree x
= create_tmp_var_raw (type
, "x");
16863 tree zero
= build_zero_cst (type
);
16864 tree one
= build_int_cst (type
, 1);
16867 /* 1 <-- (0 + 1) */
16868 assert_binop_folds_to_const (zero
, PLUS_EXPR
, one
,
16870 assert_binop_folds_to_const (one
, PLUS_EXPR
, zero
,
16873 /* (nonlvalue)x <-- (x + 0) */
16874 assert_binop_folds_to_nonlvalue (x
, PLUS_EXPR
, zero
,
16878 /* 0 <-- (x - x) */
16879 assert_binop_folds_to_const (x
, MINUS_EXPR
, x
,
16881 assert_binop_folds_to_nonlvalue (x
, MINUS_EXPR
, zero
,
16884 /* Multiplication. */
16885 /* 0 <-- (x * 0) */
16886 assert_binop_folds_to_const (x
, MULT_EXPR
, zero
,
16889 /* (nonlvalue)x <-- (x * 1) */
16890 assert_binop_folds_to_nonlvalue (x
, MULT_EXPR
, one
,
16894 /* Verify that various binary operations on vectors are folded
16898 test_vector_folding ()
16900 tree inner_type
= integer_type_node
;
16901 tree type
= build_vector_type (inner_type
, 4);
16902 tree zero
= build_zero_cst (type
);
16903 tree one
= build_one_cst (type
);
16904 tree index
= build_index_vector (type
, 0, 1);
16906 /* Verify equality tests that return a scalar boolean result. */
16907 tree res_type
= boolean_type_node
;
16908 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR
, res_type
, zero
, one
)));
16909 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR
, res_type
, zero
, zero
)));
16910 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR
, res_type
, zero
, one
)));
16911 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR
, res_type
, one
, one
)));
16912 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR
, res_type
, index
, one
)));
16913 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR
, res_type
,
16915 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR
, res_type
,
16917 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR
, res_type
,
16921 /* Verify folding of VEC_DUPLICATE_EXPRs. */
16924 test_vec_duplicate_folding ()
16926 scalar_int_mode int_mode
= SCALAR_INT_TYPE_MODE (ssizetype
);
16927 machine_mode vec_mode
= targetm
.vectorize
.preferred_simd_mode (int_mode
);
16928 /* This will be 1 if VEC_MODE isn't a vector mode. */
16929 poly_uint64 nunits
= GET_MODE_NUNITS (vec_mode
);
16931 tree type
= build_vector_type (ssizetype
, nunits
);
16932 tree dup5_expr
= fold_unary (VEC_DUPLICATE_EXPR
, type
, ssize_int (5));
16933 tree dup5_cst
= build_vector_from_val (type
, ssize_int (5));
16934 ASSERT_TRUE (operand_equal_p (dup5_expr
, dup5_cst
, 0));
16937 /* Run all of the selftests within this file. */
16940 fold_const_cc_tests ()
16942 test_arithmetic_folding ();
16943 test_vector_folding ();
16944 test_vec_duplicate_folding ();
16947 } // namespace selftest
16949 #endif /* CHECKING_P */