/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2022 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
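/* Illustrative example (not part of the original file): a typical use of
   the size_int/size_binop entry points described above.  Because both
   operands below are constants of type sizetype, the result is itself an
   INTEGER_CST rather than a runtime expression:

     tree total = size_binop (MULT_EXPR, size_int (4), size_int (8));

   Only size_int, size_binop and MULT_EXPR are interfaces documented in
   this file; the variable name is hypothetical.  */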
#include "coretypes.h"
#include "tree-ssa-operands.h"
#include "optabs-query.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "langhooks.h"
#include "generic-match.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "tree-into-ssa.h"
#include "case-cfn-macros.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "stringpool.h"
#include "tree-vector-builder.h"
#include "vec-perm-indices.h"
#include "gimple-range.h"
/* Nonzero if we are folding constants inside an initializer or a C++
   manifestly-constant-evaluated context; zero otherwise.
   Should be used when folding in initializer enables additional
   optimizations.  */

int folding_initializer = 0;
/* Nonzero if we are folding C++ manifestly-constant-evaluated context; zero
   otherwise.
   Should be used when certain constructs shouldn't be optimized
   during folding in that context.  */

bool folding_cxx_constexpr = false;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static bool twoval_comparison_p (tree, tree *, tree *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static bool simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, enum tree_code,
					    tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static tree fold_negate_expr (location_t, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
/* Similar to protected_set_expr_location, but never modify x in place,
   if location can and needs to be set, unshare it.  */

tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
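/* Example (illustrative only): with ARG1 = 12 and ARG2 = 4 the routine
   above returns a constant 3 of ARG1's type, because 4 divides 12 with
   zero remainder; with ARG1 = 13 and ARG2 = 4 it returns NULL_TREE.  */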
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;
/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;
/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (warning_suppressed_p (stmt, OPT_Wstrict_overflow))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}
/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
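/* Illustrative usage pattern (not from the original sources): code that
   folds speculatively brackets the work with the deferral API above,
   for example

     fold_defer_overflow_warnings ();
     tree res = fold (expr);
     fold_undefer_overflow_warnings (res != NULL_TREE && use_result_p,
				     stmt, 0);

   so a -Wstrict-overflow warning is only emitted when the folded result
   is actually used.  Here expr, stmt and use_result_p are hypothetical;
   passing 0 as CODE means "use the deferred warning level", as documented
   above.  */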
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    CASE_CFN_ROUNDEVEN_FN:
      return true;

    CASE_CFN_NEARBYINT_FN:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
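/* Example (illustrative comment, not in the original file): sin is odd,
   -sin(x) == sin(-x), so its combined_fn qualifies, while an even
   function such as cos does not.  Rounding functions like nearbyint are
   only treated as odd when -frounding-math is off, because under a
   directed rounding mode negating the argument can change the direction
   in which the value is rounded.  */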
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (wi::to_wide (t));
}
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	/* Steps don't prevent negation.  */
	unsigned int count = vector_cst_encoded_nelts (t);
	for (unsigned int i = 0; i < count; ++i)
	  if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (type)
	  || HONOR_SIGNED_ZEROS (type)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && ! TYPE_OVERFLOW_WRAPS (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (type)
	     && !HONOR_SIGNED_ZEROS (type)
	     && (! ANY_INTEGRAL_TYPE_P (type)
		 || TYPE_OVERFLOW_WRAPS (type));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* INT_MIN/n * n doesn't overflow while negating one operand it does
	 if n is a (negative) power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		 && (wi::popcount
		     (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
		    && (wi::popcount
			(wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (t))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
	 B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	      && ! integer_onep (TREE_OPERAND (t, 1))))
	return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    CASE_CONVERT:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == element_precision (type) - 1)
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */
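/* Illustrative examples (not from the original sources): for T equal to
   the INTEGER_CST 5 this yields the constant -5 via fold_negate_const;
   for T of the form A - B, when signed zeros and sign-dependent rounding
   need not be honored, it yields B - A, per the MINUS_EXPR case below.  */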
static tree
fold_negate_expr_1 (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return tem;
      break;

    case REAL_CST:
    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	if (rpart && ipart)
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	tree_vector_builder elts;
	elts.new_unary_operation (type, t, true);
	unsigned int count = elts.encoded_nelts ();
	for (unsigned int i = 0; i < count; ++i)
	  {
	    tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elt == NULL_TREE)
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
	  && !HONOR_SIGNED_ZEROS (type))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
	  && !HONOR_SIGNED_ZEROS (type))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (type))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
	 B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (TREE_OPERAND (t, 0)),
				TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	       && ! integer_onep (TREE_OPERAND (t, 1))))
	  && negate_expr_p (TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0),
				negate_expr (TREE_OPERAND (t, 1)));
      break;

    CASE_CONVERT:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == element_precision (type) - 1)
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* A wrapper for fold_negate_expr_1.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);
  tree tem = fold_negate_expr_1 (loc, t);
  if (tem == NULL_TREE)
    return NULL_TREE;
  return fold_convert_loc (loc, type, tem);
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating illegal MINUS pointer expression.  LOC is the location of
   the converted variable part.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */
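/* Illustrative example (not from the original sources): splitting
   IN = X + 7 with CODE == PLUS_EXPR stores the literal 7 in *LITP, leaves
   *CONP null (there is no non-literal TREE_CONSTANT part), and returns X
   as the variable part.  For IN = X - 7 the literal instead goes to
   *MINUS_LITP, as described above.  */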
static tree
split_tree (tree in, tree type, enum tree_code code,
	    tree *minus_varp, tree *conp, tree *minus_conp,
	    tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
		   || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR
		       && (TREE_CODE (in) == PLUS_EXPR
			   || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p && *conp)
	*minus_conp = *conp, *conp = 0;
      if (neg_var_p && var)
	*minus_varp = var, var = 0;
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -1 - X is folded to ~X, undo that here.  Do _not_ do this
	 when IN is constant.  */
      *litp = build_minus_one_cst (type);
      *minus_varp = TREE_OPERAND (in, 0);
    }
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      if (*conp)
	*minus_conp = *conp, *conp = 0;
      else if (*minus_conp)
	*conp = *minus_conp, *minus_conp = 0;
      if (var)
	*minus_varp = var, var = 0;
      else if (*minus_varp)
	var = *minus_varp, *minus_varp = 0;
    }

  if (*litp
      && TREE_OVERFLOW_P (*litp))
    *litp = drop_tree_overflow (*litp);
  if (*minus_litp
      && TREE_OVERFLOW_P (*minus_litp))
    *minus_litp = drop_tree_overflow (*minus_litp);

  return var;
}
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */
static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    {
      gcc_assert (t2 == 0 || code != MINUS_EXPR);
      return t2;
    }
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
   a new constant in RES.  Return FALSE if we don't know how to
   evaluate CODE at compile-time.  */
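/* Illustrative sketch (not from the original sources) of driving the
   helper below directly on two INTEGER_CSTs of the same type:

     wide_int a = wi::to_wide (arg1), b = wi::to_wide (arg2), res;
     wi::overflow_type ovf;
     if (wide_int_binop (res, PLUS_EXPR, a, b, TYPE_SIGN (type), &ovf))
       ... use res, checking ovf for signed overflow ...

   arg1, arg2 and type are hypothetical trees; the call pattern mirrors
   the one used by int_const_binop further down.  */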
bool
wide_int_binop (wide_int &res,
		enum tree_code code, const wide_int &arg1, const wide_int &arg2,
		signop sign, wi::overflow_type *overflow)
{
  wide_int tmp;
  *overflow = wi::OVF_NONE;
  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	return false;
      res = wi::lshift (arg1, arg2);
      break;

    case RSHIFT_EXPR:
      if (wi::neg_p (arg2))
	return false;
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      res = wi::rshift (arg1, arg2, sign);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  tmp = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}
      else
	tmp = arg2;

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, tmp);
      else
	res = wi::lrotate (arg1, tmp);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_trunc (arg1, arg2, sign, overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_floor (arg1, arg2, sign, overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_ceil (arg1, arg2, sign, overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_round (arg1, arg2, sign, overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_trunc (arg1, arg2, sign, overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_floor (arg1, arg2, sign, overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_ceil (arg1, arg2, sign, overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_round (arg1, arg2, sign, overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return false;
    }
  return true;
}
/* Combine two poly int's ARG1 and ARG2 under operation CODE to
   produce a new constant in RES.  Return FALSE if we don't know how
   to evaluate CODE at compile-time.  */
static bool
poly_int_binop (poly_wide_int &res, enum tree_code code,
		const_tree arg1, const_tree arg2,
		signop sign, wi::overflow_type *overflow)
{
  gcc_assert (NUM_POLY_INT_COEFFS != 1);
  gcc_assert (poly_int_tree_p (arg1) && poly_int_tree_p (arg2));
  switch (code)
    {
    case PLUS_EXPR:
      res = wi::add (wi::to_poly_wide (arg1),
		     wi::to_poly_wide (arg2), sign, overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (wi::to_poly_wide (arg1),
		     wi::to_poly_wide (arg2), sign, overflow);
      break;

    case MULT_EXPR:
      if (TREE_CODE (arg2) == INTEGER_CST)
	res = wi::mul (wi::to_poly_wide (arg1),
		       wi::to_wide (arg2), sign, overflow);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	res = wi::mul (wi::to_poly_wide (arg2),
		       wi::to_wide (arg1), sign, overflow);
      else
	return false;
      break;

    case LSHIFT_EXPR:
      if (TREE_CODE (arg2) == INTEGER_CST)
	res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
      else
	return false;
      break;

    case BIT_IOR_EXPR:
      if (TREE_CODE (arg2) != INTEGER_CST
	  || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
			 &res))
	return false;
      break;

    default:
      return false;
    }
  return true;
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE to
   produce a new constant.  Return NULL_TREE if we don't know how to
   evaluate CODE at compile-time.  */
tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
		 int overflowable)
{
  poly_wide_int poly_res;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  wi::overflow_type overflow = wi::OVF_NONE;

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      wide_int warg1 = wi::to_wide (arg1), res;
      wide_int warg2 = wi::to_wide (arg2, TYPE_PRECISION (type));
      if (!wide_int_binop (res, code, warg1, warg2, sign, &overflow))
	return NULL_TREE;
      poly_res = res;
    }
  else if (!poly_int_tree_p (arg1)
	   || !poly_int_tree_p (arg2)
	   || !poly_int_binop (poly_res, code, arg1, arg2, sign, &overflow))
    return NULL_TREE;

  return force_fit_type (type, poly_res, overflowable,
			 (((sign == SIGNED || overflowable == -1)
			   && overflow)
			  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
}
/* Return true if binary operation OP distributes over addition in operand
   OPNO, with the other operand being held constant.  OPNO counts from 1.  */
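/* For instance, multiplication by a constant distributes over addition in
   either operand ((a + b) * c == a * c + b * c), whereas a shift only does
   so in its shifted operand: (a + b) << c equals (a << c) + (b << c), but
   c << (a + b) has no such expansion.  (Illustrative comment, not in the
   original file.)  */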
static bool
distributes_over_addition_p (tree_code op, int opno)
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */
static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
    {
      if (code == POINTER_PLUS_EXPR)
	return int_const_binop (PLUS_EXPR,
				arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a signaling NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISSIGNALING_NAN (d1)
	      || REAL_VALUE_ISSIGNALING_NAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && real_equal (&d2, &dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	{
	  /* Make resulting NaN value to be qNaN when flag_signaling_nans
	     is off.  */
	  d1.signalling = 0;
	  t = build_real (type, d1);
	  return t;
	}
      else if (REAL_VALUE_ISNAN (d2))
	{
	  /* Make resulting NaN value to be qNaN when flag_signaling_nans
	     is off.  */
	  d2.signalling = 0;
	  t = build_real (type, d2);
	  return t;
	}

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 both operands are not NaN but the result is NaN, and
	 flag_trapping_math.  Such operations should raise an
	 invalid operation exception.  */
      if (flag_trapping_math
	  && MODE_HAS_NANS (mode)
	  && REAL_VALUE_ISNAN (result)
	  && !REAL_VALUE_ISNAN (d1)
	  && !REAL_VALUE_ISNAN (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may dependent upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  if (TREE_CODE (arg2) != FIXED_CST)
	    return NULL_TREE;
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    if (TREE_CODE (arg2) != INTEGER_CST)
	      return NULL_TREE;
	    wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.ulow ();
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru.  */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.cc:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.cc:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to minimize
		 overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST
      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
		   TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p;
      if (VECTOR_CST_STEPPED_P (arg1)
	  && VECTOR_CST_STEPPED_P (arg2))
	/* We can operate directly on the encoding if:

	      a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
	    implies
	      (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)

	   Addition and subtraction are the supported operators
	   for which this is true.  */
	step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
      else if (VECTOR_CST_STEPPED_P (arg1))
	/* We can operate directly on stepped encodings if:

	     a3 - a2 == a2 - a1
	   implies:
	     (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)

	   which is true if (x -> x op c) distributes over addition.  */
	step_ok_p = distributes_over_addition_p (code, 1);
      else
	/* Similarly in reverse.  */
	step_ok_p = distributes_over_addition_p (code, 2);

      tree_vector_builder elts;
      if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  tree elt = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and return NULL_TREE */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p = distributes_over_addition_p (code, 1);
      tree_vector_builder elts;
      if (!elts.new_unary_operation (type, arg1, step_ok_p))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);

	  tree elt = const_binop (code, elem1, arg2);

	  /* It is possible that const_binop cannot handle the given
	     code and return NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }
  return NULL_TREE;
}
/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */
tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ???  Until we make the const_binop worker take the type of the
     result as argument put those cases that need it here.  */
  switch (code)
    {
    case VEC_SERIES_EXPR:
      if (CONSTANT_CLASS_P (arg1)
	  && CONSTANT_CLASS_P (arg2))
	return build_vec_series (type, arg1, arg2);
      return NULL_TREE;

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
	   && TREE_CODE (arg2) == REAL_CST)
	  || (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg2) == INTEGER_CST))
	return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case POINTER_DIFF_EXPR:
      if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
	{
	  poly_offset_int res = (wi::to_poly_offset (arg1)
				 - wi::to_poly_offset (arg2));
	  return force_fit_type (type, res, 1,
				 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
	}
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
      {
	unsigned int HOST_WIDE_INT out_nelts, in_nelts, i;

	if (TREE_CODE (arg1) != VECTOR_CST
	    || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
	  return NULL_TREE;

	out_nelts = in_nelts * 2;
	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	tree_vector_builder elts (type, out_nelts, 1);
	for (i = 0; i < out_nelts; i++)
	  {
	    tree elt = (i < in_nelts
			? VECTOR_CST_ELT (arg1, i)
			: VECTOR_CST_ELT (arg2, i - in_nelts));
	    elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
				      ? NOP_EXPR
				      : code == VEC_PACK_FLOAT_EXPR
				      ? FLOAT_EXPR : FIX_TRUNC_EXPR,
				      TREE_TYPE (type), elt);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;

	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
	  return NULL_TREE;
	out_nelts = in_nelts / 2;
	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	tree_vector_builder elts (type, out_nelts, 1);
	for (out = 0; out < out_nelts; out++)
	  {
	    unsigned int in = (out << scale) + ofs;
	    tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg1, in));
	    tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg2, in));

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    tree elt = const_binop (MULT_EXPR, t1, t2);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
		       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}
/* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
   Return zero if computing the constants is not possible.  */
tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  /* Don't perform the operation, other than NEGATE and ABS, if
     flag_signaling_nans is on and the operand is a signaling NaN.  */
  if (TREE_CODE (arg0) == REAL_CST
      && HONOR_SNANS (arg0)
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
      && code != NEGATE_EXPR
      && code != ABS_EXPR
      && code != ABSU_EXPR)
    return NULL_TREE;

  switch (code)
    {
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      /* If the source address is 0, and the source address space
	 cannot have a valid object at 0, fold to dest type null.  */
      if (integer_zerop (arg0)
	  && !(targetm.addr_space.zero_address_valid
	       (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
	return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
	/* Can't call fold_negate_const directly here as that doesn't
	   handle all cases and we might not be able to negate some
	   constants.  */
	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
	if (tem && CONSTANT_CLASS_P (tem))
	  return tem;
	break;
      }

    case ABS_EXPR:
    case ABSU_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
					  TREE_TYPE (type));
	  return build_complex (type, TREE_REALPART (arg0), ipart);
	}
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      else if (POLY_INT_CST_P (arg0))
	return wide_int_to_tree (type, -poly_int_cst_value (arg0));
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree elem;

	  /* This can cope with stepped encodings because ~x == -1 - x.  */
	  tree_vector_builder elements;
	  elements.new_unary_operation (type, arg0, true);
	  unsigned int i, count = elements.encoded_nelts ();
	  for (i = 0; i < count; ++i)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
		break;
	      elements.quick_push (elem);
	    }
	  if (i == count)
	    return elements.build ();
	}
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
	enum tree_code subcode;

	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
	  return NULL_TREE;
	out_nelts = in_nelts / 2;
	gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	unsigned int offset = 0;
	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR
				   || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR))
	  offset = out_nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else if (code == VEC_UNPACK_FLOAT_LO_EXPR
		 || code == VEC_UNPACK_FLOAT_HI_EXPR)
	  subcode = FLOAT_EXPR;
	else
	  subcode = FIX_TRUNC_EXPR;

	tree_vector_builder elts (type, out_nelts, 1);
	for (i = 0; i < out_nelts; i++)
	  {
	    tree elt = fold_convert_const (subcode, TREE_TYPE (type),
					   VECTOR_CST_ELT (arg0, i + offset));
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case VEC_DUPLICATE_EXPR:
      if (CONSTANT_CLASS_P (arg0))
	return build_vector_from_val (type, arg0);
      return NULL_TREE;

    default:
      break;
    }

  return NULL_TREE;
}
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (poly_int64 number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */
tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two poly_int constants faster.  */
  if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0)
	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
	    return arg1;
	  if (integer_zerop (arg1)
	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1)
	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0)
	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      tree res = int_const_binop (code, arg0, arg1, -1);
      if (res != NULL_TREE)
	return res;
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */
tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
			 !POINTER_TYPE_P (TREE_TYPE (arg1)),
			 TREE_OVERFLOW (arg1));
}
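/* Example (illustrative only): converting the INTEGER_CST 300 to
   unsigned char yields 44, because the value is truncated to the 8-bit
   precision of the target type; converting -1 to a wider unsigned type
   sign-extends first and then reinterprets, giving the all-ones value of
   that type.  */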
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to an integer type.  */
static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */
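  /* Example (illustrative comment, not in the original file): under these
     rules converting the REAL_CST 1.0e30 to a 32-bit int yields INT_MAX
     with TREE_OVERFLOW set, converting -1.0e30 yields INT_MIN, and
     converting a NaN yields zero, again with the overflow flag set.  */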
  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (real_less (&r, &l))
	{
	  overflow = true;
	  val = wi::to_wide (lt);
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (real_less (&u, &r))
	    {
	      overflow = true;
	      val = wi::to_wide (ut);
	    }
	}
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */
static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  scalar_mode mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
			  HOST_BITS_PER_DOUBLE_INT,
			  SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
				HOST_BITS_PER_DOUBLE_INT,
				SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     By checking if the fractional bits are not zero to add 1 to temp.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type (type, temp, -1,
		      (temp.is_negative ()
		       && (TYPE_UNSIGNED (type)
			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
		      | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to another floating point type.  */
static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  /* Don't perform the operation if flag_signaling_nans is on
     and the operand is a signaling NaN.  */
  if (HONOR_SNANS (arg1)
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
    return NULL_TREE;

  /* With flag_rounding_math we should respect the current rounding mode
     unless the conversion is exact.  */
  if (HONOR_SIGN_DEPENDENT_ROUNDING (arg1)
      && !exact_real_truncate (TYPE_MODE (type), &TREE_REAL_CST (arg1)))
    return NULL_TREE;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	   && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
	   && REAL_VALUE_ISINF (value)
	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions a FIXED_CST
   to a floating point type.  */
static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
			   &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions a FIXED_CST
   to another fixed-point type.  */
static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
			      &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions an INTEGER_CST
   to a fixed-point type.  */
static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;
  double_int di;

  gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);

  di.low = TREE_INT_CST_ELT (arg1, 0);
  if (TREE_INT_CST_NUNITS (arg1) == 1)
    di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
  else
    di.high = TREE_INT_CST_ELT (arg1, 1);

  overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
				       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to a fixed-point type.  */
static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
					&TREE_REAL_CST (arg1),
					TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */
2309 fold_convert_const (enum tree_code code
, tree type
, tree arg1
)
2311 tree arg_type
= TREE_TYPE (arg1
);
2312 if (arg_type
== type
)
2315 /* We can't widen types, since the runtime value could overflow the
2316 original type before being extended to the new type. */
2317 if (POLY_INT_CST_P (arg1
)
2318 && (POINTER_TYPE_P (type
) || INTEGRAL_TYPE_P (type
))
2319 && TYPE_PRECISION (type
) <= TYPE_PRECISION (arg_type
))
2320 return build_poly_int_cst (type
,
2321 poly_wide_int::from (poly_int_cst_value (arg1
),
2322 TYPE_PRECISION (type
),
2323 TYPE_SIGN (arg_type
)));
2325 if (POINTER_TYPE_P (type
) || INTEGRAL_TYPE_P (type
)
2326 || TREE_CODE (type
) == OFFSET_TYPE
)
2328 if (TREE_CODE (arg1
) == INTEGER_CST
)
2329 return fold_convert_const_int_from_int (type
, arg1
);
2330 else if (TREE_CODE (arg1
) == REAL_CST
)
2331 return fold_convert_const_int_from_real (code
, type
, arg1
);
2332 else if (TREE_CODE (arg1
) == FIXED_CST
)
2333 return fold_convert_const_int_from_fixed (type
, arg1
);
2335 else if (TREE_CODE (type
) == REAL_TYPE
)
2337 if (TREE_CODE (arg1
) == INTEGER_CST
)
2339 tree res
= build_real_from_int_cst (type
, arg1
);
2340 /* Avoid the folding if flag_rounding_math is on and the
2341 conversion is not exact. */
2342 if (HONOR_SIGN_DEPENDENT_ROUNDING (type
))
2345 wide_int w
= real_to_integer (&TREE_REAL_CST (res
), &fail
,
2346 TYPE_PRECISION (TREE_TYPE (arg1
)));
2347 if (fail
|| wi::ne_p (w
, wi::to_wide (arg1
)))
2352 else if (TREE_CODE (arg1
) == REAL_CST
)
2353 return fold_convert_const_real_from_real (type
, arg1
);
2354 else if (TREE_CODE (arg1
) == FIXED_CST
)
2355 return fold_convert_const_real_from_fixed (type
, arg1
);
2357 else if (TREE_CODE (type
) == FIXED_POINT_TYPE
)
2359 if (TREE_CODE (arg1
) == FIXED_CST
)
2360 return fold_convert_const_fixed_from_fixed (type
, arg1
);
2361 else if (TREE_CODE (arg1
) == INTEGER_CST
)
2362 return fold_convert_const_fixed_from_int (type
, arg1
);
2363 else if (TREE_CODE (arg1
) == REAL_CST
)
2364 return fold_convert_const_fixed_from_real (type
, arg1
);
2366 else if (TREE_CODE (type
) == VECTOR_TYPE
)
2368 if (TREE_CODE (arg1
) == VECTOR_CST
2369 && known_eq (TYPE_VECTOR_SUBPARTS (type
), VECTOR_CST_NELTS (arg1
)))
2371 tree elttype
= TREE_TYPE (type
);
2372 tree arg1_elttype
= TREE_TYPE (TREE_TYPE (arg1
));
2373 /* We can't handle steps directly when extending, since the
2374 values need to wrap at the original precision first. */
2376 = (INTEGRAL_TYPE_P (elttype
)
2377 && INTEGRAL_TYPE_P (arg1_elttype
)
2378 && TYPE_PRECISION (elttype
) <= TYPE_PRECISION (arg1_elttype
));
2379 tree_vector_builder v
;
2380 if (!v
.new_unary_operation (type
, arg1
, step_ok_p
))
2382 unsigned int len
= v
.encoded_nelts ();
2383 for (unsigned int i
= 0; i
< len
; ++i
)
2385 tree elt
= VECTOR_CST_ELT (arg1
, i
);
2386 tree cvt
= fold_convert_const (code
, elttype
, elt
);
2387 if (cvt
== NULL_TREE
)
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}
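/* Illustrative sketch only (hypothetical helper, not part of GCC): a typical
   use of fold_convert_const is folding a cast of a constant at compile time,
   e.g. (double) 5 at the tree level.  A NULL_TREE result means no
   simplification was possible, for instance when -frounding-math makes the
   result depend on the dynamic rounding mode.  */

static tree
sketch_fold_int_to_real (tree int_cst, tree real_type)
{
  /* FLOAT_EXPR is the tree code for an integer-to-float conversion.  */
  return fold_convert_const (FLOAT_EXPR, real_type, int_cst);
}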
/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  const_tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      return (INTEGRAL_TYPE_P (orig)
	      || (POINTER_TYPE_P (orig)
		  && TYPE_PRECISION (type) <= TYPE_PRECISION (orig))
	      || TREE_CODE (orig) == OFFSET_TYPE);

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    case VECTOR_TYPE:
      return (VECTOR_TYPE_P (orig)
	      && known_eq (TYPE_VECTOR_SUBPARTS (type),
			   TYPE_VECTOR_SUBPARTS (orig))
	      && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    default:
      return false;
    }
}
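/* Illustrative sketch only (hypothetical helper, not part of GCC):
   fold_convertible_p is the guard one would check before emitting a plain
   NOP_EXPR conversion, e.g. when narrowing or widening an integral value.  */

static tree
sketch_nop_convert (tree type, tree arg)
{
  if (fold_convertible_p (type, arg))
    return build1 (NOP_EXPR, type, arg);
  return NULL_TREE;
}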
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;
2464 if (TREE_CODE (arg
) == ERROR_MARK
2465 || TREE_CODE (type
) == ERROR_MARK
2466 || TREE_CODE (orig
) == ERROR_MARK
)
2467 return error_mark_node
;
2469 switch (TREE_CODE (type
))
2472 case REFERENCE_TYPE
:
2473 /* Handle conversions between pointers to different address spaces. */
2474 if (POINTER_TYPE_P (orig
)
2475 && (TYPE_ADDR_SPACE (TREE_TYPE (type
))
2476 != TYPE_ADDR_SPACE (TREE_TYPE (orig
))))
2477 return fold_build1_loc (loc
, ADDR_SPACE_CONVERT_EXPR
, type
, arg
);
2480 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
2482 if (TREE_CODE (arg
) == INTEGER_CST
)
2484 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
2485 if (tem
!= NULL_TREE
)
2488 if (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2489 || TREE_CODE (orig
) == OFFSET_TYPE
)
2490 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2491 if (TREE_CODE (orig
) == COMPLEX_TYPE
)
2492 return fold_convert_loc (loc
, type
,
2493 fold_build1_loc (loc
, REALPART_EXPR
,
2494 TREE_TYPE (orig
), arg
));
2495 gcc_assert (TREE_CODE (orig
) == VECTOR_TYPE
2496 && tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
2497 return fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, type
, arg
);
2500 if (TREE_CODE (arg
) == INTEGER_CST
)
2502 tem
= fold_convert_const (FLOAT_EXPR
, type
, arg
);
2503 if (tem
!= NULL_TREE
)
2506 else if (TREE_CODE (arg
) == REAL_CST
)
2508 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
2509 if (tem
!= NULL_TREE
)
2512 else if (TREE_CODE (arg
) == FIXED_CST
)
2514 tem
= fold_convert_const (FIXED_CONVERT_EXPR
, type
, arg
);
2515 if (tem
!= NULL_TREE
)
2519 switch (TREE_CODE (orig
))
2522 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
2523 case POINTER_TYPE
: case REFERENCE_TYPE
:
2524 return fold_build1_loc (loc
, FLOAT_EXPR
, type
, arg
);
2527 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2529 case FIXED_POINT_TYPE
:
2530 return fold_build1_loc (loc
, FIXED_CONVERT_EXPR
, type
, arg
);
2533 tem
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2534 return fold_convert_loc (loc
, type
, tem
);
2540 case FIXED_POINT_TYPE
:
2541 if (TREE_CODE (arg
) == FIXED_CST
|| TREE_CODE (arg
) == INTEGER_CST
2542 || TREE_CODE (arg
) == REAL_CST
)
2544 tem
= fold_convert_const (FIXED_CONVERT_EXPR
, type
, arg
);
2545 if (tem
!= NULL_TREE
)
2546 goto fold_convert_exit
;
2549 switch (TREE_CODE (orig
))
2551 case FIXED_POINT_TYPE
:
2556 return fold_build1_loc (loc
, FIXED_CONVERT_EXPR
, type
, arg
);
2559 tem
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2560 return fold_convert_loc (loc
, type
, tem
);
2567 switch (TREE_CODE (orig
))
2570 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
2571 case POINTER_TYPE
: case REFERENCE_TYPE
:
2573 case FIXED_POINT_TYPE
:
2574 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
2575 fold_convert_loc (loc
, TREE_TYPE (type
), arg
),
2576 fold_convert_loc (loc
, TREE_TYPE (type
),
2577 integer_zero_node
));
2582 if (TREE_CODE (arg
) == COMPLEX_EXPR
)
2584 rpart
= fold_convert_loc (loc
, TREE_TYPE (type
),
2585 TREE_OPERAND (arg
, 0));
2586 ipart
= fold_convert_loc (loc
, TREE_TYPE (type
),
2587 TREE_OPERAND (arg
, 1));
2588 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
, ipart
);
2591 arg
= save_expr (arg
);
2592 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2593 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, TREE_TYPE (orig
), arg
);
2594 rpart
= fold_convert_loc (loc
, TREE_TYPE (type
), rpart
);
2595 ipart
= fold_convert_loc (loc
, TREE_TYPE (type
), ipart
);
2596 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
, ipart
);
2604 if (integer_zerop (arg
))
2605 return build_zero_vector (type
);
2606 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
2607 gcc_assert (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2608 || TREE_CODE (orig
) == VECTOR_TYPE
);
2609 return fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, type
, arg
);
2612 tem
= fold_ignored_result (arg
);
2613 return fold_build1_loc (loc
, NOP_EXPR
, type
, tem
);
2616 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig
))
2617 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2621 protected_set_expr_location_unshare (tem
, loc
);
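/* Illustrative sketch only (hypothetical helper, not part of GCC): the middle
   end calls fold_convert_loc instead of the front end's convert () when it
   needs a (possibly folded) conversion, e.g. widening an index to sizetype
   before address arithmetic.  */

static tree
sketch_index_to_sizetype (location_t loc, tree index)
{
  return fold_convert_loc (loc, sizetype, index);
}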
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (const_tree x)
{
2631 /* We only need to wrap lvalue tree codes. */
2632 switch (TREE_CODE (x
))
2645 case ARRAY_RANGE_REF
:
2651 case PREINCREMENT_EXPR
:
2652 case PREDECREMENT_EXPR
:
2654 case TRY_CATCH_EXPR
:
2655 case WITH_CLEANUP_EXPR
:
2661 case VIEW_CONVERT_EXPR
:
2665 /* Assume the worst for front-end tree codes. */
2666 if ((int)TREE_CODE (x
) >= NUM_TREE_CODES
)
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue_loc (location_t loc, tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;
  return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
}
/* Given a tree comparison code, return the code that is the logical inverse.
   It is generally not safe to do this for floating-point comparisons, except
   for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
   ERROR_MARK in this case.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
2697 if (honor_nans
&& flag_trapping_math
&& code
!= EQ_EXPR
&& code
!= NE_EXPR
2698 && code
!= ORDERED_EXPR
&& code
!= UNORDERED_EXPR
)
2708 return honor_nans
? UNLE_EXPR
: LE_EXPR
;
2710 return honor_nans
? UNLT_EXPR
: LT_EXPR
;
2712 return honor_nans
? UNGE_EXPR
: GE_EXPR
;
2714 return honor_nans
? UNGT_EXPR
: GT_EXPR
;
2728 return UNORDERED_EXPR
;
2729 case UNORDERED_EXPR
:
2730 return ORDERED_EXPR
;
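/* Illustrative sketch only (hypothetical helper, not part of GCC): callers of
   invert_tree_comparison must be prepared for ERROR_MARK, since with
   -ftrapping-math !(a < b) is not equivalent to a >= b once NaNs are taken
   into account.  */

static tree
sketch_build_inverted_compare (location_t loc, enum tree_code code,
			       tree type, tree op0, tree op1)
{
  enum tree_code inv = invert_tree_comparison (code, HONOR_NANS (op0));
  if (inv == ERROR_MARK)
    return NULL_TREE;	/* Caller keeps the original comparison.  */
  return fold_build2_loc (loc, inv, type, op0, op1);
}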
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */
2740 swap_tree_comparison (enum tree_code code
)
2747 case UNORDERED_EXPR
:
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */
2777 static enum comparison_code
2778 comparison_to_compcode (enum tree_code code
)
2795 return COMPCODE_ORD
;
2796 case UNORDERED_EXPR
:
2797 return COMPCODE_UNORD
;
2799 return COMPCODE_UNLT
;
2801 return COMPCODE_UNEQ
;
2803 return COMPCODE_UNLE
;
2805 return COMPCODE_UNGT
;
2807 return COMPCODE_LTGT
;
2809 return COMPCODE_UNGE
;
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */
2819 static enum tree_code
2820 compcode_to_comparison (enum comparison_code code
)
2837 return ORDERED_EXPR
;
2838 case COMPCODE_UNORD
:
2839 return UNORDERED_EXPR
;
2857 /* Return true if COND1 tests the opposite condition of COND2. */
2860 inverse_conditions_p (const_tree cond1
, const_tree cond2
)
2862 return (COMPARISON_CLASS_P (cond1
)
2863 && COMPARISON_CLASS_P (cond2
)
2864 && (invert_tree_comparison
2866 HONOR_NANS (TREE_OPERAND (cond1
, 0))) == TREE_CODE (cond2
))
2867 && operand_equal_p (TREE_OPERAND (cond1
, 0),
2868 TREE_OPERAND (cond2
, 0), 0)
2869 && operand_equal_p (TREE_OPERAND (cond1
, 1),
2870 TREE_OPERAND (cond2
, 1), 0));
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */
2880 combine_comparisons (location_t loc
,
2881 enum tree_code code
, enum tree_code lcode
,
2882 enum tree_code rcode
, tree truth_type
,
2883 tree ll_arg
, tree lr_arg
)
2885 bool honor_nans
= HONOR_NANS (ll_arg
);
2886 enum comparison_code lcompcode
= comparison_to_compcode (lcode
);
2887 enum comparison_code rcompcode
= comparison_to_compcode (rcode
);
2892 case TRUTH_AND_EXPR
: case TRUTH_ANDIF_EXPR
:
2893 compcode
= lcompcode
& rcompcode
;
2896 case TRUTH_OR_EXPR
: case TRUTH_ORIF_EXPR
:
2897 compcode
= lcompcode
| rcompcode
;
2906 /* Eliminate unordered comparisons, as well as LTGT and ORD
2907 which are not used unless the mode has NaNs. */
2908 compcode
&= ~COMPCODE_UNORD
;
2909 if (compcode
== COMPCODE_LTGT
)
2910 compcode
= COMPCODE_NE
;
2911 else if (compcode
== COMPCODE_ORD
)
2912 compcode
= COMPCODE_TRUE
;
2914 else if (flag_trapping_math
)
2916 /* Check that the original operation and the optimized ones will trap
2917 under the same condition. */
2918 bool ltrap
= (lcompcode
& COMPCODE_UNORD
) == 0
2919 && (lcompcode
!= COMPCODE_EQ
)
2920 && (lcompcode
!= COMPCODE_ORD
);
2921 bool rtrap
= (rcompcode
& COMPCODE_UNORD
) == 0
2922 && (rcompcode
!= COMPCODE_EQ
)
2923 && (rcompcode
!= COMPCODE_ORD
);
2924 bool trap
= (compcode
& COMPCODE_UNORD
) == 0
2925 && (compcode
!= COMPCODE_EQ
)
2926 && (compcode
!= COMPCODE_ORD
);
2928 /* In a short-circuited boolean expression the LHS might be
2929 such that the RHS, if evaluated, will never trap. For
2930 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2931 if neither x nor y is NaN. (This is a mixed blessing: for
2932 example, the expression above will never trap, hence
2933 optimizing it to x < y would be invalid). */
2934 if ((code
== TRUTH_ORIF_EXPR
&& (lcompcode
& COMPCODE_UNORD
))
2935 || (code
== TRUTH_ANDIF_EXPR
&& !(lcompcode
& COMPCODE_UNORD
)))
2938 /* If the comparison was short-circuited, and only the RHS
2939 trapped, we may now generate a spurious trap. */
2941 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
2944 /* If we changed the conditions that cause a trap, we lose. */
2945 if ((ltrap
|| rtrap
) != trap
)
2949 if (compcode
== COMPCODE_TRUE
)
2950 return constant_boolean_node (true, truth_type
);
2951 else if (compcode
== COMPCODE_FALSE
)
2952 return constant_boolean_node (false, truth_type
);
2955 enum tree_code tcode
;
2957 tcode
= compcode_to_comparison ((enum comparison_code
) compcode
);
2958 return fold_build2_loc (loc
, tcode
, truth_type
, ll_arg
, lr_arg
);
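/* Illustrative sketch only (hypothetical helper, not part of GCC): the point
   of the bit-based compcode encoding is that combining two comparisons of the
   same operands under OR (or AND) is just a bitwise OR (or AND) of their
   codes, e.g. COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE.  The result may be
   COMPCODE_TRUE or COMPCODE_FALSE, which have no tree_code equivalent, so a
   real caller such as combine_comparisons above must special-case those.  */

static enum comparison_code
sketch_or_compcodes (enum tree_code lcode, enum tree_code rcode)
{
  return (enum comparison_code) (comparison_to_compcode (lcode)
				 | comparison_to_compcode (rcode));
}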
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.

   If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
   not values of expressions.

   If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
   such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.

   If OEP_BITWISE is set, then require the values to be bitwise identical
   rather than simply numerically equal.  Do not take advantage of things
   like math-related flags or undefined behavior; only return true for
   values that are provably bitwise identical in all circumstances.

   Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
   any operand with side effect.  This is unnecessarily conservative in the
   case we know that arg0 and arg1 are in disjoint code paths (such as in
   ?: operator).  In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
   addresses with TREE_CONSTANT flag set so we know that &var == &var
   even if var is volatile.  */
3006 operand_compare::operand_equal_p (const_tree arg0
, const_tree arg1
,
3010 if (verify_hash_value (arg0
, arg1
, flags
, &r
))
3013 STRIP_ANY_LOCATION_WRAPPER (arg0
);
3014 STRIP_ANY_LOCATION_WRAPPER (arg1
);
3016 /* If either is ERROR_MARK, they aren't equal. */
3017 if (TREE_CODE (arg0
) == ERROR_MARK
|| TREE_CODE (arg1
) == ERROR_MARK
3018 || TREE_TYPE (arg0
) == error_mark_node
3019 || TREE_TYPE (arg1
) == error_mark_node
)
3022 /* Similar, if either does not have a type (like a template id),
3023 they aren't equal. */
3024 if (!TREE_TYPE (arg0
) || !TREE_TYPE (arg1
))
3027 /* Bitwise identity makes no sense if the values have different layouts. */
3028 if ((flags
& OEP_BITWISE
)
3029 && !tree_nop_conversion_p (TREE_TYPE (arg0
), TREE_TYPE (arg1
)))
3032 /* We cannot consider pointers to different address space equal. */
3033 if (POINTER_TYPE_P (TREE_TYPE (arg0
))
3034 && POINTER_TYPE_P (TREE_TYPE (arg1
))
3035 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0
)))
3036 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1
)))))
3039 /* Check equality of integer constants before bailing out due to
3040 precision differences. */
3041 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
3043 /* Address of INTEGER_CST is not defined; check that we did not forget
3044 to drop the OEP_ADDRESS_OF flags. */
3045 gcc_checking_assert (!(flags
& OEP_ADDRESS_OF
));
3046 return tree_int_cst_equal (arg0
, arg1
);
3049 if (!(flags
& OEP_ADDRESS_OF
))
3051 /* If both types don't have the same signedness, then we can't consider
3052 them equal. We must check this before the STRIP_NOPS calls
3053 because they may change the signedness of the arguments. As pointers
3054 strictly don't have a signedness, require either two pointers or
3055 two non-pointers as well. */
3056 if (TYPE_UNSIGNED (TREE_TYPE (arg0
)) != TYPE_UNSIGNED (TREE_TYPE (arg1
))
3057 || POINTER_TYPE_P (TREE_TYPE (arg0
))
3058 != POINTER_TYPE_P (TREE_TYPE (arg1
)))
3061 /* If both types don't have the same precision, then it is not safe
3063 if (element_precision (TREE_TYPE (arg0
))
3064 != element_precision (TREE_TYPE (arg1
)))
  /* FIXME: The Fortran FE currently produces ADDR_EXPR of NOP_EXPR.  Enable
     the sanity check once the issue is solved.  */
3074 /* Addresses of conversions and SSA_NAMEs (and many other things)
3075 are not defined. Check that we did not forget to drop the
3076 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
3077 gcc_checking_assert (!CONVERT_EXPR_P (arg0
) && !CONVERT_EXPR_P (arg1
)
3078 && TREE_CODE (arg0
) != SSA_NAME
);
3081 /* In case both args are comparisons but with different comparison
3082 code, try to swap the comparison operands of one arg to produce
3083 a match and compare that variant. */
3084 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
3085 && COMPARISON_CLASS_P (arg0
)
3086 && COMPARISON_CLASS_P (arg1
))
3088 enum tree_code swap_code
= swap_tree_comparison (TREE_CODE (arg1
));
3090 if (TREE_CODE (arg0
) == swap_code
)
3091 return operand_equal_p (TREE_OPERAND (arg0
, 0),
3092 TREE_OPERAND (arg1
, 1), flags
)
3093 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3094 TREE_OPERAND (arg1
, 0), flags
);
3097 if (TREE_CODE (arg0
) != TREE_CODE (arg1
))
3099 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
3100 if (CONVERT_EXPR_P (arg0
) && CONVERT_EXPR_P (arg1
))
3102 else if (flags
& OEP_ADDRESS_OF
)
3104 /* If we are interested in comparing addresses ignore
3105 MEM_REF wrappings of the base that can appear just for
3107 if (TREE_CODE (arg0
) == MEM_REF
3109 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == ADDR_EXPR
3110 && TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0) == arg1
3111 && integer_zerop (TREE_OPERAND (arg0
, 1)))
3113 else if (TREE_CODE (arg1
) == MEM_REF
3115 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == ADDR_EXPR
3116 && TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0) == arg0
3117 && integer_zerop (TREE_OPERAND (arg1
, 1)))
  /* When not checking addresses, this is needed for conversions and for
     COMPONENT_REF.  Might as well play it safe and always test this.  */
3127 if (TREE_CODE (TREE_TYPE (arg0
)) == ERROR_MARK
3128 || TREE_CODE (TREE_TYPE (arg1
)) == ERROR_MARK
3129 || (TYPE_MODE (TREE_TYPE (arg0
)) != TYPE_MODE (TREE_TYPE (arg1
))
3130 && !(flags
& OEP_ADDRESS_OF
)))
3133 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3134 We don't care about side effects in that case because the SAVE_EXPR
3135 takes care of that for us. In all other cases, two expressions are
3136 equal if they have no side effects. If we have two identical
3137 expressions with side effects that should be treated the same due
3138 to the only side effects being identical SAVE_EXPR's, that will
3139 be detected in the recursive calls below.
3140 If we are taking an invariant address of two identical objects
3141 they are necessarily equal as well. */
3142 if (arg0
== arg1
&& ! (flags
& OEP_ONLY_CONST
)
3143 && (TREE_CODE (arg0
) == SAVE_EXPR
3144 || (flags
& OEP_MATCH_SIDE_EFFECTS
)
3145 || (! TREE_SIDE_EFFECTS (arg0
) && ! TREE_SIDE_EFFECTS (arg1
))))
3148 /* Next handle constant cases, those for which we can return 1 even
3149 if ONLY_CONST is set. */
3150 if (TREE_CONSTANT (arg0
) && TREE_CONSTANT (arg1
))
3151 switch (TREE_CODE (arg0
))
3154 return tree_int_cst_equal (arg0
, arg1
);
3157 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0
),
3158 TREE_FIXED_CST (arg1
));
3161 if (real_identical (&TREE_REAL_CST (arg0
), &TREE_REAL_CST (arg1
)))
3164 if (!(flags
& OEP_BITWISE
) && !HONOR_SIGNED_ZEROS (arg0
))
3166 /* If we do not distinguish between signed and unsigned zero,
3167 consider them equal. */
3168 if (real_zerop (arg0
) && real_zerop (arg1
))
3175 if (VECTOR_CST_LOG2_NPATTERNS (arg0
)
3176 != VECTOR_CST_LOG2_NPATTERNS (arg1
))
3179 if (VECTOR_CST_NELTS_PER_PATTERN (arg0
)
3180 != VECTOR_CST_NELTS_PER_PATTERN (arg1
))
3183 unsigned int count
= vector_cst_encoded_nelts (arg0
);
3184 for (unsigned int i
= 0; i
< count
; ++i
)
3185 if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0
, i
),
3186 VECTOR_CST_ENCODED_ELT (arg1
, i
), flags
))
3192 return (operand_equal_p (TREE_REALPART (arg0
), TREE_REALPART (arg1
),
3194 && operand_equal_p (TREE_IMAGPART (arg0
), TREE_IMAGPART (arg1
),
3198 return (TREE_STRING_LENGTH (arg0
) == TREE_STRING_LENGTH (arg1
)
3199 && ! memcmp (TREE_STRING_POINTER (arg0
),
3200 TREE_STRING_POINTER (arg1
),
3201 TREE_STRING_LENGTH (arg0
)));
3204 gcc_checking_assert (!(flags
& OEP_ADDRESS_OF
));
3205 return operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0),
3206 flags
| OEP_ADDRESS_OF
3207 | OEP_MATCH_SIDE_EFFECTS
);
      /* In GIMPLE empty constructors are allowed in initializers of
	 aggregates.  */
      return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
3216 /* Don't handle more cases for OEP_BITWISE, since we can't guarantee that
3217 two instances of undefined behavior will give identical results. */
3218 if (flags
& (OEP_ONLY_CONST
| OEP_BITWISE
))
3221 /* Define macros to test an operand from arg0 and arg1 for equality and a
3222 variant that allows null and views null as being different from any
3223 non-null value. In the latter case, if either is null, the both
3224 must be; otherwise, do the normal comparison. */
3225 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3226 TREE_OPERAND (arg1, N), flags)
3228 #define OP_SAME_WITH_NULL(N) \
3229 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3230 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3232 switch (TREE_CODE_CLASS (TREE_CODE (arg0
)))
3235 /* Two conversions are equal only if signedness and modes match. */
3236 switch (TREE_CODE (arg0
))
3239 case FIX_TRUNC_EXPR
:
3240 if (TYPE_UNSIGNED (TREE_TYPE (arg0
))
3241 != TYPE_UNSIGNED (TREE_TYPE (arg1
)))
3251 case tcc_comparison
:
3253 if (OP_SAME (0) && OP_SAME (1))
3256 /* For commutative ops, allow the other order. */
3257 return (commutative_tree_code (TREE_CODE (arg0
))
3258 && operand_equal_p (TREE_OPERAND (arg0
, 0),
3259 TREE_OPERAND (arg1
, 1), flags
)
3260 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3261 TREE_OPERAND (arg1
, 0), flags
));
3264 /* If either of the pointer (or reference) expressions we are
3265 dereferencing contain a side effect, these cannot be equal,
3266 but their addresses can be. */
3267 if ((flags
& OEP_MATCH_SIDE_EFFECTS
) == 0
3268 && (TREE_SIDE_EFFECTS (arg0
)
3269 || TREE_SIDE_EFFECTS (arg1
)))
3272 switch (TREE_CODE (arg0
))
3275 if (!(flags
& OEP_ADDRESS_OF
))
3277 if (TYPE_ALIGN (TREE_TYPE (arg0
))
3278 != TYPE_ALIGN (TREE_TYPE (arg1
)))
3280 /* Verify that the access types are compatible. */
3281 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg0
))
3282 != TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)))
3285 flags
&= ~OEP_ADDRESS_OF
;
3289 /* Require the same offset. */
3290 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0
)),
3291 TYPE_SIZE (TREE_TYPE (arg1
)),
3292 flags
& ~OEP_ADDRESS_OF
))
3297 case VIEW_CONVERT_EXPR
:
3300 case TARGET_MEM_REF
:
3302 if (!(flags
& OEP_ADDRESS_OF
))
3304 /* Require equal access sizes */
3305 if (TYPE_SIZE (TREE_TYPE (arg0
)) != TYPE_SIZE (TREE_TYPE (arg1
))
3306 && (!TYPE_SIZE (TREE_TYPE (arg0
))
3307 || !TYPE_SIZE (TREE_TYPE (arg1
))
3308 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0
)),
3309 TYPE_SIZE (TREE_TYPE (arg1
)),
3312 /* Verify that access happens in similar types. */
3313 if (!types_compatible_p (TREE_TYPE (arg0
), TREE_TYPE (arg1
)))
3315 /* Verify that accesses are TBAA compatible. */
3316 if (!alias_ptr_types_compatible_p
3317 (TREE_TYPE (TREE_OPERAND (arg0
, 1)),
3318 TREE_TYPE (TREE_OPERAND (arg1
, 1)))
3319 || (MR_DEPENDENCE_CLIQUE (arg0
)
3320 != MR_DEPENDENCE_CLIQUE (arg1
))
3321 || (MR_DEPENDENCE_BASE (arg0
)
3322 != MR_DEPENDENCE_BASE (arg1
)))
3324 /* Verify that alignment is compatible. */
3325 if (TYPE_ALIGN (TREE_TYPE (arg0
))
3326 != TYPE_ALIGN (TREE_TYPE (arg1
)))
3329 flags
&= ~OEP_ADDRESS_OF
;
3330 return (OP_SAME (0) && OP_SAME (1)
3331 /* TARGET_MEM_REF require equal extra operands. */
3332 && (TREE_CODE (arg0
) != TARGET_MEM_REF
3333 || (OP_SAME_WITH_NULL (2)
3334 && OP_SAME_WITH_NULL (3)
3335 && OP_SAME_WITH_NULL (4))));
3338 case ARRAY_RANGE_REF
:
3341 flags
&= ~OEP_ADDRESS_OF
;
3342 /* Compare the array index by value if it is constant first as we
3343 may have different types but same value here. */
3344 return ((tree_int_cst_equal (TREE_OPERAND (arg0
, 1),
3345 TREE_OPERAND (arg1
, 1))
3347 && OP_SAME_WITH_NULL (2)
3348 && OP_SAME_WITH_NULL (3)
3349 /* Compare low bound and element size as with OEP_ADDRESS_OF
3350 we have to account for the offset of the ref. */
3351 && (TREE_TYPE (TREE_OPERAND (arg0
, 0))
3352 == TREE_TYPE (TREE_OPERAND (arg1
, 0))
3353 || (operand_equal_p (array_ref_low_bound
3354 (CONST_CAST_TREE (arg0
)),
3356 (CONST_CAST_TREE (arg1
)), flags
)
3357 && operand_equal_p (array_ref_element_size
3358 (CONST_CAST_TREE (arg0
)),
3359 array_ref_element_size
3360 (CONST_CAST_TREE (arg1
)),
3364 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3365 may be NULL when we're called to compare MEM_EXPRs. */
3366 if (!OP_SAME_WITH_NULL (0))
3369 bool compare_address
= flags
& OEP_ADDRESS_OF
;
3371 /* Most of time we only need to compare FIELD_DECLs for equality.
3372 However when determining address look into actual offsets.
3373 These may match for unions and unshared record types. */
3374 flags
&= ~OEP_ADDRESS_OF
;
3378 && (flags
& OEP_ADDRESS_OF_SAME_FIELD
) == 0)
3380 tree field0
= TREE_OPERAND (arg0
, 1);
3381 tree field1
= TREE_OPERAND (arg1
, 1);
3383 /* Non-FIELD_DECL operands can appear in C++ templates. */
3384 if (TREE_CODE (field0
) != FIELD_DECL
3385 || TREE_CODE (field1
) != FIELD_DECL
3386 || !operand_equal_p (DECL_FIELD_OFFSET (field0
),
3387 DECL_FIELD_OFFSET (field1
), flags
)
3388 || !operand_equal_p (DECL_FIELD_BIT_OFFSET (field0
),
3389 DECL_FIELD_BIT_OFFSET (field1
),
3397 return OP_SAME_WITH_NULL (2);
3402 flags
&= ~OEP_ADDRESS_OF
;
3403 return OP_SAME (1) && OP_SAME (2);
3409 case tcc_expression
:
3410 switch (TREE_CODE (arg0
))
3413 /* Be sure we pass right ADDRESS_OF flag. */
3414 gcc_checking_assert (!(flags
& OEP_ADDRESS_OF
));
3415 return operand_equal_p (TREE_OPERAND (arg0
, 0),
3416 TREE_OPERAND (arg1
, 0),
3417 flags
| OEP_ADDRESS_OF
);
3419 case TRUTH_NOT_EXPR
:
3422 case TRUTH_ANDIF_EXPR
:
3423 case TRUTH_ORIF_EXPR
:
3424 return OP_SAME (0) && OP_SAME (1);
3426 case WIDEN_MULT_PLUS_EXPR
:
3427 case WIDEN_MULT_MINUS_EXPR
:
	  /* The multiplication operands are commutative.  */
3433 case TRUTH_AND_EXPR
:
3435 case TRUTH_XOR_EXPR
:
3436 if (OP_SAME (0) && OP_SAME (1))
3439 /* Otherwise take into account this is a commutative operation. */
3440 return (operand_equal_p (TREE_OPERAND (arg0
, 0),
3441 TREE_OPERAND (arg1
, 1), flags
)
3442 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3443 TREE_OPERAND (arg1
, 0), flags
));
3446 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3448 flags
&= ~OEP_ADDRESS_OF
;
3451 case BIT_INSERT_EXPR
:
	/* BIT_INSERT_EXPR has an implicit operand as the type precision
	   of op1.  Need to check to make sure they are the same.  */
3454 if (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
3455 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
3456 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0
, 1)))
3457 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1
, 1))))
3463 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3468 case PREDECREMENT_EXPR
:
3469 case PREINCREMENT_EXPR
:
3470 case POSTDECREMENT_EXPR
:
3471 case POSTINCREMENT_EXPR
:
3472 if (flags
& OEP_LEXICOGRAPHIC
)
3473 return OP_SAME (0) && OP_SAME (1);
3476 case CLEANUP_POINT_EXPR
:
3479 if (flags
& OEP_LEXICOGRAPHIC
)
3484 /* Virtual table reference. */
3485 if (!operand_equal_p (OBJ_TYPE_REF_EXPR (arg0
),
3486 OBJ_TYPE_REF_EXPR (arg1
), flags
))
3488 flags
&= ~OEP_ADDRESS_OF
;
3489 if (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg0
))
3490 != tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg1
)))
3492 if (!operand_equal_p (OBJ_TYPE_REF_OBJECT (arg0
),
3493 OBJ_TYPE_REF_OBJECT (arg1
), flags
))
3495 if (virtual_method_call_p (arg0
))
3497 if (!virtual_method_call_p (arg1
))
3499 return types_same_for_odr (obj_type_ref_class (arg0
),
3500 obj_type_ref_class (arg1
));
3509 switch (TREE_CODE (arg0
))
3512 if ((CALL_EXPR_FN (arg0
) == NULL_TREE
)
3513 != (CALL_EXPR_FN (arg1
) == NULL_TREE
))
	    /* If the two CALL_EXPRs are not both internal or both normal
	       function calls, then they are not equal.  */
3517 else if (CALL_EXPR_FN (arg0
) == NULL_TREE
)
3519 /* If the CALL_EXPRs call different internal functions, then they
3521 if (CALL_EXPR_IFN (arg0
) != CALL_EXPR_IFN (arg1
))
3526 /* If the CALL_EXPRs call different functions, then they are not
3528 if (! operand_equal_p (CALL_EXPR_FN (arg0
), CALL_EXPR_FN (arg1
),
3533 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3535 unsigned int cef
= call_expr_flags (arg0
);
3536 if (flags
& OEP_PURE_SAME
)
3537 cef
&= ECF_CONST
| ECF_PURE
;
3540 if (!cef
&& !(flags
& OEP_LEXICOGRAPHIC
))
3544 /* Now see if all the arguments are the same. */
3546 const_call_expr_arg_iterator iter0
, iter1
;
3548 for (a0
= first_const_call_expr_arg (arg0
, &iter0
),
3549 a1
= first_const_call_expr_arg (arg1
, &iter1
);
3551 a0
= next_const_call_expr_arg (&iter0
),
3552 a1
= next_const_call_expr_arg (&iter1
))
3553 if (! operand_equal_p (a0
, a1
, flags
))
3556 /* If we get here and both argument lists are exhausted
3557 then the CALL_EXPRs are equal. */
3558 return ! (a0
|| a1
);
3564 case tcc_declaration
:
3565 /* Consider __builtin_sqrt equal to sqrt. */
3566 if (TREE_CODE (arg0
) == FUNCTION_DECL
)
3567 return (fndecl_built_in_p (arg0
) && fndecl_built_in_p (arg1
)
3568 && DECL_BUILT_IN_CLASS (arg0
) == DECL_BUILT_IN_CLASS (arg1
)
3569 && (DECL_UNCHECKED_FUNCTION_CODE (arg0
)
3570 == DECL_UNCHECKED_FUNCTION_CODE (arg1
)));
3573 && (flags
& OEP_DECL_NAME
)
3574 && (flags
& OEP_LEXICOGRAPHIC
))
3576 /* Consider decls with the same name equal. The caller needs
3577 to make sure they refer to the same entity (such as a function
3578 formal parameter). */
3579 tree a0name
= DECL_NAME (arg0
);
3580 tree a1name
= DECL_NAME (arg1
);
3581 const char *a0ns
= a0name
? IDENTIFIER_POINTER (a0name
) : NULL
;
3582 const char *a1ns
= a1name
? IDENTIFIER_POINTER (a1name
) : NULL
;
3583 return a0ns
&& a1ns
&& strcmp (a0ns
, a1ns
) == 0;
3587 case tcc_exceptional
:
3588 if (TREE_CODE (arg0
) == CONSTRUCTOR
)
3590 if (CONSTRUCTOR_NO_CLEARING (arg0
) != CONSTRUCTOR_NO_CLEARING (arg1
))
3593 /* In GIMPLE constructors are used only to build vectors from
3594 elements. Individual elements in the constructor must be
3595 indexed in increasing order and form an initial sequence.
3597 We make no effort to compare constructors in generic.
3598 (see sem_variable::equals in ipa-icf which can do so for
3600 if (!VECTOR_TYPE_P (TREE_TYPE (arg0
))
3601 || !VECTOR_TYPE_P (TREE_TYPE (arg1
)))
3604 /* Be sure that vectors constructed have the same representation.
3605 We only tested element precision and modes to match.
3606 Vectors may be BLKmode and thus also check that the number of
3608 if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)),
3609 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
))))
3612 vec
<constructor_elt
, va_gc
> *v0
= CONSTRUCTOR_ELTS (arg0
);
3613 vec
<constructor_elt
, va_gc
> *v1
= CONSTRUCTOR_ELTS (arg1
);
3614 unsigned int len
= vec_safe_length (v0
);
3616 if (len
!= vec_safe_length (v1
))
3619 for (unsigned int i
= 0; i
< len
; i
++)
3621 constructor_elt
*c0
= &(*v0
)[i
];
3622 constructor_elt
*c1
= &(*v1
)[i
];
3624 if (!operand_equal_p (c0
->value
, c1
->value
, flags
)
3625 /* In GIMPLE the indexes can be either NULL or matching i.
3626 Double check this so we won't get false
3627 positives for GENERIC. */
3629 && (TREE_CODE (c0
->index
) != INTEGER_CST
3630 || compare_tree_int (c0
->index
, i
)))
3632 && (TREE_CODE (c1
->index
) != INTEGER_CST
3633 || compare_tree_int (c1
->index
, i
))))
3638 else if (TREE_CODE (arg0
) == STATEMENT_LIST
3639 && (flags
& OEP_LEXICOGRAPHIC
))
3641 /* Compare the STATEMENT_LISTs. */
3642 tree_stmt_iterator tsi1
, tsi2
;
3643 tree body1
= CONST_CAST_TREE (arg0
);
3644 tree body2
= CONST_CAST_TREE (arg1
);
3645 for (tsi1
= tsi_start (body1
), tsi2
= tsi_start (body2
); ;
3646 tsi_next (&tsi1
), tsi_next (&tsi2
))
3648 /* The lists don't have the same number of statements. */
3649 if (tsi_end_p (tsi1
) ^ tsi_end_p (tsi2
))
3651 if (tsi_end_p (tsi1
) && tsi_end_p (tsi2
))
3653 if (!operand_equal_p (tsi_stmt (tsi1
), tsi_stmt (tsi2
),
3654 flags
& (OEP_LEXICOGRAPHIC
3655 | OEP_NO_HASH_CHECK
)))
3662 switch (TREE_CODE (arg0
))
3665 if (flags
& OEP_LEXICOGRAPHIC
)
3666 return OP_SAME_WITH_NULL (0);
3668 case DEBUG_BEGIN_STMT
:
3669 if (flags
& OEP_LEXICOGRAPHIC
)
3681 #undef OP_SAME_WITH_NULL
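/* Illustrative sketch only (hypothetical helper, not part of GCC): the FLAGS
   argument changes what "equal" means.  When comparing two references as
   addresses rather than as values, pass OEP_ADDRESS_OF so that checks that
   only make sense for values (signedness, precision) are skipped.  */

static bool
sketch_same_address_p (tree ref0, tree ref1)
{
  return operand_equal_p (ref0, ref1, OEP_ADDRESS_OF);
}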
3684 /* Generate a hash value for an expression. This can be used iteratively
3685 by passing a previous result as the HSTATE argument. */
3688 operand_compare::hash_operand (const_tree t
, inchash::hash
&hstate
,
3692 enum tree_code code
;
3693 enum tree_code_class tclass
;
3695 if (t
== NULL_TREE
|| t
== error_mark_node
)
3697 hstate
.merge_hash (0);
3701 STRIP_ANY_LOCATION_WRAPPER (t
);
3703 if (!(flags
& OEP_ADDRESS_OF
))
3706 code
= TREE_CODE (t
);
3710 /* Alas, constants aren't shared, so we can't rely on pointer
3713 hstate
.merge_hash (0);
3716 gcc_checking_assert (!(flags
& OEP_ADDRESS_OF
));
3717 for (i
= 0; i
< TREE_INT_CST_EXT_NUNITS (t
); i
++)
3718 hstate
.add_hwi (TREE_INT_CST_ELT (t
, i
));
3723 if (!HONOR_SIGNED_ZEROS (t
) && real_zerop (t
))
3726 val2
= real_hash (TREE_REAL_CST_PTR (t
));
3727 hstate
.merge_hash (val2
);
3732 unsigned int val2
= fixed_hash (TREE_FIXED_CST_PTR (t
));
3733 hstate
.merge_hash (val2
);
3737 hstate
.add ((const void *) TREE_STRING_POINTER (t
),
3738 TREE_STRING_LENGTH (t
));
3741 hash_operand (TREE_REALPART (t
), hstate
, flags
);
3742 hash_operand (TREE_IMAGPART (t
), hstate
, flags
);
3746 hstate
.add_int (VECTOR_CST_NPATTERNS (t
));
3747 hstate
.add_int (VECTOR_CST_NELTS_PER_PATTERN (t
));
3748 unsigned int count
= vector_cst_encoded_nelts (t
);
3749 for (unsigned int i
= 0; i
< count
; ++i
)
3750 hash_operand (VECTOR_CST_ENCODED_ELT (t
, i
), hstate
, flags
);
3754 /* We can just compare by pointer. */
3755 hstate
.add_hwi (SSA_NAME_VERSION (t
));
3757 case PLACEHOLDER_EXPR
:
3758 /* The node itself doesn't matter. */
3765 /* A list of expressions, for a CALL_EXPR or as the elements of a
3767 for (; t
; t
= TREE_CHAIN (t
))
3768 hash_operand (TREE_VALUE (t
), hstate
, flags
);
3772 unsigned HOST_WIDE_INT idx
;
3774 flags
&= ~OEP_ADDRESS_OF
;
3775 hstate
.add_int (CONSTRUCTOR_NO_CLEARING (t
));
3776 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t
), idx
, field
, value
)
3778 /* In GIMPLE the indexes can be either NULL or matching i. */
3779 if (field
== NULL_TREE
)
3780 field
= bitsize_int (idx
);
3781 hash_operand (field
, hstate
, flags
);
3782 hash_operand (value
, hstate
, flags
);
3786 case STATEMENT_LIST
:
3788 tree_stmt_iterator i
;
3789 for (i
= tsi_start (CONST_CAST_TREE (t
));
3790 !tsi_end_p (i
); tsi_next (&i
))
3791 hash_operand (tsi_stmt (i
), hstate
, flags
);
3795 for (i
= 0; i
< TREE_VEC_LENGTH (t
); ++i
)
3796 hash_operand (TREE_VEC_ELT (t
, i
), hstate
, flags
);
3798 case IDENTIFIER_NODE
:
3799 hstate
.add_object (IDENTIFIER_HASH_VALUE (t
));
3802 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
3803 Otherwise nodes that compare equal according to operand_equal_p might
3804 get different hash codes. However, don't do this for machine specific
3805 or front end builtins, since the function code is overloaded in those
3807 if (DECL_BUILT_IN_CLASS (t
) == BUILT_IN_NORMAL
3808 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t
)))
3810 t
= builtin_decl_explicit (DECL_FUNCTION_CODE (t
));
3811 code
= TREE_CODE (t
);
3815 if (POLY_INT_CST_P (t
))
3817 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
3818 hstate
.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t
, i
)));
3821 tclass
= TREE_CODE_CLASS (code
);
3823 if (tclass
== tcc_declaration
)
3825 /* DECL's have a unique ID */
3826 hstate
.add_hwi (DECL_UID (t
));
3828 else if (tclass
== tcc_comparison
&& !commutative_tree_code (code
))
3830 /* For comparisons that can be swapped, use the lower
3832 enum tree_code ccode
= swap_tree_comparison (code
);
3835 hstate
.add_object (ccode
);
3836 hash_operand (TREE_OPERAND (t
, ccode
!= code
), hstate
, flags
);
3837 hash_operand (TREE_OPERAND (t
, ccode
== code
), hstate
, flags
);
3839 else if (CONVERT_EXPR_CODE_P (code
))
3841 /* NOP_EXPR and CONVERT_EXPR are considered equal by
3843 enum tree_code ccode
= NOP_EXPR
;
3844 hstate
.add_object (ccode
);
3846 /* Don't hash the type, that can lead to having nodes which
3847 compare equal according to operand_equal_p, but which
3848 have different hash codes. Make sure to include signedness
3849 in the hash computation. */
3850 hstate
.add_int (TYPE_UNSIGNED (TREE_TYPE (t
)));
3851 hash_operand (TREE_OPERAND (t
, 0), hstate
, flags
);
3853 /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
3854 else if (code
== MEM_REF
3855 && (flags
& OEP_ADDRESS_OF
) != 0
3856 && TREE_CODE (TREE_OPERAND (t
, 0)) == ADDR_EXPR
3857 && DECL_P (TREE_OPERAND (TREE_OPERAND (t
, 0), 0))
3858 && integer_zerop (TREE_OPERAND (t
, 1)))
3859 hash_operand (TREE_OPERAND (TREE_OPERAND (t
, 0), 0),
3861 /* Don't ICE on FE specific trees, or their arguments etc.
3862 during operand_equal_p hash verification. */
3863 else if (!IS_EXPR_CODE_CLASS (tclass
))
3864 gcc_assert (flags
& OEP_HASH_CHECK
);
3867 unsigned int sflags
= flags
;
3869 hstate
.add_object (code
);
3874 gcc_checking_assert (!(flags
& OEP_ADDRESS_OF
));
3875 flags
|= OEP_ADDRESS_OF
;
3881 case TARGET_MEM_REF
:
3882 flags
&= ~OEP_ADDRESS_OF
;
3887 if (sflags
& OEP_ADDRESS_OF
)
3889 hash_operand (TREE_OPERAND (t
, 0), hstate
, flags
);
3890 hash_operand (DECL_FIELD_OFFSET (TREE_OPERAND (t
, 1)),
3891 hstate
, flags
& ~OEP_ADDRESS_OF
);
3892 hash_operand (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (t
, 1)),
3893 hstate
, flags
& ~OEP_ADDRESS_OF
);
3898 case ARRAY_RANGE_REF
:
3900 sflags
&= ~OEP_ADDRESS_OF
;
3904 flags
&= ~OEP_ADDRESS_OF
;
3907 case WIDEN_MULT_PLUS_EXPR
:
3908 case WIDEN_MULT_MINUS_EXPR
:
3910 /* The multiplication operands are commutative. */
3911 inchash::hash one
, two
;
3912 hash_operand (TREE_OPERAND (t
, 0), one
, flags
);
3913 hash_operand (TREE_OPERAND (t
, 1), two
, flags
);
3914 hstate
.add_commutative (one
, two
);
3915 hash_operand (TREE_OPERAND (t
, 2), two
, flags
);
3920 if (CALL_EXPR_FN (t
) == NULL_TREE
)
3921 hstate
.add_int (CALL_EXPR_IFN (t
));
3925 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
3926 Usually different TARGET_EXPRs just should use
3927 different temporaries in their slots. */
3928 hash_operand (TARGET_EXPR_SLOT (t
), hstate
, flags
);
3932 /* Virtual table reference. */
3933 inchash::add_expr (OBJ_TYPE_REF_EXPR (t
), hstate
, flags
);
3934 flags
&= ~OEP_ADDRESS_OF
;
3935 inchash::add_expr (OBJ_TYPE_REF_TOKEN (t
), hstate
, flags
);
3936 inchash::add_expr (OBJ_TYPE_REF_OBJECT (t
), hstate
, flags
);
3937 if (!virtual_method_call_p (t
))
3939 if (tree c
= obj_type_ref_class (t
))
3941 c
= TYPE_NAME (TYPE_MAIN_VARIANT (c
));
3942 /* We compute mangled names only when free_lang_data is run.
3943 In that case we can hash precisely. */
3944 if (TREE_CODE (c
) == TYPE_DECL
3945 && DECL_ASSEMBLER_NAME_SET_P (c
))
3947 (IDENTIFIER_HASH_VALUE
3948 (DECL_ASSEMBLER_NAME (c
)));
3955 /* Don't hash the type, that can lead to having nodes which
3956 compare equal according to operand_equal_p, but which
3957 have different hash codes. */
3958 if (code
== NON_LVALUE_EXPR
)
	  /* Make sure to include signedness in the hash computation.  */
3961 hstate
.add_int (TYPE_UNSIGNED (TREE_TYPE (t
)));
3962 hash_operand (TREE_OPERAND (t
, 0), hstate
, flags
);
3965 else if (commutative_tree_code (code
))
3967 /* It's a commutative expression. We want to hash it the same
3968 however it appears. We do this by first hashing both operands
3969 and then rehashing based on the order of their independent
3971 inchash::hash one
, two
;
3972 hash_operand (TREE_OPERAND (t
, 0), one
, flags
);
3973 hash_operand (TREE_OPERAND (t
, 1), two
, flags
);
3974 hstate
.add_commutative (one
, two
);
3977 for (i
= TREE_OPERAND_LENGTH (t
) - 1; i
>= 0; --i
)
3978 hash_operand (TREE_OPERAND (t
, i
), hstate
,
3979 i
== 0 ? flags
: sflags
);
3986 operand_compare::verify_hash_value (const_tree arg0
, const_tree arg1
,
3987 unsigned int flags
, bool *ret
)
3989 /* When checking and unless comparing DECL names, verify that if
3990 the outermost operand_equal_p call returns non-zero then ARG0
3991 and ARG1 have the same hash value. */
3992 if (flag_checking
&& !(flags
& OEP_NO_HASH_CHECK
))
3994 if (operand_equal_p (arg0
, arg1
, flags
| OEP_NO_HASH_CHECK
))
3996 if (arg0
!= arg1
&& !(flags
& OEP_DECL_NAME
))
3998 inchash::hash
hstate0 (0), hstate1 (0);
3999 hash_operand (arg0
, hstate0
, flags
| OEP_HASH_CHECK
);
4000 hash_operand (arg1
, hstate1
, flags
| OEP_HASH_CHECK
);
4001 hashval_t h0
= hstate0
.end ();
4002 hashval_t h1
= hstate1
.end ();
4003 gcc_assert (h0
== h1
);
4017 static operand_compare default_compare_instance
;
/* Convenience wrapper around the operand_compare class, because usually we
   do not need to play with the valueizer.  */
4023 operand_equal_p (const_tree arg0
, const_tree arg1
, unsigned int flags
)
4025 return default_compare_instance
.operand_equal_p (arg0
, arg1
, flags
);
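/* Illustrative sketch only (hypothetical helper, not part of GCC): the
   contract checked by verify_hash_value above is that expressions which
   compare equal under operand_equal_p must also hash identically via
   inchash::add_expr, so hash tables keyed on expression hashes stay
   consistent with tree equality.  */

static bool
sketch_hashes_agree_p (const_tree a, const_tree b)
{
  if (!operand_equal_p (a, b, 0))
    return true;	/* Nothing to check for unequal operands.  */
  inchash::hash ha, hb;
  inchash::add_expr (a, ha, 0);
  inchash::add_expr (b, hb, 0);
  return ha.end () == hb.end ();
}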
4031 /* Generate a hash value for an expression. This can be used iteratively
4032 by passing a previous result as the HSTATE argument.
4034 This function is intended to produce the same hash for expressions which
4035 would compare equal using operand_equal_p. */
4037 add_expr (const_tree t
, inchash::hash
&hstate
, unsigned int flags
)
4039 default_compare_instance
.hash_operand (t
, hstate
, flags
);
/* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
   with a different signedness or a narrower precision.  */
4048 operand_equal_for_comparison_p (tree arg0
, tree arg1
)
4050 if (operand_equal_p (arg0
, arg1
, 0))
4053 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
4054 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1
)))
4057 /* Discard any conversions that don't change the modes of ARG0 and ARG1
4058 and see if the inner values are the same. This removes any
4059 signedness comparison, which doesn't matter here. */
4064 if (operand_equal_p (op0
, op1
, 0))
4067 /* Discard a single widening conversion from ARG1 and see if the inner
4068 value is the same as ARG0. */
4069 if (CONVERT_EXPR_P (arg1
)
4070 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1
, 0)))
4071 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1
, 0)))
4072 < TYPE_PRECISION (TREE_TYPE (arg1
))
4073 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.

   If this is true, return 1.  Otherwise, return zero.  */
4089 twoval_comparison_p (tree arg
, tree
*cval1
, tree
*cval2
)
4091 enum tree_code code
= TREE_CODE (arg
);
4092 enum tree_code_class tclass
= TREE_CODE_CLASS (code
);
4094 /* We can handle some of the tcc_expression cases here. */
4095 if (tclass
== tcc_expression
&& code
== TRUTH_NOT_EXPR
)
4097 else if (tclass
== tcc_expression
4098 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
4099 || code
== COMPOUND_EXPR
))
4100 tclass
= tcc_binary
;
4105 return twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
);
4108 return (twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
)
4109 && twoval_comparison_p (TREE_OPERAND (arg
, 1), cval1
, cval2
));
4114 case tcc_expression
:
4115 if (code
== COND_EXPR
)
4116 return (twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
)
4117 && twoval_comparison_p (TREE_OPERAND (arg
, 1), cval1
, cval2
)
4118 && twoval_comparison_p (TREE_OPERAND (arg
, 2), cval1
, cval2
));
4121 case tcc_comparison
:
4122 /* First see if we can handle the first operand, then the second. For
4123 the second operand, we know *CVAL1 can't be zero. It must be that
4124 one side of the comparison is each of the values; test for the
4125 case where this isn't true by failing if the two operands
4128 if (operand_equal_p (TREE_OPERAND (arg
, 0),
4129 TREE_OPERAND (arg
, 1), 0))
4133 *cval1
= TREE_OPERAND (arg
, 0);
4134 else if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 0), 0))
4136 else if (*cval2
== 0)
4137 *cval2
= TREE_OPERAND (arg
, 0);
4138 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 0), 0))
4143 if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 1), 0))
4145 else if (*cval2
== 0)
4146 *cval2
= TREE_OPERAND (arg
, 1);
4147 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 1), 0))
4159 /* ARG is a tree that is known to contain just arithmetic operations and
4160 comparisons. Evaluate the operations in the tree substituting NEW0 for
4161 any occurrence of OLD0 as an operand of a comparison and likewise for
4165 eval_subst (location_t loc
, tree arg
, tree old0
, tree new0
,
4166 tree old1
, tree new1
)
4168 tree type
= TREE_TYPE (arg
);
4169 enum tree_code code
= TREE_CODE (arg
);
4170 enum tree_code_class tclass
= TREE_CODE_CLASS (code
);
4172 /* We can handle some of the tcc_expression cases here. */
4173 if (tclass
== tcc_expression
&& code
== TRUTH_NOT_EXPR
)
4175 else if (tclass
== tcc_expression
4176 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
4177 tclass
= tcc_binary
;
4182 return fold_build1_loc (loc
, code
, type
,
4183 eval_subst (loc
, TREE_OPERAND (arg
, 0),
4184 old0
, new0
, old1
, new1
));
4187 return fold_build2_loc (loc
, code
, type
,
4188 eval_subst (loc
, TREE_OPERAND (arg
, 0),
4189 old0
, new0
, old1
, new1
),
4190 eval_subst (loc
, TREE_OPERAND (arg
, 1),
4191 old0
, new0
, old1
, new1
));
4193 case tcc_expression
:
4197 return eval_subst (loc
, TREE_OPERAND (arg
, 0), old0
, new0
,
4201 return eval_subst (loc
, TREE_OPERAND (arg
, 1), old0
, new0
,
4205 return fold_build3_loc (loc
, code
, type
,
4206 eval_subst (loc
, TREE_OPERAND (arg
, 0),
4207 old0
, new0
, old1
, new1
),
4208 eval_subst (loc
, TREE_OPERAND (arg
, 1),
4209 old0
, new0
, old1
, new1
),
4210 eval_subst (loc
, TREE_OPERAND (arg
, 2),
4211 old0
, new0
, old1
, new1
));
4215 /* Fall through - ??? */
4217 case tcc_comparison
:
4219 tree arg0
= TREE_OPERAND (arg
, 0);
4220 tree arg1
= TREE_OPERAND (arg
, 1);
4222 /* We need to check both for exact equality and tree equality. The
4223 former will be true if the operand has a side-effect. In that
4224 case, we know the operand occurred exactly once. */
4226 if (arg0
== old0
|| operand_equal_p (arg0
, old0
, 0))
4228 else if (arg0
== old1
|| operand_equal_p (arg0
, old1
, 0))
4231 if (arg1
== old0
|| operand_equal_p (arg1
, old0
, 0))
4233 else if (arg1
== old1
|| operand_equal_p (arg1
, old1
, 0))
4236 return fold_build2_loc (loc
, code
, type
, arg0
, arg1
);
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */
4252 omit_one_operand_loc (location_t loc
, tree type
, tree result
, tree omitted
)
4254 tree t
= fold_convert_loc (loc
, type
, result
);
4256 /* If the resulting operand is an empty statement, just return the omitted
4257 statement casted to void. */
4258 if (IS_EMPTY_STMT (t
) && TREE_SIDE_EFFECTS (omitted
))
4259 return build1_loc (loc
, NOP_EXPR
, void_type_node
,
4260 fold_ignored_result (omitted
));
4262 if (TREE_SIDE_EFFECTS (omitted
))
4263 return build2_loc (loc
, COMPOUND_EXPR
, type
,
4264 fold_ignored_result (omitted
), t
);
4266 return non_lvalue_loc (loc
, t
);
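/* Illustrative sketch only (hypothetical helper, not part of GCC): a classic
   user of omit_one_operand_loc is folding "x * 0" to 0.  The result of the
   multiplication is known, but x must still be evaluated when it has side
   effects (say, a call), which is what omit_one_operand_loc arranges via a
   COMPOUND_EXPR.  */

static tree
sketch_fold_mult_by_zero (location_t loc, tree type, tree x)
{
  return omit_one_operand_loc (loc, type, build_int_cst (type, 0), x);
}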
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */
4279 omit_two_operands_loc (location_t loc
, tree type
, tree result
,
4280 tree omitted1
, tree omitted2
)
4282 tree t
= fold_convert_loc (loc
, type
, result
);
4284 if (TREE_SIDE_EFFECTS (omitted2
))
4285 t
= build2_loc (loc
, COMPOUND_EXPR
, type
, omitted2
, t
);
4286 if (TREE_SIDE_EFFECTS (omitted1
))
4287 t
= build2_loc (loc
, COMPOUND_EXPR
, type
, omitted1
, t
);
4289 return TREE_CODE (t
) != COMPOUND_EXPR
? non_lvalue_loc (loc
, t
) : t
;
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */
4301 fold_truth_not_expr (location_t loc
, tree arg
)
4303 tree type
= TREE_TYPE (arg
);
4304 enum tree_code code
= TREE_CODE (arg
);
4305 location_t loc1
, loc2
;
4307 /* If this is a comparison, we can simply invert it, except for
4308 floating-point non-equality comparisons, in which case we just
4309 enclose a TRUTH_NOT_EXPR around what we have. */
4311 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
4313 tree op_type
= TREE_TYPE (TREE_OPERAND (arg
, 0));
4314 if (FLOAT_TYPE_P (op_type
)
4315 && flag_trapping_math
4316 && code
!= ORDERED_EXPR
&& code
!= UNORDERED_EXPR
4317 && code
!= NE_EXPR
&& code
!= EQ_EXPR
)
4320 code
= invert_tree_comparison (code
, HONOR_NANS (op_type
));
4321 if (code
== ERROR_MARK
)
4324 tree ret
= build2_loc (loc
, code
, type
, TREE_OPERAND (arg
, 0),
4325 TREE_OPERAND (arg
, 1));
4326 copy_warning (ret
, arg
);
4333 return constant_boolean_node (integer_zerop (arg
), type
);
4335 case TRUTH_AND_EXPR
:
4336 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
4337 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
4338 return build2_loc (loc
, TRUTH_OR_EXPR
, type
,
4339 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
4340 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
4343 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
4344 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
4345 return build2_loc (loc
, TRUTH_AND_EXPR
, type
,
4346 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
4347 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
4349 case TRUTH_XOR_EXPR
:
4350 /* Here we can invert either operand. We invert the first operand
4351 unless the second operand is a TRUTH_NOT_EXPR in which case our
4352 result is the XOR of the first operand with the inside of the
4353 negation of the second operand. */
4355 if (TREE_CODE (TREE_OPERAND (arg
, 1)) == TRUTH_NOT_EXPR
)
4356 return build2_loc (loc
, TRUTH_XOR_EXPR
, type
, TREE_OPERAND (arg
, 0),
4357 TREE_OPERAND (TREE_OPERAND (arg
, 1), 0));
4359 return build2_loc (loc
, TRUTH_XOR_EXPR
, type
,
4360 invert_truthvalue_loc (loc
, TREE_OPERAND (arg
, 0)),
4361 TREE_OPERAND (arg
, 1));
4363 case TRUTH_ANDIF_EXPR
:
4364 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
4365 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
4366 return build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
4367 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
4368 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
4370 case TRUTH_ORIF_EXPR
:
4371 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
4372 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
4373 return build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
4374 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
4375 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
4377 case TRUTH_NOT_EXPR
:
4378 return TREE_OPERAND (arg
, 0);
4382 tree arg1
= TREE_OPERAND (arg
, 1);
4383 tree arg2
= TREE_OPERAND (arg
, 2);
4385 loc1
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
4386 loc2
= expr_location_or (TREE_OPERAND (arg
, 2), loc
);
4388 /* A COND_EXPR may have a throw as one operand, which
4389 then has void type. Just leave void operands
4391 return build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg
, 0),
4392 VOID_TYPE_P (TREE_TYPE (arg1
))
4393 ? arg1
: invert_truthvalue_loc (loc1
, arg1
),
4394 VOID_TYPE_P (TREE_TYPE (arg2
))
4395 ? arg2
: invert_truthvalue_loc (loc2
, arg2
));
4399 loc1
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
4400 return build2_loc (loc
, COMPOUND_EXPR
, type
,
4401 TREE_OPERAND (arg
, 0),
4402 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 1)));
4404 case NON_LVALUE_EXPR
:
4405 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
4406 return invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0));
4409 if (TREE_CODE (TREE_TYPE (arg
)) == BOOLEAN_TYPE
)
4410 return build1_loc (loc
, TRUTH_NOT_EXPR
, type
, arg
);
4415 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
4416 return build1_loc (loc
, TREE_CODE (arg
), type
,
4417 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)));
4420 if (!integer_onep (TREE_OPERAND (arg
, 1)))
4422 return build2_loc (loc
, EQ_EXPR
, type
, arg
, build_int_cst (type
, 0));
4425 return build1_loc (loc
, TRUTH_NOT_EXPR
, type
, arg
);
4427 case CLEANUP_POINT_EXPR
:
4428 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
4429 return build1_loc (loc
, CLEANUP_POINT_EXPR
, type
,
4430 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)));
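/* Illustrative sketch only (hypothetical helper, not part of GCC): truth
   negation distributes over the short-circuit operators by De Morgan's laws,
   which is what the TRUTH_ANDIF_EXPR/TRUTH_ORIF_EXPR cases above implement:
   !(a && b) becomes !a || !b.  */

static tree
sketch_negate_andif (location_t loc, tree type, tree a, tree b)
{
  return build2_loc (loc, TRUTH_ORIF_EXPR, type,
		     invert_truthvalue_loc (loc, a),
		     invert_truthvalue_loc (loc, b));
}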
/* Fold the truth-negation of ARG.  This never alters ARG itself.  We
   assume that ARG is an operation that returns a truth value (0 or 1
   for scalars, 0 or -1 for vectors).  Return the folded expression if
   folding is successful.  Otherwise, return NULL_TREE.  */
4443 fold_invert_truthvalue (location_t loc
, tree arg
)
4445 tree type
= TREE_TYPE (arg
);
4446 return fold_unary_loc (loc
, VECTOR_TYPE_P (type
)
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1 for scalars, 0 or -1 for vectors).  */

tree
invert_truthvalue_loc (location_t loc, tree arg)
{
  if (TREE_CODE (arg) == ERROR_MARK)
    return arg;

  tree type = TREE_TYPE (arg);
  return fold_build1_loc (loc, VECTOR_TYPE_P (type)
			       ? BIT_NOT_EXPR
			       : TRUTH_NOT_EXPR,
			  type, arg);
}
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero
   and uses reverse storage order if REVERSEP is nonzero.  ORIG_INNER
   is the original memory reference used to preserve the alias set of
   the access.  */

static tree
make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
		    HOST_WIDE_INT bitsize, poly_int64 bitpos,
		    int unsignedp, int reversep)
{
  tree result, bftype;

  /* Attempt not to lose the access path if possible.  */
  if (TREE_CODE (orig_inner) == COMPONENT_REF)
    {
      tree ninner = TREE_OPERAND (orig_inner, 0);
      machine_mode nmode;
      poly_int64 nbitsize, nbitpos;
      tree noffset;
      int nunsignedp, nreversep, nvolatilep = 0;
      tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
				       &noffset, &nmode, &nunsignedp,
				       &nreversep, &nvolatilep);
      if (base == inner
	  && noffset == NULL_TREE
	  && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
	  && !TREE_THIS_VOLATILE (ninner))
	{
	  inner = ninner;
	  bitpos -= nbitpos;
	}
    }

  alias_set_type iset = get_alias_set (orig_inner);
  if (iset == 0 && get_alias_set (inner) != iset)
    inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
			 build_fold_addr_expr (inner),
			 build_int_cst (ptr_type_node, 0));

  if (known_eq (bitpos, 0) && !reversep)
    {
      tree size = TYPE_SIZE (TREE_TYPE (inner));
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
	   || POINTER_TYPE_P (TREE_TYPE (inner)))
	  && tree_fits_shwi_p (size)
	  && tree_to_shwi (size) == bitsize)
	return fold_convert_loc (loc, type, inner);
    }

  bftype = type;
  if (TYPE_PRECISION (bftype) != bitsize
      || TYPE_UNSIGNED (bftype) == !unsignedp)
    bftype = build_nonstandard_integer_type (bitsize, 0);

  result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
		       bitsize_int (bitsize), bitsize_int (bitpos));
  REF_REVERSE_STORAGE_ORDER (result) = reversep;

  if (bftype != type)
    result = fold_convert_loc (loc, type, result);

  return result;
}
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */
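/* For instance, given

     struct S { unsigned int a : 3; unsigned int b : 5; } s;

   a test like "s.b == 7" can be done by loading the chunk containing B
   once, masking out the other bits, and comparing against the constant 7
   shifted into B's position, avoiding the shift that a plain bit-field
   extraction would need.  */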
static tree
optimize_bit_field_compare (location_t loc, enum tree_code code,
			    tree compare_type, tree lhs, tree rhs)
{
  poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
  HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  machine_mode lmode, rmode;
  scalar_int_mode nmode;
  int lunsignedp, runsignedp;
  int lreversep, rreversep;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
				&lunsignedp, &lreversep, &lvolatilep);
  if (linner == lhs
      || !known_size_p (plbitsize)
      || !plbitsize.is_constant (&lbitsize)
      || !plbitpos.is_constant (&lbitpos)
      || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
      || offset != 0
      || TREE_CODE (linner) == PLACEHOLDER_EXPR
      || lvolatilep)
    return 0;

  if (const_p)
    rreversep = lreversep;
  else
    {
      /* If this is not a constant, we can only do something if bit positions,
	 sizes, signedness and storage order are the same.  */
      rinner
	= get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
			       &runsignedp, &rreversep, &rvolatilep);

      if (rinner == rhs
	  || maybe_ne (lbitpos, rbitpos)
	  || maybe_ne (lbitsize, rbitsize)
	  || lunsignedp != runsignedp
	  || lreversep != rreversep
	  || offset != 0
	  || TREE_CODE (rinner) == PLACEHOLDER_EXPR
	  || rvolatilep)
	return 0;
    }

  /* Honor the C++ memory model and mimic what RTL expansion does.  */
  poly_uint64 bitstart = 0;
  poly_uint64 bitend = 0;
  if (TREE_CODE (lhs) == COMPONENT_REF)
    {
      get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
      if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
	return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
		      const_p ? TYPE_ALIGN (TREE_TYPE (linner))
		      : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
			     TYPE_ALIGN (TREE_TYPE (rinner))),
		      BITS_PER_WORD, false, &nmode))
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it. If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_cst_type (unsigned_type, -1);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
  mask = const_binop (RSHIFT_EXPR, mask,
		      size_int (nbitsize - lbitsize - lbitpos));

  if (! const_p)
    {
      if (nbitpos < 0)
	return 0;

      /* If not comparing with constant, just rework the comparison
	 and return.  */
      tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
				    nbitsize, nbitpos, 1, lreversep);
      t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
      tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
				    nbitsize, nbitpos, 1, rreversep);
      t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
      return fold_build2_loc (loc, code, compare_type, t1, t2);
    }

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }
  else
    {
      wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
      if (tem != 0 && tem != -1)
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }

  if (nbitpos < 0)
    return 0;

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = build_int_cst (type, 0);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
			    nbitsize, nbitpos, 1, lreversep);

  rhs = const_binop (BIT_AND_EXPR,
		     const_binop (LSHIFT_EXPR,
				  fold_convert_loc (loc, unsigned_type, rhs),
				  size_int (lbitpos)),
		     mask);

  lhs = build2_loc (loc, code, compare_type,
		    build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
  return lhs;
}
/* Subroutine for fold_truth_andor_1: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PREVERSEP is set to the storage order of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */
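/* For example, for "(s.f & 0x0f) == 0x03" the operand "s.f & 0x0f"
   decomposes into the innermost object containing F, the bit size and
   bit position of F within it, and the 0x0f mask taken from the
   BIT_AND_EXPR; fold_truth_andor_1 then tries to merge several such
   references into a single word-sized load and compare.  */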
4751 decode_field_reference (location_t loc
, tree
*exp_
, HOST_WIDE_INT
*pbitsize
,
4752 HOST_WIDE_INT
*pbitpos
, machine_mode
*pmode
,
4753 int *punsignedp
, int *preversep
, int *pvolatilep
,
4754 tree
*pmask
, tree
*pand_mask
)
4757 tree outer_type
= 0;
4759 tree mask
, inner
, offset
;
4761 unsigned int precision
;
4763 /* All the optimizations using this function assume integer fields.
4764 There are problems with FP fields since the type_for_size call
4765 below can fail for, e.g., XFmode. */
4766 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp
)))
4769 /* We are interested in the bare arrangement of bits, so strip everything
4770 that doesn't affect the machine mode. However, record the type of the
4771 outermost expression if it may matter below. */
4772 if (CONVERT_EXPR_P (exp
)
4773 || TREE_CODE (exp
) == NON_LVALUE_EXPR
)
4774 outer_type
= TREE_TYPE (exp
);
4777 if (TREE_CODE (exp
) == BIT_AND_EXPR
)
4779 and_mask
= TREE_OPERAND (exp
, 1);
4780 exp
= TREE_OPERAND (exp
, 0);
4781 STRIP_NOPS (exp
); STRIP_NOPS (and_mask
);
4782 if (TREE_CODE (and_mask
) != INTEGER_CST
)
4786 poly_int64 poly_bitsize
, poly_bitpos
;
4787 inner
= get_inner_reference (exp
, &poly_bitsize
, &poly_bitpos
, &offset
,
4788 pmode
, punsignedp
, preversep
, pvolatilep
);
4789 if ((inner
== exp
&& and_mask
== 0)
4790 || !poly_bitsize
.is_constant (pbitsize
)
4791 || !poly_bitpos
.is_constant (pbitpos
)
4794 || TREE_CODE (inner
) == PLACEHOLDER_EXPR
4795 /* Reject out-of-bound accesses (PR79731). */
4796 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner
))
4797 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner
)),
4798 *pbitpos
+ *pbitsize
) < 0))
4801 unsigned_type
= lang_hooks
.types
.type_for_size (*pbitsize
, 1);
4802 if (unsigned_type
== NULL_TREE
)
4807 /* If the number of bits in the reference is the same as the bitsize of
4808 the outer type, then the outer type gives the signedness. Otherwise
4809 (in case of a small bitfield) the signedness is unchanged. */
4810 if (outer_type
&& *pbitsize
== TYPE_PRECISION (outer_type
))
4811 *punsignedp
= TYPE_UNSIGNED (outer_type
);
4813 /* Compute the mask to access the bitfield. */
4814 precision
= TYPE_PRECISION (unsigned_type
);
4816 mask
= build_int_cst_type (unsigned_type
, -1);
4818 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
));
4819 mask
= const_binop (RSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
));
4821 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4823 mask
= fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
4824 fold_convert_loc (loc
, unsigned_type
, and_mask
), mask
);
4827 *pand_mask
= and_mask
;
/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bit positions and MASK is SIGNED.  */

static bool
all_ones_mask_p (const_tree mask, unsigned int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);

  /* If this function returns true when the type of the mask is
     UNSIGNED, then there will be errors.  In particular see
     gcc.c-torture/execute/990326-1.c.  There does not appear to be
     any documentation paper trail as to why this is so.  But the pre
     wide-int worked with that restriction and it has been preserved
     here.  */
  if (size > precision || TYPE_SIGN (type) == UNSIGNED)
    return false;

  return wi::mask (size, false, precision) == wi::to_wide (mask);
}
/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */
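/* For a 16-bit signed EXP, for example, the only matching VAL is 0x8000,
   the constant whose sole set bit is the sign bit of the type.  */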
tree
sign_bit_p (tree exp, const_tree val)
{
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (wi::only_sign_bit_p (wi::to_wide (val), width))
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
/* Subroutine for fold_truth_andor_1 and simple_condition_p: determine if an
   operand is simple enough to be evaluated unconditionally.  */

static bool
simple_operand_p (const_tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  STRIP_NOPS (exp);

  return (CONSTANT_CLASS_P (exp)
	  || TREE_CODE (exp) == SSA_NAME
	  || (DECL_P (exp)
	      && ! TREE_ADDRESSABLE (exp)
	      && ! TREE_THIS_VOLATILE (exp)
	      && ! DECL_NONLOCAL (exp)
	      /* Don't regard global variables as simple.  They may be
		 allocated in ways unknown to the compiler (shared memory,
		 #pragma weak, etc).  */
	      && ! TREE_PUBLIC (exp)
	      && ! DECL_EXTERNAL (exp)
	      /* Weakrefs are not safe to be read, since they can be NULL.
		 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
		 have DECL_WEAK flag set.  */
	      && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
	      /* Loading a static variable is unduly expensive, but global
		 registers aren't expensive.  */
	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}
/* Determine if an operand is simple enough to be evaluated unconditionally.
   In addition to simple_operand_p, we assume that comparisons, conversions,
   and logic-not operations are simple, if their operands are simple, too.  */

bool
simple_condition_p (tree exp)
{
  enum tree_code code;

  if (TREE_SIDE_EFFECTS (exp) || generic_expr_could_trap_p (exp))
    return false;

  while (CONVERT_EXPR_P (exp))
    exp = TREE_OPERAND (exp, 0);

  code = TREE_CODE (exp);

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return (simple_operand_p (TREE_OPERAND (exp, 0))
	    && simple_operand_p (TREE_OPERAND (exp, 1)));

  if (code == TRUTH_NOT_EXPR)
    return simple_condition_p (TREE_OPERAND (exp, 0));

  return simple_operand_p (exp);
}
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
	X == 2 || X == 3 || X == 4 || X == 5
   and
	X >= 2 && X <= 5
   are converted to
	(unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
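/* As a concrete instance of this notation, the test
     X == 2 || X == 3 || X == 4 || X == 5
   is the range "+ [2, 5]", its inverse is "- [2, 5]", and the check that
   is finally emitted for "+ [2, 5]" is (unsigned) (X - 2) <= 3.  */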
/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
	     tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
			 arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same. But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number. This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);

  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}
/* Helper routine for make_range.  Perform one step for it, return
   new expression if the loop should continue or NULL_TREE if it should
   stop.  */
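/* For instance, starting from "(unsigned char) x > 10" with the initial
   range "!= 0", the comparison step produces the range "- [-, 10]"
   (i.e. X > 10), and a subsequent conversion step rewrites the bounds in
   the type of X.  */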
tree
make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
		 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
		 bool *strict_overflow_p)
{
5046 tree arg0_type
= TREE_TYPE (arg0
);
5047 tree n_low
, n_high
, low
= *p_low
, high
= *p_high
;
5048 int in_p
= *p_in_p
, n_in_p
;
5052 case TRUTH_NOT_EXPR
:
5053 /* We can only do something if the range is testing for zero. */
5054 if (low
== NULL_TREE
|| high
== NULL_TREE
5055 || ! integer_zerop (low
) || ! integer_zerop (high
))
5060 case EQ_EXPR
: case NE_EXPR
:
5061 case LT_EXPR
: case LE_EXPR
: case GE_EXPR
: case GT_EXPR
:
5062 /* We can only do something if the range is testing for zero
5063 and if the second operand is an integer constant. Note that
5064 saying something is "in" the range we make is done by
5065 complementing IN_P since it will set in the initial case of
5066 being not equal to zero; "out" is leaving it alone. */
5067 if (low
== NULL_TREE
|| high
== NULL_TREE
5068 || ! integer_zerop (low
) || ! integer_zerop (high
)
5069 || TREE_CODE (arg1
) != INTEGER_CST
)
5074 case NE_EXPR
: /* - [c, c] */
5077 case EQ_EXPR
: /* + [c, c] */
5078 in_p
= ! in_p
, low
= high
= arg1
;
5080 case GT_EXPR
: /* - [-, c] */
5081 low
= 0, high
= arg1
;
5083 case GE_EXPR
: /* + [c, -] */
5084 in_p
= ! in_p
, low
= arg1
, high
= 0;
5086 case LT_EXPR
: /* - [c, -] */
5087 low
= arg1
, high
= 0;
5089 case LE_EXPR
: /* + [-, c] */
5090 in_p
= ! in_p
, low
= 0, high
= arg1
;
5096 /* If this is an unsigned comparison, we also know that EXP is
5097 greater than or equal to zero. We base the range tests we make
5098 on that fact, so we record it here so we can parse existing
5099 range tests. We test arg0_type since often the return type
5100 of, e.g. EQ_EXPR, is boolean. */
5101 if (TYPE_UNSIGNED (arg0_type
) && (low
== 0 || high
== 0))
5103 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
,
5105 build_int_cst (arg0_type
, 0),
5109 in_p
= n_in_p
, low
= n_low
, high
= n_high
;
5111 /* If the high bound is missing, but we have a nonzero low
5112 bound, reverse the range so it goes from zero to the low bound
5114 if (high
== 0 && low
&& ! integer_zerop (low
))
5117 high
= range_binop (MINUS_EXPR
, NULL_TREE
, low
, 0,
5118 build_int_cst (TREE_TYPE (low
), 1), 0);
5119 low
= build_int_cst (arg0_type
, 0);
5129 /* If flag_wrapv and ARG0_TYPE is signed, make sure
5130 low and high are non-NULL, then normalize will DTRT. */
5131 if (!TYPE_UNSIGNED (arg0_type
)
5132 && !TYPE_OVERFLOW_UNDEFINED (arg0_type
))
5134 if (low
== NULL_TREE
)
5135 low
= TYPE_MIN_VALUE (arg0_type
);
5136 if (high
== NULL_TREE
)
5137 high
= TYPE_MAX_VALUE (arg0_type
);
5140 /* (-x) IN [a,b] -> x in [-b, -a] */
5141 n_low
= range_binop (MINUS_EXPR
, exp_type
,
5142 build_int_cst (exp_type
, 0),
5144 n_high
= range_binop (MINUS_EXPR
, exp_type
,
5145 build_int_cst (exp_type
, 0),
5147 if (n_high
!= 0 && TREE_OVERFLOW (n_high
))
5153 return build2_loc (loc
, MINUS_EXPR
, exp_type
, negate_expr (arg0
),
5154 build_int_cst (exp_type
, 1));
5158 if (TREE_CODE (arg1
) != INTEGER_CST
)
5161 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
5162 move a constant to the other side. */
5163 if (!TYPE_UNSIGNED (arg0_type
)
5164 && !TYPE_OVERFLOW_UNDEFINED (arg0_type
))
5167 /* If EXP is signed, any overflow in the computation is undefined,
5168 so we don't worry about it so long as our computations on
5169 the bounds don't overflow. For unsigned, overflow is defined
5170 and this is exactly the right thing. */
5171 n_low
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
5172 arg0_type
, low
, 0, arg1
, 0);
5173 n_high
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
5174 arg0_type
, high
, 1, arg1
, 0);
5175 if ((n_low
!= 0 && TREE_OVERFLOW (n_low
))
5176 || (n_high
!= 0 && TREE_OVERFLOW (n_high
)))
5179 if (TYPE_OVERFLOW_UNDEFINED (arg0_type
))
5180 *strict_overflow_p
= true;
5183 /* Check for an unsigned range which has wrapped around the maximum
5184 value thus making n_high < n_low, and normalize it. */
5185 if (n_low
&& n_high
&& tree_int_cst_lt (n_high
, n_low
))
5187 low
= range_binop (PLUS_EXPR
, arg0_type
, n_high
, 0,
5188 build_int_cst (TREE_TYPE (n_high
), 1), 0);
5189 high
= range_binop (MINUS_EXPR
, arg0_type
, n_low
, 0,
5190 build_int_cst (TREE_TYPE (n_low
), 1), 0);
5192 /* If the range is of the form +/- [ x+1, x ], we won't
5193 be able to normalize it. But then, it represents the
5194 whole range or the empty set, so make it
5196 if (tree_int_cst_equal (n_low
, low
)
5197 && tree_int_cst_equal (n_high
, high
))
5203 low
= n_low
, high
= n_high
;
5211 case NON_LVALUE_EXPR
:
5212 if (TYPE_PRECISION (arg0_type
) > TYPE_PRECISION (exp_type
))
5215 if (! INTEGRAL_TYPE_P (arg0_type
)
5216 || (low
!= 0 && ! int_fits_type_p (low
, arg0_type
))
5217 || (high
!= 0 && ! int_fits_type_p (high
, arg0_type
)))
5220 n_low
= low
, n_high
= high
;
5223 n_low
= fold_convert_loc (loc
, arg0_type
, n_low
);
5226 n_high
= fold_convert_loc (loc
, arg0_type
, n_high
);
5228 /* If we're converting arg0 from an unsigned type, to exp,
5229 a signed type, we will be doing the comparison as unsigned.
5230 The tests above have already verified that LOW and HIGH
5233 So we have to ensure that we will handle large unsigned
5234 values the same way that the current signed bounds treat
5237 if (!TYPE_UNSIGNED (exp_type
) && TYPE_UNSIGNED (arg0_type
))
5241 /* For fixed-point modes, we need to pass the saturating flag
5242 as the 2nd parameter. */
5243 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type
)))
5245 = lang_hooks
.types
.type_for_mode (TYPE_MODE (arg0_type
),
5246 TYPE_SATURATING (arg0_type
));
5249 = lang_hooks
.types
.type_for_mode (TYPE_MODE (arg0_type
), 1);
5251 /* A range without an upper bound is, naturally, unbounded.
5252 Since convert would have cropped a very large value, use
5253 the max value for the destination type. */
5255 = TYPE_MAX_VALUE (equiv_type
) ? TYPE_MAX_VALUE (equiv_type
)
5256 : TYPE_MAX_VALUE (arg0_type
);
5258 if (TYPE_PRECISION (exp_type
) == TYPE_PRECISION (arg0_type
))
5259 high_positive
= fold_build2_loc (loc
, RSHIFT_EXPR
, arg0_type
,
5260 fold_convert_loc (loc
, arg0_type
,
5262 build_int_cst (arg0_type
, 1));
5264 /* If the low bound is specified, "and" the range with the
5265 range for which the original unsigned value will be
5269 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
, 1, n_low
, n_high
,
5270 1, fold_convert_loc (loc
, arg0_type
,
5275 in_p
= (n_in_p
== in_p
);
5279 /* Otherwise, "or" the range with the range of the input
5280 that will be interpreted as negative. */
5281 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
, 0, n_low
, n_high
,
5282 1, fold_convert_loc (loc
, arg0_type
,
5287 in_p
= (in_p
!= n_in_p
);
5291 /* Otherwise, if we are converting arg0 from signed type, to exp,
5292 an unsigned type, we will do the comparison as signed. If
5293 high is non-NULL, we punt above if it doesn't fit in the signed
5294 type, so if we get through here, +[-, high] or +[low, high] are
5295 equivalent to +[-, n_high] or +[n_low, n_high]. Similarly,
5296 +[-, -] or -[-, -] are equivalent too. But if low is specified and
5297 high is not, the +[low, -] range is equivalent to union of
5298 +[n_low, -] and +[-, -1] ranges, so +[low, -] is equivalent to
5299 -[0, n_low-1] and similarly -[low, -] to +[0, n_low-1], except for
5300 low being 0, which should be treated as [-, -]. */
5301 else if (TYPE_UNSIGNED (exp_type
)
5302 && !TYPE_UNSIGNED (arg0_type
)
5306 if (integer_zerop (low
))
5310 n_high
= fold_build2_loc (loc
, PLUS_EXPR
, arg0_type
,
5311 n_low
, build_int_cst (arg0_type
, -1));
5312 n_low
= build_zero_cst (arg0_type
);
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same
   type as the returned expression.  If EXP is not a comparison, we
   will most likely not be returning a useful value and range.  Set
   *STRICT_OVERFLOW_P to true if the return value is only valid
   because signed overflow is undefined; otherwise, do not change
   *STRICT_OVERFLOW_P.  */
tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
	    bool *strict_overflow_p)
{
  enum tree_code code;
  tree arg0, arg1 = NULL_TREE;
  tree exp_type, nexp;
  int in_p;
  tree low, high;
  location_t loc = EXPR_LOCATION (exp);

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = build_int_cst (TREE_TYPE (exp), 0);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);
      arg0 = NULL_TREE;

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  if (TREE_OPERAND_LENGTH (exp) > 0)
	    arg0 = TREE_OPERAND (exp, 0);
	  if (TREE_CODE_CLASS (code) == tcc_binary
	      || TREE_CODE_CLASS (code) == tcc_comparison
	      || (TREE_CODE_CLASS (code) == tcc_expression
		  && TREE_OPERAND_LENGTH (exp) > 1))
	    arg1 = TREE_OPERAND (exp, 1);
	}
      if (arg0 == NULL_TREE)
	break;

      nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
			      &high, &in_p, strict_overflow_p);
      if (nexp == NULL_TREE)
	break;
      exp = nexp;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
						 exp, 0, low, 0))
		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
						    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
/* Returns TRUE if [LOW, HIGH] range check can be optimized to
   a bitwise check i.e. when
     LOW  == 0xXX...X00...0
     HIGH == 0xXX...X11...1
   Return corresponding mask in MASK and stem in VALUE.  */
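/* E.g. LOW == 0x30 and HIGH == 0x37 differ only in their low three bits,
   so the range test collapses to (X & ~7) == 0x30: the mask is ~7 and
   the stem value is 0x30.  */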
static bool
maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
		  tree *value)
{
  if (TREE_CODE (low) != INTEGER_CST
      || TREE_CODE (high) != INTEGER_CST)
    return false;

  unsigned prec = TYPE_PRECISION (type);
  wide_int lo = wi::to_wide (low, prec);
  wide_int hi = wi::to_wide (high, prec);

  wide_int end_mask = lo ^ hi;
  if ((end_mask & (end_mask + 1)) != 0
      || (lo & end_mask) != 0)
    return false;

  wide_int stem_mask = ~end_mask;
  wide_int stem = lo & stem_mask;
  if (stem != (hi & stem_mask))
    return false;

  *mask = wide_int_to_tree (type, stem_mask);
  *value = wide_int_to_tree (type, stem);

  return true;
}
/* Helper routine for build_range_check and match.pd.  Return the type to
   perform the check or NULL if it shouldn't be optimized.  */

tree
range_check_type (tree etype)
{
  /* First make sure that arithmetics in this type is valid, then make sure
     that it wraps around.  */
  if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
    etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), 1);

  if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_UNSIGNED (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
	 for the type in question, as we rely on this here.  */
      utype = unsigned_type_for (etype);
      maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
			  build_int_cst (TREE_TYPE (maxv), 1), 1);
      minv = fold_convert (utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
				      minv, 1, maxv, 1)))
	etype = utype;
      else
	return NULL_TREE;
    }
  else if (POINTER_TYPE_P (etype) || TREE_CODE (etype) == OFFSET_TYPE)
    etype = unsigned_type_for (etype);

  return etype;
}
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.  */
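/* The classic case is a character-class test: for LOW == '0', HIGH == '9'
   and IN_P nonzero, the returned expression amounts to
   (unsigned) (EXP - '0') <= 9.  */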
5470 build_range_check (location_t loc
, tree type
, tree exp
, int in_p
,
5471 tree low
, tree high
)
5473 tree etype
= TREE_TYPE (exp
), mask
, value
;
5475 /* Disable this optimization for function pointer expressions
5476 on targets that require function pointer canonicalization. */
5477 if (targetm
.have_canonicalize_funcptr_for_compare ()
5478 && POINTER_TYPE_P (etype
)
5479 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype
)))
5484 value
= build_range_check (loc
, type
, exp
, 1, low
, high
);
5486 return invert_truthvalue_loc (loc
, value
);
5491 if (low
== 0 && high
== 0)
5492 return omit_one_operand_loc (loc
, type
, build_int_cst (type
, 1), exp
);
5495 return fold_build2_loc (loc
, LE_EXPR
, type
, exp
,
5496 fold_convert_loc (loc
, etype
, high
));
5499 return fold_build2_loc (loc
, GE_EXPR
, type
, exp
,
5500 fold_convert_loc (loc
, etype
, low
));
5502 if (operand_equal_p (low
, high
, 0))
5503 return fold_build2_loc (loc
, EQ_EXPR
, type
, exp
,
5504 fold_convert_loc (loc
, etype
, low
));
5506 if (TREE_CODE (exp
) == BIT_AND_EXPR
5507 && maskable_range_p (low
, high
, etype
, &mask
, &value
))
5508 return fold_build2_loc (loc
, EQ_EXPR
, type
,
5509 fold_build2_loc (loc
, BIT_AND_EXPR
, etype
,
5513 if (integer_zerop (low
))
5515 if (! TYPE_UNSIGNED (etype
))
5517 etype
= unsigned_type_for (etype
);
5518 high
= fold_convert_loc (loc
, etype
, high
);
5519 exp
= fold_convert_loc (loc
, etype
, exp
);
5521 return build_range_check (loc
, type
, exp
, 1, 0, high
);
5524 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
5525 if (integer_onep (low
) && TREE_CODE (high
) == INTEGER_CST
)
5527 int prec
= TYPE_PRECISION (etype
);
5529 if (wi::mask
<widest_int
> (prec
- 1, false) == wi::to_widest (high
))
5531 if (TYPE_UNSIGNED (etype
))
5533 tree signed_etype
= signed_type_for (etype
);
5534 if (TYPE_PRECISION (signed_etype
) != TYPE_PRECISION (etype
))
5536 = build_nonstandard_integer_type (TYPE_PRECISION (etype
), 0);
5538 etype
= signed_etype
;
5539 exp
= fold_convert_loc (loc
, etype
, exp
);
5541 return fold_build2_loc (loc
, GT_EXPR
, type
, exp
,
5542 build_int_cst (etype
, 0));
5546 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5547 This requires wrap-around arithmetics for the type of the expression. */
5548 etype
= range_check_type (etype
);
5549 if (etype
== NULL_TREE
)
5552 high
= fold_convert_loc (loc
, etype
, high
);
5553 low
= fold_convert_loc (loc
, etype
, low
);
5554 exp
= fold_convert_loc (loc
, etype
, exp
);
5556 value
= const_binop (MINUS_EXPR
, high
, low
);
5558 if (value
!= 0 && !TREE_OVERFLOW (value
))
5559 return build_range_check (loc
, type
,
5560 fold_build2_loc (loc
, MINUS_EXPR
, etype
, exp
, low
),
5561 1, build_int_cst (etype
, 0), value
);
/* Return the predecessor of VAL in its type, handling the infinite case.  */

static tree
range_predecessor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
    return 0;
  else
    return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
			build_int_cst (TREE_TYPE (val), 1), 0);
}
/* Return the successor of VAL in its type, handling the infinite case.  */

static tree
range_successor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
    return 0;
  else
    return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
			build_int_cst (TREE_TYPE (val), 1), 0);
}
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */
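/* For example, the tests X >= 2 && X <= 5 and X >= 6 && X <= 9 describe
   the ranges "+ [2, 5]" and "+ [6, 9]"; their conjunction is empty
   (constant false), while their inverted forms "- [2, 5]" and "- [6, 9]"
   are adjacent and merge into the single range "- [2, 9]".  */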
5600 merge_ranges (int *pin_p
, tree
*plow
, tree
*phigh
, int in0_p
, tree low0
,
5601 tree high0
, int in1_p
, tree low1
, tree high1
)
5609 int lowequal
= ((low0
== 0 && low1
== 0)
5610 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
5611 low0
, 0, low1
, 0)));
5612 int highequal
= ((high0
== 0 && high1
== 0)
5613 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
5614 high0
, 1, high1
, 1)));
5616 /* Make range 0 be the range that starts first, or ends last if they
5617 start at the same value. Swap them if it isn't. */
5618 if (integer_onep (range_binop (GT_EXPR
, integer_type_node
,
5621 && integer_onep (range_binop (GT_EXPR
, integer_type_node
,
5622 high1
, 1, high0
, 1))))
5624 temp
= in0_p
, in0_p
= in1_p
, in1_p
= temp
;
5625 tem
= low0
, low0
= low1
, low1
= tem
;
5626 tem
= high0
, high0
= high1
, high1
= tem
;
5629 /* If the second range is != high1 where high1 is the type maximum of
5630 the type, try first merging with < high1 range. */
5633 && TREE_CODE (low1
) == INTEGER_CST
5634 && (TREE_CODE (TREE_TYPE (low1
)) == INTEGER_TYPE
5635 || (TREE_CODE (TREE_TYPE (low1
)) == ENUMERAL_TYPE
5636 && known_eq (TYPE_PRECISION (TREE_TYPE (low1
)),
5637 GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1
))))))
5638 && operand_equal_p (low1
, high1
, 0))
5640 if (tree_int_cst_equal (low1
, TYPE_MAX_VALUE (TREE_TYPE (low1
)))
5641 && merge_ranges (pin_p
, plow
, phigh
, in0_p
, low0
, high0
,
5642 !in1_p
, NULL_TREE
, range_predecessor (low1
)))
5644 /* Similarly for the second range != low1 where low1 is the type minimum
5645 of the type, try first merging with > low1 range. */
5646 if (tree_int_cst_equal (low1
, TYPE_MIN_VALUE (TREE_TYPE (low1
)))
5647 && merge_ranges (pin_p
, plow
, phigh
, in0_p
, low0
, high0
,
5648 !in1_p
, range_successor (low1
), NULL_TREE
))
5652 /* Now flag two cases, whether the ranges are disjoint or whether the
5653 second range is totally subsumed in the first. Note that the tests
5654 below are simplified by the ones above. */
5655 no_overlap
= integer_onep (range_binop (LT_EXPR
, integer_type_node
,
5656 high0
, 1, low1
, 0));
5657 subset
= integer_onep (range_binop (LE_EXPR
, integer_type_node
,
5658 high1
, 1, high0
, 1));
5660 /* We now have four cases, depending on whether we are including or
5661 excluding the two ranges. */
5664 /* If they don't overlap, the result is false. If the second range
5665 is a subset it is the result. Otherwise, the range is from the start
5666 of the second to the end of the first. */
5668 in_p
= 0, low
= high
= 0;
5670 in_p
= 1, low
= low1
, high
= high1
;
5672 in_p
= 1, low
= low1
, high
= high0
;
5675 else if (in0_p
&& ! in1_p
)
5677 /* If they don't overlap, the result is the first range. If they are
5678 equal, the result is false. If the second range is a subset of the
5679 first, and the ranges begin at the same place, we go from just after
5680 the end of the second range to the end of the first. If the second
5681 range is not a subset of the first, or if it is a subset and both
5682 ranges end at the same place, the range starts at the start of the
5683 first range and ends just before the second range.
5684 Otherwise, we can't describe this as a single range. */
5686 in_p
= 1, low
= low0
, high
= high0
;
5687 else if (lowequal
&& highequal
)
5688 in_p
= 0, low
= high
= 0;
5689 else if (subset
&& lowequal
)
5691 low
= range_successor (high1
);
5696 /* We are in the weird situation where high0 > high1 but
5697 high1 has no successor. Punt. */
5701 else if (! subset
|| highequal
)
5704 high
= range_predecessor (low1
);
5708 /* low0 < low1 but low1 has no predecessor. Punt. */
5716 else if (! in0_p
&& in1_p
)
5718 /* If they don't overlap, the result is the second range. If the second
5719 is a subset of the first, the result is false. Otherwise,
5720 the range starts just after the first range and ends at the
5721 end of the second. */
5723 in_p
= 1, low
= low1
, high
= high1
;
5724 else if (subset
|| highequal
)
5725 in_p
= 0, low
= high
= 0;
5728 low
= range_successor (high0
);
5733 /* high1 > high0 but high0 has no successor. Punt. */
5741 /* The case where we are excluding both ranges. Here the complex case
5742 is if they don't overlap. In that case, the only time we have a
5743 range is if they are adjacent. If the second is a subset of the
5744 first, the result is the first. Otherwise, the range to exclude
5745 starts at the beginning of the first range and ends at the end of the
5749 if (integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
5750 range_successor (high0
),
5752 in_p
= 0, low
= low0
, high
= high1
;
5755 /* Canonicalize - [min, x] into - [-, x]. */
5756 if (low0
&& TREE_CODE (low0
) == INTEGER_CST
)
5757 switch (TREE_CODE (TREE_TYPE (low0
)))
5760 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0
)),
5762 (TYPE_MODE (TREE_TYPE (low0
)))))
5766 if (tree_int_cst_equal (low0
,
5767 TYPE_MIN_VALUE (TREE_TYPE (low0
))))
5771 if (TYPE_UNSIGNED (TREE_TYPE (low0
))
5772 && integer_zerop (low0
))
5779 /* Canonicalize - [x, max] into - [x, -]. */
5780 if (high1
&& TREE_CODE (high1
) == INTEGER_CST
)
5781 switch (TREE_CODE (TREE_TYPE (high1
)))
5784 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1
)),
5786 (TYPE_MODE (TREE_TYPE (high1
)))))
5790 if (tree_int_cst_equal (high1
,
5791 TYPE_MAX_VALUE (TREE_TYPE (high1
))))
5795 if (TYPE_UNSIGNED (TREE_TYPE (high1
))
5796 && integer_zerop (range_binop (PLUS_EXPR
, NULL_TREE
,
5798 build_int_cst (TREE_TYPE (high1
), 1),
5806 /* The ranges might be also adjacent between the maximum and
5807 minimum values of the given type. For
5808 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5809 return + [x + 1, y - 1]. */
5810 if (low0
== 0 && high1
== 0)
5812 low
= range_successor (high0
);
5813 high
= range_predecessor (low1
);
5814 if (low
== 0 || high
== 0)
5824 in_p
= 0, low
= low0
, high
= high0
;
5826 in_p
= 0, low
= low0
, high
= high1
;
5829 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where (ARG00, COMP_CODE, ARG01), ARG1 and ARG2
   are the three operands of the COND_EXPR.  This function is
   being used also to optimize A op B ? C : A, by reversing the
   comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */
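/* Typical folds handled here are A > B ? A : B becoming MAX_EXPR (B, A)
   and A >= 0 ? A : -A becoming ABS_EXPR <A>, subject to the signed-zero
   and NaN caveats spelled out below.  */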
5844 fold_cond_expr_with_comparison (location_t loc
, tree type
,
5845 enum tree_code comp_code
,
5846 tree arg00
, tree arg01
, tree arg1
, tree arg2
)
5848 tree arg1_type
= TREE_TYPE (arg1
);
5854 /* If we have A op 0 ? A : -A, consider applying the following
5857 A == 0? A : -A same as -A
5858 A != 0? A : -A same as A
5859 A >= 0? A : -A same as abs (A)
5860 A > 0? A : -A same as abs (A)
5861 A <= 0? A : -A same as -abs (A)
5862 A < 0? A : -A same as -abs (A)
5864 None of these transformations work for modes with signed
5865 zeros. If A is +/-0, the first two transformations will
5866 change the sign of the result (from +0 to -0, or vice
5867 versa). The last four will fix the sign of the result,
5868 even though the original expressions could be positive or
5869 negative, depending on the sign of A.
5871 Note that all these transformations are correct if A is
5872 NaN, since the two alternatives (A and -A) are also NaNs. */
5873 if (!HONOR_SIGNED_ZEROS (type
)
5874 && (FLOAT_TYPE_P (TREE_TYPE (arg01
))
5875 ? real_zerop (arg01
)
5876 : integer_zerop (arg01
))
5877 && ((TREE_CODE (arg2
) == NEGATE_EXPR
5878 && operand_equal_p (TREE_OPERAND (arg2
, 0), arg1
, 0))
5879 /* In the case that A is of the form X-Y, '-A' (arg2) may
5880 have already been folded to Y-X, check for that. */
5881 || (TREE_CODE (arg1
) == MINUS_EXPR
5882 && TREE_CODE (arg2
) == MINUS_EXPR
5883 && operand_equal_p (TREE_OPERAND (arg1
, 0),
5884 TREE_OPERAND (arg2
, 1), 0)
5885 && operand_equal_p (TREE_OPERAND (arg1
, 1),
5886 TREE_OPERAND (arg2
, 0), 0))))
5891 tem
= fold_convert_loc (loc
, arg1_type
, arg1
);
5892 return fold_convert_loc (loc
, type
, negate_expr (tem
));
5895 return fold_convert_loc (loc
, type
, arg1
);
5898 if (flag_trapping_math
)
5903 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
5905 tem
= fold_build1_loc (loc
, ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
5906 return fold_convert_loc (loc
, type
, tem
);
5909 if (flag_trapping_math
)
5914 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
5916 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
5917 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1
)))
5919 /* A <= 0 ? A : -A for A INT_MIN is valid, but -abs(INT_MIN)
5920 is not, invokes UB both in abs and in the negation of it.
5921 So, use ABSU_EXPR instead. */
5922 tree utype
= unsigned_type_for (TREE_TYPE (arg1
));
5923 tem
= fold_build1_loc (loc
, ABSU_EXPR
, utype
, arg1
);
5924 tem
= negate_expr (tem
);
5925 return fold_convert_loc (loc
, type
, tem
);
5929 tem
= fold_build1_loc (loc
, ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
5930 return negate_expr (fold_convert_loc (loc
, type
, tem
));
5933 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
5937 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5938 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5939 both transformations are correct when A is NaN: A != 0
5940 is then true, and A == 0 is false. */
5942 if (!HONOR_SIGNED_ZEROS (type
)
5943 && integer_zerop (arg01
) && integer_zerop (arg2
))
5945 if (comp_code
== NE_EXPR
)
5946 return fold_convert_loc (loc
, type
, arg1
);
5947 else if (comp_code
== EQ_EXPR
)
5948 return build_zero_cst (type
);
5951 /* Try some transformations of A op B ? A : B.
5953 A == B? A : B same as B
5954 A != B? A : B same as A
5955 A >= B? A : B same as max (A, B)
5956 A > B? A : B same as max (B, A)
5957 A <= B? A : B same as min (A, B)
5958 A < B? A : B same as min (B, A)
5960 As above, these transformations don't work in the presence
5961 of signed zeros. For example, if A and B are zeros of
5962 opposite sign, the first two transformations will change
5963 the sign of the result. In the last four, the original
5964 expressions give different results for (A=+0, B=-0) and
5965 (A=-0, B=+0), but the transformed expressions do not.
5967 The first two transformations are correct if either A or B
5968 is a NaN. In the first transformation, the condition will
5969 be false, and B will indeed be chosen. In the case of the
5970 second transformation, the condition A != B will be true,
5971 and A will be chosen.
5973 The conversions to max() and min() are not correct if B is
5974 a number and A is not. The conditions in the original
5975 expressions will be false, so all four give B. The min()
5976 and max() versions would give a NaN instead. */
5977 if (!HONOR_SIGNED_ZEROS (type
)
5978 && operand_equal_for_comparison_p (arg01
, arg2
)
5979 /* Avoid these transformations if the COND_EXPR may be used
5980 as an lvalue in the C++ front-end. PR c++/19199. */
5982 || VECTOR_TYPE_P (type
)
5983 || (! lang_GNU_CXX ()
5984 && strcmp (lang_hooks
.name
, "GNU Objective-C++") != 0)
5985 || ! maybe_lvalue_p (arg1
)
5986 || ! maybe_lvalue_p (arg2
)))
5988 tree comp_op0
= arg00
;
5989 tree comp_op1
= arg01
;
5990 tree comp_type
= TREE_TYPE (comp_op0
);
5995 return fold_convert_loc (loc
, type
, arg2
);
5997 return fold_convert_loc (loc
, type
, arg1
);
6002 /* In C++ a ?: expression can be an lvalue, so put the
6003 operand which will be used if they are equal first
6004 so that we can convert this back to the
6005 corresponding COND_EXPR. */
6006 if (!HONOR_NANS (arg1
))
6008 comp_op0
= fold_convert_loc (loc
, comp_type
, comp_op0
);
6009 comp_op1
= fold_convert_loc (loc
, comp_type
, comp_op1
);
6010 tem
= (comp_code
== LE_EXPR
|| comp_code
== UNLE_EXPR
)
6011 ? fold_build2_loc (loc
, MIN_EXPR
, comp_type
, comp_op0
, comp_op1
)
6012 : fold_build2_loc (loc
, MIN_EXPR
, comp_type
,
6013 comp_op1
, comp_op0
);
6014 return fold_convert_loc (loc
, type
, tem
);
6021 if (!HONOR_NANS (arg1
))
6023 comp_op0
= fold_convert_loc (loc
, comp_type
, comp_op0
);
6024 comp_op1
= fold_convert_loc (loc
, comp_type
, comp_op1
);
6025 tem
= (comp_code
== GE_EXPR
|| comp_code
== UNGE_EXPR
)
6026 ? fold_build2_loc (loc
, MAX_EXPR
, comp_type
, comp_op0
, comp_op1
)
6027 : fold_build2_loc (loc
, MAX_EXPR
, comp_type
,
6028 comp_op1
, comp_op0
);
6029 return fold_convert_loc (loc
, type
, tem
);
6033 if (!HONOR_NANS (arg1
))
6034 return fold_convert_loc (loc
, type
, arg2
);
6037 if (!HONOR_NANS (arg1
))
6038 return fold_convert_loc (loc
, type
, arg1
);
6041 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
		false) >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */
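/* For instance, "a >= 0 && a < 16" on a signed integer A is recognized
   as the range "+ [0, 15]" and rewritten as the single test
   (unsigned) a <= 15.  */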
6061 fold_range_test (location_t loc
, enum tree_code code
, tree type
,
6064 int or_op
= (code
== TRUTH_ORIF_EXPR
6065 || code
== TRUTH_OR_EXPR
);
6066 int in0_p
, in1_p
, in_p
;
6067 tree low0
, low1
, low
, high0
, high1
, high
;
6068 bool strict_overflow_p
= false;
6070 const char * const warnmsg
= G_("assuming signed overflow does not occur "
6071 "when simplifying range test");
6073 if (!INTEGRAL_TYPE_P (type
))
6076 lhs
= make_range (op0
, &in0_p
, &low0
, &high0
, &strict_overflow_p
);
6077 /* If op0 is known true or false and this is a short-circuiting
6078 operation we must not merge with op1 since that makes side-effects
6079 unconditional. So special-case this. */
6081 && ((code
== TRUTH_ORIF_EXPR
&& in0_p
)
6082 || (code
== TRUTH_ANDIF_EXPR
&& !in0_p
)))
6084 rhs
= make_range (op1
, &in1_p
, &low1
, &high1
, &strict_overflow_p
);
6086 /* If this is an OR operation, invert both sides; we will invert
6087 again at the end. */
6089 in0_p
= ! in0_p
, in1_p
= ! in1_p
;
6091 /* If both expressions are the same, if we can merge the ranges, and we
6092 can build the range test, return it or it inverted. If one of the
6093 ranges is always true or always false, consider it to be the same
6094 expression as the other. */
6095 if ((lhs
== 0 || rhs
== 0 || operand_equal_p (lhs
, rhs
, 0))
6096 && merge_ranges (&in_p
, &low
, &high
, in0_p
, low0
, high0
,
6098 && (tem
= (build_range_check (loc
, type
,
6100 : rhs
!= 0 ? rhs
: integer_zero_node
,
6101 in_p
, low
, high
))) != 0)
6103 if (strict_overflow_p
)
6104 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
6105 return or_op
? invert_truthvalue_loc (loc
, tem
) : tem
;
6108 /* On machines where the branch cost is expensive, if this is a
6109 short-circuited branch and the underlying object on both sides
6110 is the same, make a non-short-circuit operation. */
6111 bool logical_op_non_short_circuit
= LOGICAL_OP_NON_SHORT_CIRCUIT
;
6112 if (param_logical_op_non_short_circuit
!= -1)
6113 logical_op_non_short_circuit
6114 = param_logical_op_non_short_circuit
;
6115 if (logical_op_non_short_circuit
6116 && !sanitize_coverage_p ()
6117 && lhs
!= 0 && rhs
!= 0
6118 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
)
6119 && operand_equal_p (lhs
, rhs
, 0))
6121 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
6122 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
6123 which cases we can't do this. */
6124 if (simple_operand_p (lhs
))
6125 return build2_loc (loc
, code
== TRUTH_ANDIF_EXPR
6126 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
6129 else if (!lang_hooks
.decls
.global_bindings_p ()
6130 && !CONTAINS_PLACEHOLDER_P (lhs
))
6132 tree common
= save_expr (lhs
);
6134 if ((lhs
= build_range_check (loc
, type
, common
,
6135 or_op
? ! in0_p
: in0_p
,
6137 && (rhs
= build_range_check (loc
, type
, common
,
6138 or_op
? ! in1_p
: in1_p
,
6141 if (strict_overflow_p
)
6142 fold_overflow_warning (warnmsg
,
6143 WARN_STRICT_OVERFLOW_COMPARISON
);
6144 return build2_loc (loc
, code
== TRUTH_ANDIF_EXPR
6145 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
/* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is signed-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */
static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = build_int_cst (TREE_TYPE (c),
			wi::extract_uhwi (wi::to_wide (c), p - 1, 1));

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (signed_type_for (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
			fold_convert (TREE_TYPE (c), mask));
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
}
/* For an expression that has the form
     (A && B) || ~B
   or
     (A || B) && ~B,
   we can drop one of the inner expressions and simplify to
     A || ~B
   or
     A && ~B.
   LOC is the location of the resulting expression.  OP is the inner
   logical operation; the left-hand side in the examples above, while CMPOP
   is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
   removing a condition that guards another, as in
     (A != NULL && A->...) || A == NULL
   which we must not transform.  If RHS_ONLY is true, only eliminate the
   right-most operand of the inner logical operation.  */
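/* For instance, in (b != 0 && a > 0) || a <= 0 the right-most inner
   operand a > 0 is exactly the opposite of the other arm a <= 0, so it
   can be dropped and the whole expression simplifies to
   b != 0 || a <= 0 (given no side effects and no NaN-sensitive
   comparisons).  */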
static tree
merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
				 bool rhs_only)
{
  tree type = TREE_TYPE (cmpop);
  enum tree_code code = TREE_CODE (cmpop);
  enum tree_code truthop_code = TREE_CODE (op);
  tree lhs = TREE_OPERAND (op, 0);
  tree rhs = TREE_OPERAND (op, 1);
  tree orig_lhs = lhs, orig_rhs = rhs;
  enum tree_code rhs_code = TREE_CODE (rhs);
  enum tree_code lhs_code = TREE_CODE (lhs);
  enum tree_code inv_code;

  if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
    return NULL_TREE;

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return NULL_TREE;

  if (rhs_code == truthop_code)
    {
      tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
      if (newrhs != NULL_TREE)
	{
	  rhs = newrhs;
	  rhs_code = TREE_CODE (rhs);
	}
    }
  if (lhs_code == truthop_code && !rhs_only)
    {
      tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
      if (newlhs != NULL_TREE)
	{
	  lhs = newlhs;
	  lhs_code = TREE_CODE (lhs);
	}
    }

  inv_code = invert_tree_comparison (code, HONOR_NANS (type));
  if (inv_code == rhs_code
      && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return lhs;
  if (!rhs_only && inv_code == lhs_code
      && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return rhs;
  if (rhs != orig_rhs || lhs != orig_lhs)
    return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
			    lhs, rhs);
  return NULL_TREE;
}
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */
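/* E.g. for a struct with adjacent bit-fields

     struct S { unsigned int a : 4; unsigned int b : 4; } *p;

   the test "p->a == 2 && p->b == 4" can load the chunk containing both
   fields once and compare it, suitably masked, against the constant that
   has 2 and 4 in the corresponding bit positions.  */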
6291 fold_truth_andor_1 (location_t loc
, enum tree_code code
, tree truth_type
,
6294 /* If this is the "or" of two comparisons, we can do something if
6295 the comparisons are NE_EXPR. If this is the "and", we can do something
6296 if the comparisons are EQ_EXPR. I.e.,
6297 (a->b == 2 && a->c == 4) can become (a->new == NEW).
6299 WANTED_CODE is this operation code. For single bit fields, we can
6300 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
6301 comparison for one-bit fields. */
6303 enum tree_code wanted_code
;
6304 enum tree_code lcode
, rcode
;
6305 tree ll_arg
, lr_arg
, rl_arg
, rr_arg
;
6306 tree ll_inner
, lr_inner
, rl_inner
, rr_inner
;
6307 HOST_WIDE_INT ll_bitsize
, ll_bitpos
, lr_bitsize
, lr_bitpos
;
6308 HOST_WIDE_INT rl_bitsize
, rl_bitpos
, rr_bitsize
, rr_bitpos
;
6309 HOST_WIDE_INT xll_bitpos
, xlr_bitpos
, xrl_bitpos
, xrr_bitpos
;
6310 HOST_WIDE_INT lnbitsize
, lnbitpos
, rnbitsize
, rnbitpos
;
6311 int ll_unsignedp
, lr_unsignedp
, rl_unsignedp
, rr_unsignedp
;
6312 int ll_reversep
, lr_reversep
, rl_reversep
, rr_reversep
;
6313 machine_mode ll_mode
, lr_mode
, rl_mode
, rr_mode
;
6314 scalar_int_mode lnmode
, rnmode
;
6315 tree ll_mask
, lr_mask
, rl_mask
, rr_mask
;
6316 tree ll_and_mask
, lr_and_mask
, rl_and_mask
, rr_and_mask
;
6317 tree l_const
, r_const
;
6318 tree lntype
, rntype
, result
;
6319 HOST_WIDE_INT first_bit
, end_bit
;
6322 /* Start by getting the comparison codes. Fail if anything is volatile.
6323 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
6324 it were surrounded with a NE_EXPR. */
  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
                    build_int_cst (TREE_TYPE (lhs), 0));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
                    build_int_cst (TREE_TYPE (rhs), 0));
      rcode = NE_EXPR;
    }
6346 if (TREE_CODE_CLASS (lcode
) != tcc_comparison
6347 || TREE_CODE_CLASS (rcode
) != tcc_comparison
)
6350 ll_arg
= TREE_OPERAND (lhs
, 0);
6351 lr_arg
= TREE_OPERAND (lhs
, 1);
6352 rl_arg
= TREE_OPERAND (rhs
, 0);
6353 rr_arg
= TREE_OPERAND (rhs
, 1);
6355 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
6356 if (simple_operand_p (ll_arg
)
6357 && simple_operand_p (lr_arg
))
6359 if (operand_equal_p (ll_arg
, rl_arg
, 0)
6360 && operand_equal_p (lr_arg
, rr_arg
, 0))
6362 result
= combine_comparisons (loc
, code
, lcode
, rcode
,
6363 truth_type
, ll_arg
, lr_arg
);
6367 else if (operand_equal_p (ll_arg
, rr_arg
, 0)
6368 && operand_equal_p (lr_arg
, rl_arg
, 0))
6370 result
= combine_comparisons (loc
, code
, lcode
,
6371 swap_tree_comparison (rcode
),
6372 truth_type
, ll_arg
, lr_arg
);
6378 code
= ((code
== TRUTH_AND_EXPR
|| code
== TRUTH_ANDIF_EXPR
)
6379 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
);
  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  */
6386 if (BRANCH_COST (optimize_function_for_speed_p (cfun
),
6388 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg
))
6389 && simple_operand_p (rl_arg
)
6390 && simple_operand_p (rr_arg
))
6392 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
6393 if (code
== TRUTH_OR_EXPR
6394 && lcode
== NE_EXPR
&& integer_zerop (lr_arg
)
6395 && rcode
== NE_EXPR
&& integer_zerop (rr_arg
)
6396 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
)
6397 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg
)))
6398 return build2_loc (loc
, NE_EXPR
, truth_type
,
6399 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
6401 build_int_cst (TREE_TYPE (ll_arg
), 0));
6403 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
6404 if (code
== TRUTH_AND_EXPR
6405 && lcode
== EQ_EXPR
&& integer_zerop (lr_arg
)
6406 && rcode
== EQ_EXPR
&& integer_zerop (rr_arg
)
6407 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
)
6408 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg
)))
6409 return build2_loc (loc
, EQ_EXPR
, truth_type
,
6410 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
6412 build_int_cst (TREE_TYPE (ll_arg
), 0));
6415 /* See if the comparisons can be merged. Then get all the parameters for
6418 if ((lcode
!= EQ_EXPR
&& lcode
!= NE_EXPR
)
6419 || (rcode
!= EQ_EXPR
&& rcode
!= NE_EXPR
))
6422 ll_reversep
= lr_reversep
= rl_reversep
= rr_reversep
= 0;
6424 ll_inner
= decode_field_reference (loc
, &ll_arg
,
6425 &ll_bitsize
, &ll_bitpos
, &ll_mode
,
6426 &ll_unsignedp
, &ll_reversep
, &volatilep
,
6427 &ll_mask
, &ll_and_mask
);
6428 lr_inner
= decode_field_reference (loc
, &lr_arg
,
6429 &lr_bitsize
, &lr_bitpos
, &lr_mode
,
6430 &lr_unsignedp
, &lr_reversep
, &volatilep
,
6431 &lr_mask
, &lr_and_mask
);
6432 rl_inner
= decode_field_reference (loc
, &rl_arg
,
6433 &rl_bitsize
, &rl_bitpos
, &rl_mode
,
6434 &rl_unsignedp
, &rl_reversep
, &volatilep
,
6435 &rl_mask
, &rl_and_mask
);
6436 rr_inner
= decode_field_reference (loc
, &rr_arg
,
6437 &rr_bitsize
, &rr_bitpos
, &rr_mode
,
6438 &rr_unsignedp
, &rr_reversep
, &volatilep
,
6439 &rr_mask
, &rr_and_mask
);
  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
6446 || ll_reversep
!= rl_reversep
6447 || ll_inner
== 0 || rl_inner
== 0
6448 || ! operand_equal_p (ll_inner
, rl_inner
, 0))
6451 if (TREE_CODE (lr_arg
) == INTEGER_CST
6452 && TREE_CODE (rr_arg
) == INTEGER_CST
)
6454 l_const
= lr_arg
, r_const
= rr_arg
;
6455 lr_reversep
= ll_reversep
;
6457 else if (lr_reversep
!= rr_reversep
6458 || lr_inner
== 0 || rr_inner
== 0
6459 || ! operand_equal_p (lr_inner
, rr_inner
, 0))
6462 l_const
= r_const
= 0;
  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */
6468 wanted_code
= (code
== TRUTH_AND_EXPR
? EQ_EXPR
: NE_EXPR
);
6469 if (lcode
!= wanted_code
)
6471 if (l_const
&& integer_zerop (l_const
) && integer_pow2p (ll_mask
))
6473 /* Make the left operand unsigned, since we are only interested
6474 in the value of one bit. Otherwise we are doing the wrong
6483 /* This is analogous to the code for l_const above. */
6484 if (rcode
!= wanted_code
)
6486 if (r_const
&& integer_zerop (r_const
) && integer_pow2p (rl_mask
))
  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
6498 first_bit
= MIN (ll_bitpos
, rl_bitpos
);
6499 end_bit
= MAX (ll_bitpos
+ ll_bitsize
, rl_bitpos
+ rl_bitsize
);
6500 if (!get_best_mode (end_bit
- first_bit
, first_bit
, 0, 0,
6501 TYPE_ALIGN (TREE_TYPE (ll_inner
)), BITS_PER_WORD
,
6502 volatilep
, &lnmode
))
6505 lnbitsize
= GET_MODE_BITSIZE (lnmode
);
6506 lnbitpos
= first_bit
& ~ (lnbitsize
- 1);
6507 lntype
= lang_hooks
.types
.type_for_size (lnbitsize
, 1);
6508 xll_bitpos
= ll_bitpos
- lnbitpos
, xrl_bitpos
= rl_bitpos
- lnbitpos
;
6510 if (ll_reversep
? !BYTES_BIG_ENDIAN
: BYTES_BIG_ENDIAN
)
6512 xll_bitpos
= lnbitsize
- xll_bitpos
- ll_bitsize
;
6513 xrl_bitpos
= lnbitsize
- xrl_bitpos
- rl_bitsize
;
6516 ll_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
, lntype
, ll_mask
),
6517 size_int (xll_bitpos
));
6518 rl_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
, lntype
, rl_mask
),
6519 size_int (xrl_bitpos
));
6520 if (ll_mask
== NULL_TREE
|| rl_mask
== NULL_TREE
)
6525 l_const
= fold_convert_loc (loc
, lntype
, l_const
);
6526 l_const
= unextend (l_const
, ll_bitsize
, ll_unsignedp
, ll_and_mask
);
6527 l_const
= const_binop (LSHIFT_EXPR
, l_const
, size_int (xll_bitpos
));
6528 if (l_const
== NULL_TREE
)
6530 if (! integer_zerop (const_binop (BIT_AND_EXPR
, l_const
,
6531 fold_build1_loc (loc
, BIT_NOT_EXPR
,
6534 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
6536 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
6541 r_const
= fold_convert_loc (loc
, lntype
, r_const
);
6542 r_const
= unextend (r_const
, rl_bitsize
, rl_unsignedp
, rl_and_mask
);
6543 r_const
= const_binop (LSHIFT_EXPR
, r_const
, size_int (xrl_bitpos
));
6544 if (r_const
== NULL_TREE
)
6546 if (! integer_zerop (const_binop (BIT_AND_EXPR
, r_const
,
6547 fold_build1_loc (loc
, BIT_NOT_EXPR
,
6550 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
6552 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size, signedness or storage order
     mismatch occurs between the left and right sides.  */
6561 if (ll_bitsize
!= lr_bitsize
|| rl_bitsize
!= rr_bitsize
6562 || ll_unsignedp
!= lr_unsignedp
|| rl_unsignedp
!= rr_unsignedp
6563 || ll_reversep
!= lr_reversep
6564 /* Make sure the two fields on the right
6565 correspond to the left without being swapped. */
6566 || ll_bitpos
- rl_bitpos
!= lr_bitpos
- rr_bitpos
)
6569 first_bit
= MIN (lr_bitpos
, rr_bitpos
);
6570 end_bit
= MAX (lr_bitpos
+ lr_bitsize
, rr_bitpos
+ rr_bitsize
);
6571 if (!get_best_mode (end_bit
- first_bit
, first_bit
, 0, 0,
6572 TYPE_ALIGN (TREE_TYPE (lr_inner
)), BITS_PER_WORD
,
6573 volatilep
, &rnmode
))
6576 rnbitsize
= GET_MODE_BITSIZE (rnmode
);
6577 rnbitpos
= first_bit
& ~ (rnbitsize
- 1);
6578 rntype
= lang_hooks
.types
.type_for_size (rnbitsize
, 1);
6579 xlr_bitpos
= lr_bitpos
- rnbitpos
, xrr_bitpos
= rr_bitpos
- rnbitpos
;
6581 if (lr_reversep
? !BYTES_BIG_ENDIAN
: BYTES_BIG_ENDIAN
)
6583 xlr_bitpos
= rnbitsize
- xlr_bitpos
- lr_bitsize
;
6584 xrr_bitpos
= rnbitsize
- xrr_bitpos
- rr_bitsize
;
6587 lr_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
,
6589 size_int (xlr_bitpos
));
6590 rr_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
,
6592 size_int (xrr_bitpos
));
6593 if (lr_mask
== NULL_TREE
|| rr_mask
== NULL_TREE
)
      /* Make a mask that corresponds to both fields being compared.
         Do this for both items being compared.  If the operands are the
         same size and the bits being compared are in the same position
         then we can do this by masking both and comparing the masked
         results.  */
6601 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
);
6602 lr_mask
= const_binop (BIT_IOR_EXPR
, lr_mask
, rr_mask
);
6603 if (lnbitsize
== rnbitsize
6604 && xll_bitpos
== xlr_bitpos
6608 lhs
= make_bit_field_ref (loc
, ll_inner
, ll_arg
,
6609 lntype
, lnbitsize
, lnbitpos
,
6610 ll_unsignedp
|| rl_unsignedp
, ll_reversep
);
6611 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
6612 lhs
= build2 (BIT_AND_EXPR
, lntype
, lhs
, ll_mask
);
6614 rhs
= make_bit_field_ref (loc
, lr_inner
, lr_arg
,
6615 rntype
, rnbitsize
, rnbitpos
,
6616 lr_unsignedp
|| rr_unsignedp
, lr_reversep
);
6617 if (! all_ones_mask_p (lr_mask
, rnbitsize
))
6618 rhs
= build2 (BIT_AND_EXPR
, rntype
, rhs
, lr_mask
);
6620 return build2_loc (loc
, wanted_code
, truth_type
, lhs
, rhs
);
      /* There is still another way we can do something:  If both pairs of
         fields being compared are adjacent, we may be able to make a wider
         field containing them both.

         Note that we still must mask the lhs/rhs expressions.  Furthermore,
         the mask must be shifted to account for the shift done by
         make_bit_field_ref.  */
6630 if (((ll_bitsize
+ ll_bitpos
== rl_bitpos
6631 && lr_bitsize
+ lr_bitpos
== rr_bitpos
)
6632 || (ll_bitpos
== rl_bitpos
+ rl_bitsize
6633 && lr_bitpos
== rr_bitpos
+ rr_bitsize
))
6641 lhs
= make_bit_field_ref (loc
, ll_inner
, ll_arg
, lntype
,
6642 ll_bitsize
+ rl_bitsize
,
6643 MIN (ll_bitpos
, rl_bitpos
),
6644 ll_unsignedp
, ll_reversep
);
6645 rhs
= make_bit_field_ref (loc
, lr_inner
, lr_arg
, rntype
,
6646 lr_bitsize
+ rr_bitsize
,
6647 MIN (lr_bitpos
, rr_bitpos
),
6648 lr_unsignedp
, lr_reversep
);
6650 ll_mask
= const_binop (RSHIFT_EXPR
, ll_mask
,
6651 size_int (MIN (xll_bitpos
, xrl_bitpos
)));
6652 lr_mask
= const_binop (RSHIFT_EXPR
, lr_mask
,
6653 size_int (MIN (xlr_bitpos
, xrr_bitpos
)));
6654 if (ll_mask
== NULL_TREE
|| lr_mask
== NULL_TREE
)
6657 /* Convert to the smaller type before masking out unwanted bits. */
6659 if (lntype
!= rntype
)
6661 if (lnbitsize
> rnbitsize
)
6663 lhs
= fold_convert_loc (loc
, rntype
, lhs
);
6664 ll_mask
= fold_convert_loc (loc
, rntype
, ll_mask
);
6667 else if (lnbitsize
< rnbitsize
)
6669 rhs
= fold_convert_loc (loc
, lntype
, rhs
);
6670 lr_mask
= fold_convert_loc (loc
, lntype
, lr_mask
);
6675 if (! all_ones_mask_p (ll_mask
, ll_bitsize
+ rl_bitsize
))
6676 lhs
= build2 (BIT_AND_EXPR
, type
, lhs
, ll_mask
);
6678 if (! all_ones_mask_p (lr_mask
, lr_bitsize
+ rr_bitsize
))
6679 rhs
= build2 (BIT_AND_EXPR
, type
, rhs
, lr_mask
);
6681 return build2_loc (loc
, wanted_code
, truth_type
, lhs
, rhs
);
  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
6691 result
= const_binop (BIT_AND_EXPR
, ll_mask
, rl_mask
);
6692 if (! integer_zerop (result
)
6693 && simple_cst_equal (const_binop (BIT_AND_EXPR
, result
, l_const
),
6694 const_binop (BIT_AND_EXPR
, result
, r_const
)) != 1)
6696 if (wanted_code
== NE_EXPR
)
6698 warning (0, "%<or%> of unmatched not-equal tests is always 1");
6699 return constant_boolean_node (true, truth_type
);
6703 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6704 return constant_boolean_node (false, truth_type
);
  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
6715 result
= make_bit_field_ref (loc
, ll_inner
, ll_arg
,
6716 lntype
, lnbitsize
, lnbitpos
,
6717 ll_unsignedp
|| rl_unsignedp
, ll_reversep
);
6719 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
);
6720 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
6721 result
= build2_loc (loc
, BIT_AND_EXPR
, lntype
, result
, ll_mask
);
6723 return build2_loc (loc
, wanted_code
, truth_type
, result
,
6724 const_binop (BIT_IOR_EXPR
, l_const
, r_const
));
/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.

   We set *STRICT_OVERFLOW_P to true if the return value depends on
   signed overflow being undefined.  Otherwise we do not change
   *STRICT_OVERFLOW_P.  */
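/* Illustrative sketch (not part of the original sources): with signed X
   and Y, dividing the sum from the example above by 4 corresponds to

     bool so = false;
     tree r = extract_muldiv (t, build_int_cst (type, 4),
                              TRUNC_DIV_EXPR, NULL_TREE, &so);

   where t holds X*8 + Y*16; r is then X*2 + Y*4, and *so may be set to
   record that the rewrite relies on signed overflow being undefined.  */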
6746 extract_muldiv (tree t
, tree c
, enum tree_code code
, tree wide_type
,
6747 bool *strict_overflow_p
)
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */
6761 ret
= extract_muldiv_1 (t
, c
, code
, wide_type
, strict_overflow_p
);
6768 extract_muldiv_1 (tree t
, tree c
, enum tree_code code
, tree wide_type
,
6769 bool *strict_overflow_p
)
6771 tree type
= TREE_TYPE (t
);
6772 enum tree_code tcode
= TREE_CODE (t
);
6773 tree ctype
= (wide_type
!= 0
6774 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type
))
6775 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type
)))
6776 ? wide_type
: type
);
6778 int same_p
= tcode
== code
;
6779 tree op0
= NULL_TREE
, op1
= NULL_TREE
;
6780 bool sub_strict_overflow_p
;
6782 /* Don't deal with constants of zero here; they confuse the code below. */
6783 if (integer_zerop (c
))
6786 if (TREE_CODE_CLASS (tcode
) == tcc_unary
)
6787 op0
= TREE_OPERAND (t
, 0);
6789 if (TREE_CODE_CLASS (tcode
) == tcc_binary
)
6790 op0
= TREE_OPERAND (t
, 0), op1
= TREE_OPERAND (t
, 1);
6792 /* Note that we need not handle conditional operations here since fold
6793 already handles those cases. So just do arithmetic here. */
6797 /* For a constant, we can always simplify if we are a multiply
6798 or (for divide and modulus) if it is a multiple of our constant. */
6799 if (code
== MULT_EXPR
6800 || wi::multiple_of_p (wi::to_wide (t
), wi::to_wide (c
),
6803 tree tem
= const_binop (code
, fold_convert (ctype
, t
),
6804 fold_convert (ctype
, c
));
6805 /* If the multiplication overflowed, we lost information on it.
6806 See PR68142 and PR69845. */
6807 if (TREE_OVERFLOW (tem
))
6813 CASE_CONVERT
: case NON_LVALUE_EXPR
:
6814 if (!INTEGRAL_TYPE_P (TREE_TYPE (op0
)))
6816 /* If op0 is an expression ... */
6817 if ((COMPARISON_CLASS_P (op0
)
6818 || UNARY_CLASS_P (op0
)
6819 || BINARY_CLASS_P (op0
)
6820 || VL_EXP_CLASS_P (op0
)
6821 || EXPRESSION_CLASS_P (op0
))
6822 /* ... and has wrapping overflow, and its type is smaller
6823 than ctype, then we cannot pass through as widening. */
6824 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0
))
6825 && (TYPE_PRECISION (ctype
)
6826 > TYPE_PRECISION (TREE_TYPE (op0
))))
6827 /* ... or this is a truncation (t is narrower than op0),
6828 then we cannot pass through this narrowing. */
6829 || (TYPE_PRECISION (type
)
6830 < TYPE_PRECISION (TREE_TYPE (op0
)))
6831 /* ... or signedness changes for division or modulus,
6832 then we cannot pass through this conversion. */
6833 || (code
!= MULT_EXPR
6834 && (TYPE_UNSIGNED (ctype
)
6835 != TYPE_UNSIGNED (TREE_TYPE (op0
))))
6836 /* ... or has undefined overflow while the converted to
6837 type has not, we cannot do the operation in the inner type
6838 as that would introduce undefined overflow. */
6839 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0
))
6840 && !TYPE_OVERFLOW_UNDEFINED (type
))))
6843 /* Pass the constant down and see if we can make a simplification. If
6844 we can, replace this expression with the inner simplification for
6845 possible later conversion to our or some other type. */
6846 if ((t2
= fold_convert (TREE_TYPE (op0
), c
)) != 0
6847 && TREE_CODE (t2
) == INTEGER_CST
6848 && !TREE_OVERFLOW (t2
)
6849 && (t1
= extract_muldiv (op0
, t2
, code
,
6850 code
== MULT_EXPR
? ctype
: NULL_TREE
,
6851 strict_overflow_p
)) != 0)
6856 /* If widening the type changes it from signed to unsigned, then we
6857 must avoid building ABS_EXPR itself as unsigned. */
6858 if (TYPE_UNSIGNED (ctype
) && !TYPE_UNSIGNED (type
))
6860 tree cstype
= (*signed_type_for
) (ctype
);
6861 if ((t1
= extract_muldiv (op0
, c
, code
, cstype
, strict_overflow_p
))
6864 t1
= fold_build1 (tcode
, cstype
, fold_convert (cstype
, t1
));
6865 return fold_convert (ctype
, t1
);
6869 /* If the constant is negative, we cannot simplify this. */
6870 if (tree_int_cst_sgn (c
) == -1)
6874 /* For division and modulus, type can't be unsigned, as e.g.
6875 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6876 For signed types, even with wrapping overflow, this is fine. */
6877 if (code
!= MULT_EXPR
&& TYPE_UNSIGNED (type
))
6879 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
, strict_overflow_p
))
6881 return fold_build1 (tcode
, ctype
, fold_convert (ctype
, t1
));
6884 case MIN_EXPR
: case MAX_EXPR
:
6885 /* If widening the type changes the signedness, then we can't perform
6886 this optimization as that changes the result. */
6887 if (TYPE_UNSIGNED (ctype
) != TYPE_UNSIGNED (type
))
6890 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6891 sub_strict_overflow_p
= false;
6892 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
,
6893 &sub_strict_overflow_p
)) != 0
6894 && (t2
= extract_muldiv (op1
, c
, code
, wide_type
,
6895 &sub_strict_overflow_p
)) != 0)
6897 if (tree_int_cst_sgn (c
) < 0)
6898 tcode
= (tcode
== MIN_EXPR
? MAX_EXPR
: MIN_EXPR
);
6899 if (sub_strict_overflow_p
)
6900 *strict_overflow_p
= true;
6901 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
6902 fold_convert (ctype
, t2
));
6906 case LSHIFT_EXPR
: case RSHIFT_EXPR
:
6907 /* If the second operand is constant, this is a multiplication
6908 or floor division, by a power of two, so we can treat it that
6909 way unless the multiplier or divisor overflows. Signed
6910 left-shift overflow is implementation-defined rather than
6911 undefined in C90, so do not convert signed left shift into
6913 if (TREE_CODE (op1
) == INTEGER_CST
6914 && (tcode
== RSHIFT_EXPR
|| TYPE_UNSIGNED (TREE_TYPE (op0
)))
6915 /* const_binop may not detect overflow correctly,
6916 so check for it explicitly here. */
6917 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node
)),
6919 && (t1
= fold_convert (ctype
,
6920 const_binop (LSHIFT_EXPR
, size_one_node
,
6922 && !TREE_OVERFLOW (t1
))
6923 return extract_muldiv (build2 (tcode
== LSHIFT_EXPR
6924 ? MULT_EXPR
: FLOOR_DIV_EXPR
,
6926 fold_convert (ctype
, op0
),
6928 c
, code
, wide_type
, strict_overflow_p
);
6931 case PLUS_EXPR
: case MINUS_EXPR
:
6932 /* See if we can eliminate the operation on both sides. If we can, we
6933 can return a new PLUS or MINUS. If we can't, the only remaining
6934 cases where we can do anything are if the second operand is a
6936 sub_strict_overflow_p
= false;
6937 t1
= extract_muldiv (op0
, c
, code
, wide_type
, &sub_strict_overflow_p
);
6938 t2
= extract_muldiv (op1
, c
, code
, wide_type
, &sub_strict_overflow_p
);
6939 if (t1
!= 0 && t2
!= 0
6940 && TYPE_OVERFLOW_WRAPS (ctype
)
6941 && (code
== MULT_EXPR
6942 /* If not multiplication, we can only do this if both operands
6943 are divisible by c. */
6944 || (multiple_of_p (ctype
, op0
, c
)
6945 && multiple_of_p (ctype
, op1
, c
))))
6947 if (sub_strict_overflow_p
)
6948 *strict_overflow_p
= true;
6949 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
6950 fold_convert (ctype
, t2
));
6953 /* If this was a subtraction, negate OP1 and set it to be an addition.
6954 This simplifies the logic below. */
6955 if (tcode
== MINUS_EXPR
)
6957 tcode
= PLUS_EXPR
, op1
= negate_expr (op1
);
6958 /* If OP1 was not easily negatable, the constant may be OP0. */
6959 if (TREE_CODE (op0
) == INTEGER_CST
)
6961 std::swap (op0
, op1
);
6966 if (TREE_CODE (op1
) != INTEGER_CST
)
6969 /* If either OP1 or C are negative, this optimization is not safe for
6970 some of the division and remainder types while for others we need
6971 to change the code. */
6972 if (tree_int_cst_sgn (op1
) < 0 || tree_int_cst_sgn (c
) < 0)
6974 if (code
== CEIL_DIV_EXPR
)
6975 code
= FLOOR_DIV_EXPR
;
6976 else if (code
== FLOOR_DIV_EXPR
)
6977 code
= CEIL_DIV_EXPR
;
6978 else if (code
!= MULT_EXPR
6979 && code
!= CEIL_MOD_EXPR
&& code
!= FLOOR_MOD_EXPR
)
6983 /* If it's a multiply or a division/modulus operation of a multiple
6984 of our constant, do the operation and verify it doesn't overflow. */
6985 if (code
== MULT_EXPR
6986 || wi::multiple_of_p (wi::to_wide (op1
), wi::to_wide (c
),
6989 op1
= const_binop (code
, fold_convert (ctype
, op1
),
6990 fold_convert (ctype
, c
));
6991 /* We allow the constant to overflow with wrapping semantics. */
6993 || (TREE_OVERFLOW (op1
) && !TYPE_OVERFLOW_WRAPS (ctype
)))
6999 /* If we have an unsigned type, we cannot widen the operation since it
7000 will change the result if the original computation overflowed. */
7001 if (TYPE_UNSIGNED (ctype
) && ctype
!= type
)
7004 /* The last case is if we are a multiply. In that case, we can
7005 apply the distributive law to commute the multiply and addition
7006 if the multiplication of the constants doesn't overflow
7007 and overflow is defined. With undefined overflow
7008 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.
7009 But fold_plusminus_mult_expr would factor back any power-of-two
7010 value so do not distribute in the first place in this case. */
7011 if (code
== MULT_EXPR
7012 && TYPE_OVERFLOW_WRAPS (ctype
)
7013 && !(tree_fits_shwi_p (c
) && pow2p_hwi (absu_hwi (tree_to_shwi (c
)))))
7014 return fold_build2 (tcode
, ctype
,
7015 fold_build2 (code
, ctype
,
7016 fold_convert (ctype
, op0
),
7017 fold_convert (ctype
, c
)),
7023 /* We have a special case here if we are doing something like
7024 (C * 8) % 4 since we know that's zero. */
7025 if ((code
== TRUNC_MOD_EXPR
|| code
== CEIL_MOD_EXPR
7026 || code
== FLOOR_MOD_EXPR
|| code
== ROUND_MOD_EXPR
)
7027 /* If the multiplication can overflow we cannot optimize this. */
7028 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t
))
7029 && TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
7030 && wi::multiple_of_p (wi::to_wide (op1
), wi::to_wide (c
),
7033 *strict_overflow_p
= true;
7034 return omit_one_operand (type
, integer_zero_node
, op0
);
7037 /* ... fall through ... */
7039 case TRUNC_DIV_EXPR
: case CEIL_DIV_EXPR
: case FLOOR_DIV_EXPR
:
7040 case ROUND_DIV_EXPR
: case EXACT_DIV_EXPR
:
7041 /* If we can extract our operation from the LHS, do so and return a
7042 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
7043 do something only if the second operand is a constant. */
7045 && TYPE_OVERFLOW_WRAPS (ctype
)
7046 && (t1
= extract_muldiv (op0
, c
, code
, wide_type
,
7047 strict_overflow_p
)) != 0)
7048 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
7049 fold_convert (ctype
, op1
));
7050 else if (tcode
== MULT_EXPR
&& code
== MULT_EXPR
7051 && TYPE_OVERFLOW_WRAPS (ctype
)
7052 && (t1
= extract_muldiv (op1
, c
, code
, wide_type
,
7053 strict_overflow_p
)) != 0)
7054 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
7055 fold_convert (ctype
, t1
));
7056 else if (TREE_CODE (op1
) != INTEGER_CST
)
7059 /* If these are the same operation types, we can associate them
7060 assuming no overflow. */
7063 bool overflow_p
= false;
7064 wi::overflow_type overflow_mul
;
7065 signop sign
= TYPE_SIGN (ctype
);
7066 unsigned prec
= TYPE_PRECISION (ctype
);
7067 wide_int mul
= wi::mul (wi::to_wide (op1
, prec
),
7068 wi::to_wide (c
, prec
),
7069 sign
, &overflow_mul
);
7070 overflow_p
= TREE_OVERFLOW (c
) | TREE_OVERFLOW (op1
);
7072 && ((sign
== UNSIGNED
&& tcode
!= MULT_EXPR
) || sign
== SIGNED
))
7075 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
7076 wide_int_to_tree (ctype
, mul
));
7079 /* If these operations "cancel" each other, we have the main
7080 optimizations of this pass, which occur when either constant is a
7081 multiple of the other, in which case we replace this with either an
7082 operation or CODE or TCODE.
7084 If we have an unsigned type, we cannot do this since it will change
7085 the result if the original computation overflowed. */
7086 if (TYPE_OVERFLOW_UNDEFINED (ctype
)
7087 && ((code
== MULT_EXPR
&& tcode
== EXACT_DIV_EXPR
)
7088 || (tcode
== MULT_EXPR
7089 && code
!= TRUNC_MOD_EXPR
&& code
!= CEIL_MOD_EXPR
7090 && code
!= FLOOR_MOD_EXPR
&& code
!= ROUND_MOD_EXPR
7091 && code
!= MULT_EXPR
)))
7093 if (wi::multiple_of_p (wi::to_wide (op1
), wi::to_wide (c
),
7096 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
7097 *strict_overflow_p
= true;
7098 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
7099 fold_convert (ctype
,
7100 const_binop (TRUNC_DIV_EXPR
,
7103 else if (wi::multiple_of_p (wi::to_wide (c
), wi::to_wide (op1
),
7106 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
7107 *strict_overflow_p
= true;
7108 return fold_build2 (code
, ctype
, fold_convert (ctype
, op0
),
7109 fold_convert (ctype
,
7110 const_binop (TRUNC_DIV_EXPR
,
/* Return a node which has the indicated constant VALUE (either 0 or
   1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
   and is of the indicated TYPE.  */
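/* Illustrative usage (not part of the original sources):

     constant_boolean_node (true, boolean_type_node)  -> boolean_true_node
     constant_boolean_node (false, integer_type_node) -> integer_zero_node

   and for a vector TYPE the result is a VECTOR_CST whose elements are
   all ones or all zeros.  */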
7128 constant_boolean_node (bool value
, tree type
)
7130 if (type
== integer_type_node
)
7131 return value
? integer_one_node
: integer_zero_node
;
7132 else if (type
== boolean_type_node
)
7133 return value
? boolean_true_node
: boolean_false_node
;
7134 else if (TREE_CODE (type
) == VECTOR_TYPE
)
7135 return build_vector_from_val (type
,
7136 build_int_cst (TREE_TYPE (type
),
7139 return fold_convert (type
, value
? integer_one_node
: integer_zero_node
);
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */
7153 fold_binary_op_with_conditional_arg (location_t loc
,
7154 enum tree_code code
,
7155 tree type
, tree op0
, tree op1
,
7156 tree cond
, tree arg
, int cond_first_p
)
7158 tree cond_type
= cond_first_p
? TREE_TYPE (op0
) : TREE_TYPE (op1
);
7159 tree arg_type
= cond_first_p
? TREE_TYPE (op1
) : TREE_TYPE (op0
);
7160 tree test
, true_value
, false_value
;
7161 tree lhs
= NULL_TREE
;
7162 tree rhs
= NULL_TREE
;
7163 enum tree_code cond_code
= COND_EXPR
;
7165 /* Do not move possibly trapping operations into the conditional as this
7166 pessimizes code and causes gimplification issues when applied late. */
7167 if (operation_could_trap_p (code
, FLOAT_TYPE_P (type
),
7168 ANY_INTEGRAL_TYPE_P (type
)
7169 && TYPE_OVERFLOW_TRAPS (type
), op1
))
7172 if (TREE_CODE (cond
) == COND_EXPR
7173 || TREE_CODE (cond
) == VEC_COND_EXPR
)
7175 test
= TREE_OPERAND (cond
, 0);
7176 true_value
= TREE_OPERAND (cond
, 1);
7177 false_value
= TREE_OPERAND (cond
, 2);
7178 /* If this operand throws an expression, then it does not make
7179 sense to try to perform a logical or arithmetic operation
7181 if (VOID_TYPE_P (TREE_TYPE (true_value
)))
7183 if (VOID_TYPE_P (TREE_TYPE (false_value
)))
7186 else if (!(TREE_CODE (type
) != VECTOR_TYPE
7187 && TREE_CODE (TREE_TYPE (cond
)) == VECTOR_TYPE
))
7189 tree testtype
= TREE_TYPE (cond
);
7191 true_value
= constant_boolean_node (true, testtype
);
7192 false_value
= constant_boolean_node (false, testtype
);
7195 /* Detect the case of mixing vector and scalar types - bail out. */
7198 if (TREE_CODE (TREE_TYPE (test
)) == VECTOR_TYPE
)
7199 cond_code
= VEC_COND_EXPR
;
7201 /* This transformation is only worthwhile if we don't have to wrap ARG
7202 in a SAVE_EXPR and the operation can be simplified without recursing
7203 on at least one of the branches once its pushed inside the COND_EXPR. */
7204 if (!TREE_CONSTANT (arg
)
7205 && (TREE_SIDE_EFFECTS (arg
)
7206 || TREE_CODE (arg
) == COND_EXPR
|| TREE_CODE (arg
) == VEC_COND_EXPR
7207 || TREE_CONSTANT (true_value
) || TREE_CONSTANT (false_value
)))
7210 arg
= fold_convert_loc (loc
, arg_type
, arg
);
7213 true_value
= fold_convert_loc (loc
, cond_type
, true_value
);
7215 lhs
= fold_build2_loc (loc
, code
, type
, true_value
, arg
);
7217 lhs
= fold_build2_loc (loc
, code
, type
, arg
, true_value
);
7221 false_value
= fold_convert_loc (loc
, cond_type
, false_value
);
7223 rhs
= fold_build2_loc (loc
, code
, type
, false_value
, arg
);
7225 rhs
= fold_build2_loc (loc
, code
, type
, arg
, false_value
);
7228 /* Check that we have simplified at least one of the branches. */
7229 if (!TREE_CONSTANT (arg
) && !TREE_CONSTANT (lhs
) && !TREE_CONSTANT (rhs
))
7232 return fold_build3_loc (loc
, cond_code
, type
, test
, lhs
, rhs
);
/* Subroutine of fold() that checks for the addition of ARG +/- 0.0.

   If !NEGATE, return true if ZERO_ARG is +/-0.0 and, for all ARG of
   type TYPE, ARG + ZERO_ARG is the same as ARG.  If NEGATE, return true
   if ARG - ZERO_ARG is the same as ARG.

   If ARG is NULL, check for any value of type TYPE.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */
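/* Illustrative sketch (not part of the original sources): with default
   (round-to-nearest) rounding, (-0.0) + 0.0 yields +0.0, so folding
   "x + 0.0 -> x" is invalid once signed zeros are honored, while
   "x - 0.0 -> x" remains valid because (-0.0) - 0.0 is -0.0 and
   (+0.0) - 0.0 is +0.0.  Under rounding towards -infinity the roles
   flip, since there (+0.0) - 0.0 is -0.0.  */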
7251 fold_real_zero_addition_p (const_tree type
, const_tree arg
,
7252 const_tree zero_arg
, int negate
)
7254 if (!real_zerop (zero_arg
))
7257 /* Don't allow the fold with -fsignaling-nans. */
7258 if (arg
? tree_expr_maybe_signaling_nan_p (arg
) : HONOR_SNANS (type
))
7261 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
7262 if (!HONOR_SIGNED_ZEROS (type
))
7265 /* There is no case that is safe for all rounding modes. */
7266 if (HONOR_SIGN_DEPENDENT_ROUNDING (type
))
7269 /* In a vector or complex, we would need to check the sign of all zeros. */
7270 if (TREE_CODE (zero_arg
) == VECTOR_CST
)
7271 zero_arg
= uniform_vector_p (zero_arg
);
7272 if (!zero_arg
|| TREE_CODE (zero_arg
) != REAL_CST
)
7275 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
7276 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (zero_arg
)))
7279 /* The mode has signed zeros, and we have to honor their sign.
7280 In this situation, there are only two cases we can return true for.
7281 (i) X - 0 is the same as X with default rounding.
7282 (ii) X + 0 is X when X can't possibly be -0.0. */
7283 return negate
|| (arg
&& !tree_expr_maybe_real_minus_zero_p (arg
));
/* Subroutine of match.pd that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  C1 and C2 must be INTEGER_CSTs.  */
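/* Illustrative sketch (not part of the original sources): for unsigned X,
   X / 4 == 2 holds exactly when X is in [8, 11], so this helper computes
   *lo = 8 and *hi = 11 and the caller can emit a single range check such
   as (X - 8) <= 3.  */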
7294 fold_div_compare (enum tree_code code
, tree c1
, tree c2
, tree
*lo
,
7295 tree
*hi
, bool *neg_overflow
)
7297 tree prod
, tmp
, type
= TREE_TYPE (c1
);
7298 signop sign
= TYPE_SIGN (type
);
7299 wi::overflow_type overflow
;
7301 /* We have to do this the hard way to detect unsigned overflow.
7302 prod = int_const_binop (MULT_EXPR, c1, c2); */
7303 wide_int val
= wi::mul (wi::to_wide (c1
), wi::to_wide (c2
), sign
, &overflow
);
7304 prod
= force_fit_type (type
, val
, -1, overflow
);
7305 *neg_overflow
= false;
7307 if (sign
== UNSIGNED
)
7309 tmp
= int_const_binop (MINUS_EXPR
, c1
, build_int_cst (type
, 1));
7312 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
7313 val
= wi::add (wi::to_wide (prod
), wi::to_wide (tmp
), sign
, &overflow
);
7314 *hi
= force_fit_type (type
, val
, -1, overflow
| TREE_OVERFLOW (prod
));
7316 else if (tree_int_cst_sgn (c1
) >= 0)
7318 tmp
= int_const_binop (MINUS_EXPR
, c1
, build_int_cst (type
, 1));
7319 switch (tree_int_cst_sgn (c2
))
7322 *neg_overflow
= true;
7323 *lo
= int_const_binop (MINUS_EXPR
, prod
, tmp
);
7328 *lo
= fold_negate_const (tmp
, type
);
7333 *hi
= int_const_binop (PLUS_EXPR
, prod
, tmp
);
7343 /* A negative divisor reverses the relational operators. */
7344 code
= swap_tree_comparison (code
);
7346 tmp
= int_const_binop (PLUS_EXPR
, c1
, build_int_cst (type
, 1));
7347 switch (tree_int_cst_sgn (c2
))
7350 *hi
= int_const_binop (MINUS_EXPR
, prod
, tmp
);
7355 *hi
= fold_negate_const (tmp
, type
);
7360 *neg_overflow
= true;
7361 *lo
= int_const_binop (PLUS_EXPR
, prod
, tmp
);
7370 if (code
!= EQ_EXPR
&& code
!= NE_EXPR
)
7373 if (TREE_OVERFLOW (*lo
)
7374 || operand_equal_p (*lo
, TYPE_MIN_VALUE (type
), 0))
7376 if (TREE_OVERFLOW (*hi
)
7377 || operand_equal_p (*hi
, TYPE_MAX_VALUE (type
), 0))
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of the test
   using a sign test.  Otherwise return NULL.  TYPE is the desired
   result type.  */
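/* Illustrative sketch (not part of the original sources): for a 32-bit
   signed int x, (x & 0x80000000) != 0 tests only the sign bit, so it is
   rewritten as x < 0, and (x & 0x80000000) == 0 becomes x >= 0.  */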
7390 fold_single_bit_test_into_sign_test (location_t loc
,
7391 enum tree_code code
, tree arg0
, tree arg1
,
7394 /* If this is testing a single bit, we can optimize the test. */
7395 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
7396 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
7397 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
7399 /* If we have (A & C) != 0 where C is the sign bit of A, convert
7400 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
7401 tree arg00
= sign_bit_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg0
, 1));
7403 if (arg00
!= NULL_TREE
7404 /* This is only a win if casting to a signed type is cheap,
7405 i.e. when arg00's type is not a partial mode. */
7406 && type_has_mode_precision_p (TREE_TYPE (arg00
)))
7408 tree stype
= signed_type_for (TREE_TYPE (arg00
));
7409 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
,
7411 fold_convert_loc (loc
, stype
, arg00
),
7412 build_int_cst (stype
, 0));
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */
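/* Illustrative sketch (not part of the original sources): (x & 8) != 0
   is rewritten as ((x >> 3) & 1), and (x & 8) == 0 as (((x >> 3) ^ 1) & 1):
   the tested bit is shifted down to bit 0, optionally inverted, and the
   final AND masks away the remaining bits.  */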
7425 fold_single_bit_test (location_t loc
, enum tree_code code
,
7426 tree arg0
, tree arg1
, tree result_type
)
7428 /* If this is testing a single bit, we can optimize the test. */
7429 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
7430 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
7431 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
7433 tree inner
= TREE_OPERAND (arg0
, 0);
7434 tree type
= TREE_TYPE (arg0
);
7435 int bitnum
= tree_log2 (TREE_OPERAND (arg0
, 1));
7436 scalar_int_mode operand_mode
= SCALAR_INT_TYPE_MODE (type
);
7438 tree signed_type
, unsigned_type
, intermediate_type
;
7441 /* First, see if we can fold the single bit test into a sign-bit
7443 tem
= fold_single_bit_test_into_sign_test (loc
, code
, arg0
, arg1
,
7448 /* Otherwise we have (A & C) != 0 where C is a single bit,
7449 convert that into ((A >> C2) & 1). Where C2 = log2(C).
7450 Similarly for (A & C) == 0. */
7452 /* If INNER is a right shift of a constant and it plus BITNUM does
7453 not overflow, adjust BITNUM and INNER. */
7454 if (TREE_CODE (inner
) == RSHIFT_EXPR
7455 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
7456 && bitnum
< TYPE_PRECISION (type
)
7457 && wi::ltu_p (wi::to_wide (TREE_OPERAND (inner
, 1)),
7458 TYPE_PRECISION (type
) - bitnum
))
7460 bitnum
+= tree_to_uhwi (TREE_OPERAND (inner
, 1));
7461 inner
= TREE_OPERAND (inner
, 0);
7464 /* If we are going to be able to omit the AND below, we must do our
7465 operations as unsigned. If we must use the AND, we have a choice.
7466 Normally unsigned is faster, but for some machines signed is. */
7467 ops_unsigned
= (load_extend_op (operand_mode
) == SIGN_EXTEND
7468 && !flag_syntax_only
) ? 0 : 1;
7470 signed_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 0);
7471 unsigned_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 1);
7472 intermediate_type
= ops_unsigned
? unsigned_type
: signed_type
;
7473 inner
= fold_convert_loc (loc
, intermediate_type
, inner
);
7476 inner
= build2 (RSHIFT_EXPR
, intermediate_type
,
7477 inner
, size_int (bitnum
));
7479 one
= build_int_cst (intermediate_type
, 1);
7481 if (code
== EQ_EXPR
)
7482 inner
= fold_build2_loc (loc
, BIT_XOR_EXPR
, intermediate_type
, inner
, one
);
7484 /* Put the AND last so it can combine with more things. */
7485 inner
= build2 (BIT_AND_EXPR
, intermediate_type
, inner
, one
);
7487 /* Make sure to return the proper type. */
7488 inner
= fold_convert_loc (loc
, result_type
, inner
);
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   is not.  */
7500 tree_swap_operands_p (const_tree arg0
, const_tree arg1
)
7502 if (CONSTANT_CLASS_P (arg1
))
7504 if (CONSTANT_CLASS_P (arg0
))
7510 if (TREE_CONSTANT (arg1
))
7512 if (TREE_CONSTANT (arg0
))
7515 /* It is preferable to swap two SSA_NAME to ensure a canonical form
7516 for commutative and comparison operators. Ensuring a canonical
7517 form allows the optimizers to find additional redundancies without
7518 having to explicitly check for both orderings. */
7519 if (TREE_CODE (arg0
) == SSA_NAME
7520 && TREE_CODE (arg1
) == SSA_NAME
7521 && SSA_NAME_VERSION (arg0
) > SSA_NAME_VERSION (arg1
))
7524 /* Put SSA_NAMEs last. */
7525 if (TREE_CODE (arg1
) == SSA_NAME
)
7527 if (TREE_CODE (arg0
) == SSA_NAME
)
7530 /* Put variables last. */
/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */
7545 fold_to_nonsharp_ineq_using_bound (location_t loc
, tree ineq
, tree bound
)
7547 tree a
, typea
, type
= TREE_TYPE (bound
), a1
, diff
, y
;
7549 if (TREE_CODE (bound
) == LT_EXPR
)
7550 a
= TREE_OPERAND (bound
, 0);
7551 else if (TREE_CODE (bound
) == GT_EXPR
)
7552 a
= TREE_OPERAND (bound
, 1);
7556 typea
= TREE_TYPE (a
);
7557 if (!INTEGRAL_TYPE_P (typea
)
7558 && !POINTER_TYPE_P (typea
))
7561 if (TREE_CODE (ineq
) == LT_EXPR
)
7563 a1
= TREE_OPERAND (ineq
, 1);
7564 y
= TREE_OPERAND (ineq
, 0);
7566 else if (TREE_CODE (ineq
) == GT_EXPR
)
7568 a1
= TREE_OPERAND (ineq
, 0);
7569 y
= TREE_OPERAND (ineq
, 1);
7574 if (TREE_TYPE (a1
) != typea
)
7577 if (POINTER_TYPE_P (typea
))
7579 /* Convert the pointer types into integer before taking the difference. */
7580 tree ta
= fold_convert_loc (loc
, ssizetype
, a
);
7581 tree ta1
= fold_convert_loc (loc
, ssizetype
, a1
);
7582 diff
= fold_binary_loc (loc
, MINUS_EXPR
, ssizetype
, ta1
, ta
);
7585 diff
= fold_binary_loc (loc
, MINUS_EXPR
, typea
, a1
, a
);
7587 if (!diff
|| !integer_onep (diff
))
7590 return fold_build2_loc (loc
, GE_EXPR
, type
, a
, y
);
/* Fold a sum or difference of at least one multiplication.
   Returns the folded tree or NULL if no simplification could be made.  */
7597 fold_plusminus_mult_expr (location_t loc
, enum tree_code code
, tree type
,
7598 tree arg0
, tree arg1
)
7600 tree arg00
, arg01
, arg10
, arg11
;
7601 tree alt0
= NULL_TREE
, alt1
= NULL_TREE
, same
;
7603 /* (A * C) +- (B * C) -> (A+-B) * C.
7604 (A * C) +- A -> A * (C+-1).
7605 We are most concerned about the case where C is a constant,
7606 but other combinations show up during loop reduction. Since
7607 it is not difficult, try all four possibilities. */
7609 if (TREE_CODE (arg0
) == MULT_EXPR
)
7611 arg00
= TREE_OPERAND (arg0
, 0);
7612 arg01
= TREE_OPERAND (arg0
, 1);
7614 else if (TREE_CODE (arg0
) == INTEGER_CST
)
7616 arg00
= build_one_cst (type
);
7621 /* We cannot generate constant 1 for fract. */
7622 if (ALL_FRACT_MODE_P (TYPE_MODE (type
)))
7625 arg01
= build_one_cst (type
);
7627 if (TREE_CODE (arg1
) == MULT_EXPR
)
7629 arg10
= TREE_OPERAND (arg1
, 0);
7630 arg11
= TREE_OPERAND (arg1
, 1);
7632 else if (TREE_CODE (arg1
) == INTEGER_CST
)
7634 arg10
= build_one_cst (type
);
7635 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7636 the purpose of this canonicalization. */
7637 if (wi::neg_p (wi::to_wide (arg1
), TYPE_SIGN (TREE_TYPE (arg1
)))
7638 && negate_expr_p (arg1
)
7639 && code
== PLUS_EXPR
)
7641 arg11
= negate_expr (arg1
);
7649 /* We cannot generate constant 1 for fract. */
7650 if (ALL_FRACT_MODE_P (TYPE_MODE (type
)))
7653 arg11
= build_one_cst (type
);
7657 /* Prefer factoring a common non-constant. */
7658 if (operand_equal_p (arg00
, arg10
, 0))
7659 same
= arg00
, alt0
= arg01
, alt1
= arg11
;
7660 else if (operand_equal_p (arg01
, arg11
, 0))
7661 same
= arg01
, alt0
= arg00
, alt1
= arg10
;
7662 else if (operand_equal_p (arg00
, arg11
, 0))
7663 same
= arg00
, alt0
= arg01
, alt1
= arg10
;
7664 else if (operand_equal_p (arg01
, arg10
, 0))
7665 same
= arg01
, alt0
= arg00
, alt1
= arg11
;
  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
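  /* Illustrative sketch (not part of the original sources): i * 12 + j * 4,
     as produced by indexing a[i][j] with 4-byte elements and 3 columns,
     has no common multiplicand, but 4 divides 12, so the sum can be
     refactored as (i * 3 + j) * 4.  */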
7670 else if (tree_fits_shwi_p (arg01
) && tree_fits_shwi_p (arg11
))
7672 HOST_WIDE_INT int01
= tree_to_shwi (arg01
);
7673 HOST_WIDE_INT int11
= tree_to_shwi (arg11
);
7678 /* Move min of absolute values to int11. */
7679 if (absu_hwi (int01
) < absu_hwi (int11
))
7681 tmp
= int01
, int01
= int11
, int11
= tmp
;
7682 alt0
= arg00
, arg00
= arg10
, arg10
= alt0
;
7689 const unsigned HOST_WIDE_INT factor
= absu_hwi (int11
);
7691 && pow2p_hwi (factor
)
7692 && (int01
& (factor
- 1)) == 0
7693 /* The remainder should not be a constant, otherwise we
7694 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7695 increased the number of multiplications necessary. */
7696 && TREE_CODE (arg10
) != INTEGER_CST
)
7698 alt0
= fold_build2_loc (loc
, MULT_EXPR
, TREE_TYPE (arg00
), arg00
,
7699 build_int_cst (TREE_TYPE (arg00
),
7704 maybe_same
= alt0
, alt0
= alt1
, alt1
= maybe_same
;
7711 if (! ANY_INTEGRAL_TYPE_P (type
)
7712 || TYPE_OVERFLOW_WRAPS (type
)
7713 /* We are neither factoring zero nor minus one. */
7714 || TREE_CODE (same
) == INTEGER_CST
)
7715 return fold_build2_loc (loc
, MULT_EXPR
, type
,
7716 fold_build2_loc (loc
, code
, type
,
7717 fold_convert_loc (loc
, type
, alt0
),
7718 fold_convert_loc (loc
, type
, alt1
)),
7719 fold_convert_loc (loc
, type
, same
));
7721 /* Same may be zero and thus the operation 'code' may overflow. Likewise
7722 same may be minus one and thus the multiplication may overflow. Perform
7723 the sum operation in an unsigned type. */
7724 tree utype
= unsigned_type_for (type
);
7725 tree tem
= fold_build2_loc (loc
, code
, utype
,
7726 fold_convert_loc (loc
, utype
, alt0
),
7727 fold_convert_loc (loc
, utype
, alt1
));
7728 /* If the sum evaluated to a constant that is not -INF the multiplication
7730 if (TREE_CODE (tem
) == INTEGER_CST
7731 && (wi::to_wide (tem
)
7732 != wi::min_value (TYPE_PRECISION (utype
), SIGNED
)))
7733 return fold_build2_loc (loc
, MULT_EXPR
, type
,
7734 fold_convert (type
, tem
), same
);
7736 /* Do not resort to unsigned multiplication because
7737 we lose the no-overflow property of the expression. */
/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */
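/* Illustrative sketch (not part of the original sources): encoding the
   32-bit constant 0x11223344 writes the bytes {0x44, 0x33, 0x22, 0x11}
   into PTR on a little-endian target and {0x11, 0x22, 0x33, 0x44} on a
   big-endian one, and the function returns 4.  */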
7747 native_encode_int (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7749 tree type
= TREE_TYPE (expr
);
7750 int total_bytes
= GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type
));
7751 int byte
, offset
, word
, words
;
7752 unsigned char value
;
7754 if ((off
== -1 && total_bytes
> len
) || off
>= total_bytes
)
7761 return MIN (len
, total_bytes
- off
);
7763 words
= total_bytes
/ UNITS_PER_WORD
;
7765 for (byte
= 0; byte
< total_bytes
; byte
++)
7767 int bitpos
= byte
* BITS_PER_UNIT
;
7768 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7770 value
= wi::extract_uhwi (wi::to_widest (expr
), bitpos
, BITS_PER_UNIT
);
7772 if (total_bytes
> UNITS_PER_WORD
)
7774 word
= byte
/ UNITS_PER_WORD
;
7775 if (WORDS_BIG_ENDIAN
)
7776 word
= (words
- 1) - word
;
7777 offset
= word
* UNITS_PER_WORD
;
7778 if (BYTES_BIG_ENDIAN
)
7779 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7781 offset
+= byte
% UNITS_PER_WORD
;
7784 offset
= BYTES_BIG_ENDIAN
? (total_bytes
- 1) - byte
: byte
;
7785 if (offset
>= off
&& offset
- off
< len
)
7786 ptr
[offset
- off
] = value
;
7788 return MIN (len
, total_bytes
- off
);
7792 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7793 specified by EXPR into the buffer PTR of length LEN bytes.
7794 Return the number of bytes placed in the buffer, or zero
7798 native_encode_fixed (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7800 tree type
= TREE_TYPE (expr
);
7801 scalar_mode mode
= SCALAR_TYPE_MODE (type
);
7802 int total_bytes
= GET_MODE_SIZE (mode
);
7803 FIXED_VALUE_TYPE value
;
7804 tree i_value
, i_type
;
7806 if (total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
7809 i_type
= lang_hooks
.types
.type_for_size (GET_MODE_BITSIZE (mode
), 1);
7811 if (NULL_TREE
== i_type
|| TYPE_PRECISION (i_type
) != total_bytes
)
7814 value
= TREE_FIXED_CST (expr
);
7815 i_value
= double_int_to_tree (i_type
, value
.data
);
7817 return native_encode_int (i_value
, ptr
, len
, off
);
7821 /* Subroutine of native_encode_expr. Encode the REAL_CST
7822 specified by EXPR into the buffer PTR of length LEN bytes.
7823 Return the number of bytes placed in the buffer, or zero
7827 native_encode_real (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7829 tree type
= TREE_TYPE (expr
);
7830 int total_bytes
= GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type
));
7831 int byte
, offset
, word
, words
, bitpos
;
7832 unsigned char value
;
7834 /* There are always 32 bits in each long, no matter the size of
7835 the hosts long. We handle floating point representations with
7839 if ((off
== -1 && total_bytes
> len
) || off
>= total_bytes
)
7846 return MIN (len
, total_bytes
- off
);
7848 words
= (32 / BITS_PER_UNIT
) / UNITS_PER_WORD
;
7850 real_to_target (tmp
, TREE_REAL_CST_PTR (expr
), TYPE_MODE (type
));
7852 for (bitpos
= 0; bitpos
< total_bytes
* BITS_PER_UNIT
;
7853 bitpos
+= BITS_PER_UNIT
)
7855 byte
= (bitpos
/ BITS_PER_UNIT
) & 3;
7856 value
= (unsigned char) (tmp
[bitpos
/ 32] >> (bitpos
& 31));
7858 if (UNITS_PER_WORD
< 4)
7860 word
= byte
/ UNITS_PER_WORD
;
7861 if (WORDS_BIG_ENDIAN
)
7862 word
= (words
- 1) - word
;
7863 offset
= word
* UNITS_PER_WORD
;
7864 if (BYTES_BIG_ENDIAN
)
7865 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7867 offset
+= byte
% UNITS_PER_WORD
;
7872 if (BYTES_BIG_ENDIAN
)
7874 /* Reverse bytes within each long, or within the entire float
7875 if it's smaller than a long (for HFmode). */
7876 offset
= MIN (3, total_bytes
- 1) - offset
;
7877 gcc_assert (offset
>= 0);
7880 offset
= offset
+ ((bitpos
/ BITS_PER_UNIT
) & ~3);
7882 && offset
- off
< len
)
7883 ptr
[offset
- off
] = value
;
7885 return MIN (len
, total_bytes
- off
);
7888 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7889 specified by EXPR into the buffer PTR of length LEN bytes.
7890 Return the number of bytes placed in the buffer, or zero
7894 native_encode_complex (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7899 part
= TREE_REALPART (expr
);
7900 rsize
= native_encode_expr (part
, ptr
, len
, off
);
7901 if (off
== -1 && rsize
== 0)
7903 part
= TREE_IMAGPART (expr
);
7905 off
= MAX (0, off
- GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part
))));
7906 isize
= native_encode_expr (part
, ptr
? ptr
+ rsize
: NULL
,
7908 if (off
== -1 && isize
!= rsize
)
7910 return rsize
+ isize
;
/* Like native_encode_vector, but only encode the first COUNT elements.
   The other arguments are as for native_encode_vector.  */
7917 native_encode_vector_part (const_tree expr
, unsigned char *ptr
, int len
,
7918 int off
, unsigned HOST_WIDE_INT count
)
7920 tree itype
= TREE_TYPE (TREE_TYPE (expr
));
7921 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (expr
))
7922 && TYPE_PRECISION (itype
) <= BITS_PER_UNIT
)
      /* This is the only case in which elements can be smaller than a byte.
         Element 0 is always in the lsb of the containing byte.  */
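      /* Illustrative sketch (not part of the original sources): a boolean
         vector with 1-bit elements {1, 0, 1, 1, 0, 0, 0, 0} occupies one
         byte and encodes as 0x0d, with element 0 in the least significant
         bit.  */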
7926 unsigned int elt_bits
= TYPE_PRECISION (itype
);
7927 int total_bytes
= CEIL (elt_bits
* count
, BITS_PER_UNIT
);
7928 if ((off
== -1 && total_bytes
> len
) || off
>= total_bytes
)
7934 /* Zero the buffer and then set bits later where necessary. */
7935 int extract_bytes
= MIN (len
, total_bytes
- off
);
7937 memset (ptr
, 0, extract_bytes
);
7939 unsigned int elts_per_byte
= BITS_PER_UNIT
/ elt_bits
;
7940 unsigned int first_elt
= off
* elts_per_byte
;
7941 unsigned int extract_elts
= extract_bytes
* elts_per_byte
;
7942 for (unsigned int i
= 0; i
< extract_elts
; ++i
)
7944 tree elt
= VECTOR_CST_ELT (expr
, first_elt
+ i
);
7945 if (TREE_CODE (elt
) != INTEGER_CST
)
7948 if (ptr
&& wi::extract_uhwi (wi::to_wide (elt
), 0, 1))
7950 unsigned int bit
= i
* elt_bits
;
7951 ptr
[bit
/ BITS_PER_UNIT
] |= 1 << (bit
% BITS_PER_UNIT
);
7954 return extract_bytes
;
7958 int size
= GET_MODE_SIZE (SCALAR_TYPE_MODE (itype
));
7959 for (unsigned HOST_WIDE_INT i
= 0; i
< count
; i
++)
7966 tree elem
= VECTOR_CST_ELT (expr
, i
);
7967 int res
= native_encode_expr (elem
, ptr
? ptr
+ offset
: NULL
,
7969 if ((off
== -1 && res
!= size
) || res
== 0)
7973 return (off
== -1 && i
< count
- 1) ? 0 : offset
;
7980 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7981 specified by EXPR into the buffer PTR of length LEN bytes.
7982 Return the number of bytes placed in the buffer, or zero
7986 native_encode_vector (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7988 unsigned HOST_WIDE_INT count
;
7989 if (!VECTOR_CST_NELTS (expr
).is_constant (&count
))
7991 return native_encode_vector_part (expr
, ptr
, len
, off
, count
);
7995 /* Subroutine of native_encode_expr. Encode the STRING_CST
7996 specified by EXPR into the buffer PTR of length LEN bytes.
7997 Return the number of bytes placed in the buffer, or zero
8001 native_encode_string (const_tree expr
, unsigned char *ptr
, int len
, int off
)
8003 tree type
= TREE_TYPE (expr
);
8005 /* Wide-char strings are encoded in target byte-order so native
8006 encoding them is trivial. */
8007 if (BITS_PER_UNIT
!= CHAR_BIT
8008 || TREE_CODE (type
) != ARRAY_TYPE
8009 || TREE_CODE (TREE_TYPE (type
)) != INTEGER_TYPE
8010 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type
)))
8013 HOST_WIDE_INT total_bytes
= tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr
)));
8014 if ((off
== -1 && total_bytes
> len
) || off
>= total_bytes
)
8018 len
= MIN (total_bytes
- off
, len
);
8024 if (off
< TREE_STRING_LENGTH (expr
))
8026 written
= MIN (len
, TREE_STRING_LENGTH (expr
) - off
);
8027 memcpy (ptr
, TREE_STRING_POINTER (expr
) + off
, written
);
8029 memset (ptr
+ written
, 0, len
- written
);
/* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST, REAL_CST,
   FIXED_CST, COMPLEX_CST, STRING_CST, or VECTOR_CST specified by EXPR into
   the buffer PTR of size LEN bytes.  If PTR is NULL, don't actually store
   anything, just do a dry run.  Fail either if OFF is -1 and LEN isn't
   sufficient to encode the entire EXPR, or if OFF is out of bounds.
   Otherwise, start at byte offset OFF and encode at most LEN bytes.
   Return the number of bytes placed in the buffer, or zero upon failure.  */
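/* Illustrative usage (not part of the original sources; "buf" is a
   hypothetical caller-provided array):

     unsigned char buf[64];
     int n = native_encode_expr (expr, NULL, sizeof buf, -1);  // dry run
     if (n > 0)
       n = native_encode_expr (expr, buf, n, -1);              // real encode

   The dry run fails (returns 0) if the constant needs more than the
   LEN bytes offered.  */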
8044 native_encode_expr (const_tree expr
, unsigned char *ptr
, int len
, int off
)
8046 /* We don't support starting at negative offset and -1 is special. */
8050 switch (TREE_CODE (expr
))
8053 return native_encode_int (expr
, ptr
, len
, off
);
8056 return native_encode_real (expr
, ptr
, len
, off
);
8059 return native_encode_fixed (expr
, ptr
, len
, off
);
8062 return native_encode_complex (expr
, ptr
, len
, off
);
8065 return native_encode_vector (expr
, ptr
, len
, off
);
8068 return native_encode_string (expr
, ptr
, len
, off
);
/* Try to find a type whose byte size is smaller or equal to LEN bytes and
   larger or equal to FIELDSIZE bytes, with underlying mode precision/size
   a multiple of BITS_PER_UNIT.  As native_{interpret,encode}_int works in
   terms of machine modes, we can't just use build_nonstandard_integer_type.  */
8081 find_bitfield_repr_type (int fieldsize
, int len
)
8084 for (int pass
= 0; pass
< 2; pass
++)
8086 enum mode_class mclass
= pass
? MODE_PARTIAL_INT
: MODE_INT
;
8087 FOR_EACH_MODE_IN_CLASS (mode
, mclass
)
8088 if (known_ge (GET_MODE_SIZE (mode
), fieldsize
)
8089 && known_eq (GET_MODE_PRECISION (mode
),
8090 GET_MODE_BITSIZE (mode
))
8091 && known_le (GET_MODE_SIZE (mode
), len
))
8093 tree ret
= lang_hooks
.types
.type_for_mode (mode
, 1);
8094 if (ret
&& TYPE_MODE (ret
) == mode
)
8099 for (int i
= 0; i
< NUM_INT_N_ENTS
; i
++)
8100 if (int_n_enabled_p
[i
]
8101 && int_n_data
[i
].bitsize
>= (unsigned) (BITS_PER_UNIT
* fieldsize
)
8102 && int_n_trees
[i
].unsigned_type
)
8104 tree ret
= int_n_trees
[i
].unsigned_type
;
8105 mode
= TYPE_MODE (ret
);
8106 if (known_ge (GET_MODE_SIZE (mode
), fieldsize
)
8107 && known_eq (GET_MODE_PRECISION (mode
),
8108 GET_MODE_BITSIZE (mode
))
8109 && known_le (GET_MODE_SIZE (mode
), len
))
/* Similar to native_encode_expr, but also handle CONSTRUCTORs, VCEs,
   NON_LVALUE_EXPRs and nops.  If MASK is non-NULL (then PTR has
   to be non-NULL and OFF zero), then in addition to filling the
   bytes pointed to by PTR with the value, also clear any bits pointed
   to by MASK that are known to be initialized, and keep them as-is for
   e.g. uninitialized padding bits or uninitialized fields.  */
8124 native_encode_initializer (tree init
, unsigned char *ptr
, int len
,
8125 int off
, unsigned char *mask
)
8129 /* We don't support starting at negative offset and -1 is special. */
8130 if (off
< -1 || init
== NULL_TREE
)
8133 gcc_assert (mask
== NULL
|| (off
== 0 && ptr
));
8136 switch (TREE_CODE (init
))
8138 case VIEW_CONVERT_EXPR
:
8139 case NON_LVALUE_EXPR
:
8140 return native_encode_initializer (TREE_OPERAND (init
, 0), ptr
, len
, off
,
8143 r
= native_encode_expr (init
, ptr
, len
, off
);
8145 memset (mask
, 0, r
);
8148 tree type
= TREE_TYPE (init
);
8149 HOST_WIDE_INT total_bytes
= int_size_in_bytes (type
);
8150 if (total_bytes
< 0)
8152 if ((off
== -1 && total_bytes
> len
) || off
>= total_bytes
)
8154 int o
= off
== -1 ? 0 : off
;
8155 if (TREE_CODE (type
) == ARRAY_TYPE
)
8158 unsigned HOST_WIDE_INT cnt
;
8159 HOST_WIDE_INT curpos
= 0, fieldsize
, valueinit
= -1;
8160 constructor_elt
*ce
;
8162 if (!TYPE_DOMAIN (type
)
8163 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (type
))) != INTEGER_CST
)
8166 fieldsize
= int_size_in_bytes (TREE_TYPE (type
));
8170 min_index
= TYPE_MIN_VALUE (TYPE_DOMAIN (type
));
8172 memset (ptr
, '\0', MIN (total_bytes
- off
, len
));
8174 for (cnt
= 0; ; cnt
++)
8176 tree val
= NULL_TREE
, index
= NULL_TREE
;
8177 HOST_WIDE_INT pos
= curpos
, count
= 0;
8179 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init
), cnt
, &ce
))
8184 else if (mask
== NULL
8185 || CONSTRUCTOR_NO_CLEARING (init
)
8186 || curpos
>= total_bytes
)
8191 if (index
&& TREE_CODE (index
) == RANGE_EXPR
)
8193 if (TREE_CODE (TREE_OPERAND (index
, 0)) != INTEGER_CST
8194 || TREE_CODE (TREE_OPERAND (index
, 1)) != INTEGER_CST
)
8197 = wi::sext (wi::to_offset (TREE_OPERAND (index
, 0))
8198 - wi::to_offset (min_index
),
8199 TYPE_PRECISION (sizetype
));
8201 if (!wi::fits_shwi_p (pos
))
8203 pos
= wpos
.to_shwi ();
8205 = wi::sext (wi::to_offset (TREE_OPERAND (index
, 1))
8206 - wi::to_offset (TREE_OPERAND (index
, 0)),
8207 TYPE_PRECISION (sizetype
));
8208 if (!wi::fits_shwi_p (wcount
))
8210 count
= wcount
.to_shwi ();
8214 if (TREE_CODE (index
) != INTEGER_CST
)
8217 = wi::sext (wi::to_offset (index
)
8218 - wi::to_offset (min_index
),
8219 TYPE_PRECISION (sizetype
));
8221 if (!wi::fits_shwi_p (wpos
))
8223 pos
= wpos
.to_shwi ();
8226 if (mask
&& !CONSTRUCTOR_NO_CLEARING (init
) && curpos
!= pos
)
8228 if (valueinit
== -1)
8230 tree zero
= build_zero_cst (TREE_TYPE (type
));
8231 r
= native_encode_initializer (zero
, ptr
+ curpos
,
8234 if (TREE_CODE (zero
) == CONSTRUCTOR
)
8239 curpos
+= fieldsize
;
8241 while (curpos
!= pos
)
8243 memcpy (ptr
+ curpos
, ptr
+ valueinit
, fieldsize
);
8244 memcpy (mask
+ curpos
, mask
+ valueinit
, fieldsize
);
8245 curpos
+= fieldsize
;
8255 && (curpos
+ fieldsize
8256 <= (HOST_WIDE_INT
) off
+ len
)))
8261 memcpy (ptr
+ (curpos
- o
), ptr
+ (pos
- o
),
8264 memcpy (mask
+ curpos
, mask
+ pos
, fieldsize
);
8266 else if (!native_encode_initializer (val
,
8283 else if (curpos
+ fieldsize
> off
8284 && curpos
< (HOST_WIDE_INT
) off
+ len
)
8286 /* Partial overlap. */
8287 unsigned char *p
= NULL
;
8290 gcc_assert (mask
== NULL
);
8294 p
= ptr
+ curpos
- off
;
8295 l
= MIN ((HOST_WIDE_INT
) off
+ len
- curpos
,
8304 if (!native_encode_initializer (val
, p
, l
, no
, NULL
))
8307 curpos
+= fieldsize
;
8309 while (count
-- != 0);
8311 return MIN (total_bytes
- off
, len
);
8313 else if (TREE_CODE (type
) == RECORD_TYPE
8314 || TREE_CODE (type
) == UNION_TYPE
)
8316 unsigned HOST_WIDE_INT cnt
;
8317 constructor_elt
*ce
;
8318 tree fld_base
= TYPE_FIELDS (type
);
8319 tree to_free
= NULL_TREE
;
8321 gcc_assert (TREE_CODE (type
) == RECORD_TYPE
|| mask
== NULL
);
8323 memset (ptr
, '\0', MIN (total_bytes
- o
, len
));
8324 for (cnt
= 0; ; cnt
++)
8326 tree val
= NULL_TREE
, field
= NULL_TREE
;
8327 HOST_WIDE_INT pos
= 0, fieldsize
;
8328 unsigned HOST_WIDE_INT bpos
= 0, epos
= 0;
8333 to_free
= NULL_TREE
;
8336 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init
), cnt
, &ce
))
8340 if (field
== NULL_TREE
)
8343 pos
= int_byte_position (field
);
8344 if (off
!= -1 && (HOST_WIDE_INT
) off
+ len
<= pos
)
8347 else if (mask
== NULL
8348 || CONSTRUCTOR_NO_CLEARING (init
))
8353 if (mask
&& !CONSTRUCTOR_NO_CLEARING (init
))
8356 for (fld
= fld_base
; fld
; fld
= DECL_CHAIN (fld
))
8358 if (TREE_CODE (fld
) != FIELD_DECL
)
8362 if (DECL_PADDING_P (fld
))
8364 if (DECL_SIZE_UNIT (fld
) == NULL_TREE
8365 || !tree_fits_shwi_p (DECL_SIZE_UNIT (fld
)))
8367 if (integer_zerop (DECL_SIZE_UNIT (fld
)))
8371 if (fld
== NULL_TREE
)
8377 fld_base
= DECL_CHAIN (fld
);
8382 pos
= int_byte_position (field
);
8383 val
= build_zero_cst (TREE_TYPE (fld
));
8384 if (TREE_CODE (val
) == CONSTRUCTOR
)
8389 if (TREE_CODE (TREE_TYPE (field
)) == ARRAY_TYPE
8390 && TYPE_DOMAIN (TREE_TYPE (field
))
8391 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field
))))
8393 if (mask
|| off
!= -1)
8395 if (val
== NULL_TREE
)
8397 if (TREE_CODE (TREE_TYPE (val
)) != ARRAY_TYPE
)
8399 fieldsize
= int_size_in_bytes (TREE_TYPE (val
));
8401 || (int) fieldsize
!= fieldsize
8402 || (pos
+ fieldsize
) > INT_MAX
)
8404 if (pos
+ fieldsize
> total_bytes
)
8406 if (ptr
!= NULL
&& total_bytes
< len
)
8407 memset (ptr
+ total_bytes
, '\0',
8408 MIN (pos
+ fieldsize
, len
) - total_bytes
);
8409 total_bytes
= pos
+ fieldsize
;
8414 if (DECL_SIZE_UNIT (field
) == NULL_TREE
8415 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field
)))
8417 fieldsize
= tree_to_shwi (DECL_SIZE_UNIT (field
));
8422 if (DECL_BIT_FIELD (field
))
8424 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field
)))
8426 fieldsize
= TYPE_PRECISION (TREE_TYPE (field
));
8427 bpos
= tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field
));
8428 if (bpos
% BITS_PER_UNIT
)
8429 bpos
%= BITS_PER_UNIT
;
8433 epos
= fieldsize
% BITS_PER_UNIT
;
8434 fieldsize
+= BITS_PER_UNIT
- 1;
8435 fieldsize
/= BITS_PER_UNIT
;
8438 if (off
!= -1 && pos
+ fieldsize
<= off
)
8441 if (val
== NULL_TREE
)
8444 if (DECL_BIT_FIELD (field
))
8446 /* FIXME: Handle PDP endian. */
8447 if (BYTES_BIG_ENDIAN
!= WORDS_BIG_ENDIAN
)
8450 if (TREE_CODE (val
) != INTEGER_CST
)
8453 tree repr
= DECL_BIT_FIELD_REPRESENTATIVE (field
);
8454 tree repr_type
= NULL_TREE
;
8455 HOST_WIDE_INT rpos
= 0;
8456 if (repr
&& INTEGRAL_TYPE_P (TREE_TYPE (repr
)))
8458 rpos
= int_byte_position (repr
);
8459 repr_type
= TREE_TYPE (repr
);
8463 repr_type
= find_bitfield_repr_type (fieldsize
, len
);
8464 if (repr_type
== NULL_TREE
)
8466 HOST_WIDE_INT repr_size
= int_size_in_bytes (repr_type
);
8467 gcc_assert (repr_size
> 0 && repr_size
<= len
);
8468 if (pos
+ repr_size
<= o
+ len
)
8472 rpos
= o
+ len
- repr_size
;
8473 gcc_assert (rpos
<= pos
);
8479 wide_int w
= wi::to_wide (val
, TYPE_PRECISION (repr_type
));
8480 int diff
= (TYPE_PRECISION (repr_type
)
8481 - TYPE_PRECISION (TREE_TYPE (field
)));
8482 HOST_WIDE_INT bitoff
= (pos
- rpos
) * BITS_PER_UNIT
+ bpos
;
8483 if (!BYTES_BIG_ENDIAN
)
8484 w
= wi::lshift (w
, bitoff
);
8486 w
= wi::lshift (w
, diff
- bitoff
);
8487 val
= wide_int_to_tree (repr_type
, w
);
8489 unsigned char buf
[MAX_BITSIZE_MODE_ANY_INT
8490 / BITS_PER_UNIT
+ 1];
8491 int l
= native_encode_int (val
, buf
, sizeof buf
, 0);
8492 if (l
* BITS_PER_UNIT
!= TYPE_PRECISION (repr_type
))
8498 /* If the bitfield does not start at byte boundary, handle
8499 the partial byte at the start. */
8501 && (off
== -1 || (pos
>= off
&& len
>= 1)))
8503 if (!BYTES_BIG_ENDIAN
)
8505 int msk
= (1 << bpos
) - 1;
8506 buf
[pos
- rpos
] &= ~msk
;
8507 buf
[pos
- rpos
] |= ptr
[pos
- o
] & msk
;
8510 if (fieldsize
> 1 || epos
== 0)
8513 mask
[pos
] &= (msk
| ~((1 << epos
) - 1));
8518 int msk
= (1 << (BITS_PER_UNIT
- bpos
)) - 1;
8519 buf
[pos
- rpos
] &= msk
;
8520 buf
[pos
- rpos
] |= ptr
[pos
- o
] & ~msk
;
8523 if (fieldsize
> 1 || epos
== 0)
8527 | ((1 << (BITS_PER_UNIT
- epos
))
8532 /* If the bitfield does not end at byte boundary, handle
8533 the partial byte at the end. */
8536 || pos
+ fieldsize
<= (HOST_WIDE_INT
) off
+ len
))
8538 if (!BYTES_BIG_ENDIAN
)
8540 int msk
= (1 << epos
) - 1;
8541 buf
[pos
- rpos
+ fieldsize
- 1] &= msk
;
8542 buf
[pos
- rpos
+ fieldsize
- 1]
8543 |= ptr
[pos
+ fieldsize
- 1 - o
] & ~msk
;
8544 if (mask
&& (fieldsize
> 1 || bpos
== 0))
8545 mask
[pos
+ fieldsize
- 1] &= ~msk
;
8549 int msk
= (1 << (BITS_PER_UNIT
- epos
)) - 1;
8550 buf
[pos
- rpos
+ fieldsize
- 1] &= ~msk
;
8551 buf
[pos
- rpos
+ fieldsize
- 1]
8552 |= ptr
[pos
+ fieldsize
- 1 - o
] & msk
;
8553 if (mask
&& (fieldsize
> 1 || bpos
== 0))
8554 mask
[pos
+ fieldsize
- 1] &= msk
;
8559 && (pos
+ fieldsize
<= (HOST_WIDE_INT
) off
+ len
)))
8561 memcpy (ptr
+ pos
- o
, buf
+ (pos
- rpos
), fieldsize
);
8562 if (mask
&& (fieldsize
> (bpos
!= 0) + (epos
!= 0)))
8563 memset (mask
+ pos
+ (bpos
!= 0), 0,
8564 fieldsize
- (bpos
!= 0) - (epos
!= 0));
8568 /* Partial overlap. */
8569 HOST_WIDE_INT fsz
= fieldsize
;
8570 gcc_assert (mask
== NULL
);
8576 if (pos
+ fsz
> (HOST_WIDE_INT
) off
+ len
)
8577 fsz
= (HOST_WIDE_INT
) off
+ len
- pos
;
8578 memcpy (ptr
+ pos
- off
, buf
+ (pos
- rpos
), fsz
);
8585 && (pos
+ fieldsize
<= (HOST_WIDE_INT
) off
+ len
)))
8587 int fldsize
= fieldsize
;
8590 tree fld
= DECL_CHAIN (field
);
8593 if (TREE_CODE (fld
) == FIELD_DECL
)
8595 fld
= DECL_CHAIN (fld
);
8597 if (fld
== NULL_TREE
)
8598 fldsize
= len
- pos
;
8600 r
= native_encode_initializer (val
, ptr
? ptr
+ pos
- o
8604 mask
? mask
+ pos
: NULL
);
8608 && fldsize
!= fieldsize
8610 && pos
+ r
> total_bytes
)
8611 total_bytes
= pos
+ r
;
8615 /* Partial overlap. */
8616 unsigned char *p
= NULL
;
8619 gcc_assert (mask
== NULL
);
8623 p
= ptr
+ pos
- off
;
8624 l
= MIN ((HOST_WIDE_INT
) off
+ len
- pos
,
8633 if (!native_encode_initializer (val
, p
, l
, no
, NULL
))
8637 return MIN (total_bytes
- off
, len
);
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_int (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));

  if (total_bytes > len
      || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return NULL_TREE;

  wide_int result = wi::from_buffer (ptr, total_bytes);

  return wide_int_to_tree (type, result);
}
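
/* Worked example (illustrative, assuming BITS_PER_UNIT == 8 and a
   little-endian target): the buffer { 0x2a, 0x00, 0x00, 0x00 } read as a
   32-bit INTEGER_CST yields the constant 42, because wi::from_buffer
   assembles the wide_int from the bytes in target byte order.  */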
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a FIXED_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_fixed (tree type, const unsigned char *ptr, int len)
{
  scalar_mode mode = SCALAR_TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  double_int result;
  FIXED_VALUE_TYPE fixed_value;

  if (total_bytes > len
      || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return NULL_TREE;

  result = double_int::from_buffer (ptr, total_bytes);
  fixed_value = fixed_from_double_int (result, mode);

  return build_fixed (type, fixed_value);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a REAL_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

tree
native_interpret_real (tree type, const unsigned char *ptr, int len)
{
  scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  REAL_VALUE_TYPE r;
  long tmp[6];

  if (total_bytes > len || total_bytes > 24)
    return NULL_TREE;
  int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  memset (tmp, 0, sizeof (tmp));
  for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      /* Both OFFSET and BYTE index within a long;
         bitpos indexes the whole float.  */
      int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
      if (UNITS_PER_WORD < 4)
        {
          int word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        {
          offset = byte;
          if (BYTES_BIG_ENDIAN)
            {
              /* Reverse bytes within each long, or within the entire float
                 if it's smaller than a long (for HFmode).  */
              offset = MIN (3, total_bytes - 1) - offset;
              gcc_assert (offset >= 0);
            }
        }
      value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];

      tmp[bitpos / 32] |= (unsigned long) value << (bitpos & 31);
    }

  real_from_target (&r, tmp, mode);
  return build_real (type, r);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_complex (tree type, const unsigned char *ptr, int len)
{
  tree etype, rpart, ipart;
  int size;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
  if (size * 2 > len)
    return NULL_TREE;
  rpart = native_interpret_expr (etype, ptr, size);
  if (!rpart)
    return NULL_TREE;
  ipart = native_interpret_expr (etype, ptr + size, size);
  if (!ipart)
    return NULL_TREE;
  return build_complex (type, rpart, ipart);
}
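
/* Illustrative note: for _Complex float on a target with 4-byte floats,
   the first 4 bytes of PTR are interpreted as the real part and the next
   4 bytes as the imaginary part, so LEN must be at least 8 for the
   interpretation to succeed.  */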
/* Read a vector of type TYPE from the target memory image given by BYTES,
   which contains LEN bytes.  The vector is known to be encodable using
   NPATTERNS interleaved patterns with NELTS_PER_PATTERN elements each.

   Return the vector on success, otherwise return null.  */

static tree
native_interpret_vector_part (tree type, const unsigned char *bytes,
                              unsigned int len, unsigned int npatterns,
                              unsigned int nelts_per_pattern)
{
  tree elt_type = TREE_TYPE (type);
  if (VECTOR_BOOLEAN_TYPE_P (type)
      && TYPE_PRECISION (elt_type) <= BITS_PER_UNIT)
    {
      /* This is the only case in which elements can be smaller than a byte.
         Element 0 is always in the lsb of the containing byte.  */
      unsigned int elt_bits = TYPE_PRECISION (elt_type);
      if (elt_bits * npatterns * nelts_per_pattern > len * BITS_PER_UNIT)
        return NULL_TREE;

      tree_vector_builder builder (type, npatterns, nelts_per_pattern);
      for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
        {
          unsigned int bit_index = i * elt_bits;
          unsigned int byte_index = bit_index / BITS_PER_UNIT;
          unsigned int lsb = bit_index % BITS_PER_UNIT;
          builder.quick_push (bytes[byte_index] & (1 << lsb)
                              ? build_all_ones_cst (elt_type)
                              : build_zero_cst (elt_type));
        }
      return builder.build ();
    }

  unsigned int elt_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (elt_type));
  if (elt_bytes * npatterns * nelts_per_pattern > len)
    return NULL_TREE;

  tree_vector_builder builder (type, npatterns, nelts_per_pattern);
  for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
    {
      tree elt = native_interpret_expr (elt_type, bytes, elt_bytes);
      if (!elt)
        return NULL_TREE;
      builder.quick_push (elt);
      bytes += elt_bytes;
    }
  return builder.build ();
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
{
  tree etype;
  unsigned int size;
  unsigned HOST_WIDE_INT count;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
  if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&count)
      || size * count > len)
    return NULL_TREE;

  return native_interpret_vector_part (type, ptr, len, count, 1);
}
/* Subroutine of fold_view_convert_expr.  Interpret the contents of
   the buffer PTR of length LEN as a constant of type TYPE.  For
   INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
   we return a REAL_CST, etc...  If the buffer cannot be interpreted,
   return NULL_TREE.  */

tree
native_interpret_expr (tree type, const unsigned char *ptr, int len)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case OFFSET_TYPE:
      return native_interpret_int (type, ptr, len);

    case REAL_TYPE:
      if (tree ret = native_interpret_real (type, ptr, len))
        {
          /* For floating point values in composite modes, punt if this
             folding doesn't preserve bit representation.  As the mode doesn't
             have fixed precision while GCC pretends it does, there could be
             valid values that GCC can't really represent accurately.
             See PR95450.  Even for other modes, e.g. x86 XFmode can have some
             bit combinations which GCC doesn't preserve.  */
          unsigned char buf[24];
          scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
          int total_bytes = GET_MODE_SIZE (mode);
          if (native_encode_expr (ret, buf, total_bytes, 0) != total_bytes
              || memcmp (ptr, buf, total_bytes) != 0)
            return NULL_TREE;
          return ret;
        }
      return NULL_TREE;

    case FIXED_POINT_TYPE:
      return native_interpret_fixed (type, ptr, len);

    case COMPLEX_TYPE:
      return native_interpret_complex (type, ptr, len);

    case VECTOR_TYPE:
      return native_interpret_vector (type, ptr, len);

    default:
      return NULL_TREE;
    }
}
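
/* Usage sketch (illustrative): callers such as fold_view_convert_expr first
   serialize a constant with native_encode_expr and then hand the resulting
   byte image to native_interpret_expr with the destination type, e.g.
   re-reading the 4 bytes of a 32-bit INTEGER_CST as a REAL_CST.  */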
/* Returns true if we can interpret the contents of a native encoding
   as TYPE.  */

bool
can_native_interpret_type_p (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case FIXED_POINT_TYPE:
    case REAL_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case OFFSET_TYPE:
      return true;
    default:
      return false;
    }
}
/* Attempt to interpret aggregate of TYPE from bytes encoded in target
   byte order at PTR + OFF with LEN bytes.  Does not handle unions.  */

tree
native_interpret_aggregate (tree type, const unsigned char *ptr, int off,
                            int len)
{
  vec<constructor_elt, va_gc> *elts = NULL;
8923 if (TREE_CODE (type
) == ARRAY_TYPE
)
8925 HOST_WIDE_INT eltsz
= int_size_in_bytes (TREE_TYPE (type
));
8926 if (eltsz
< 0 || eltsz
> len
|| TYPE_DOMAIN (type
) == NULL_TREE
)
8929 HOST_WIDE_INT cnt
= 0;
8930 if (TYPE_MAX_VALUE (TYPE_DOMAIN (type
)))
8932 if (!tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type
))))
8934 cnt
= tree_to_shwi (TYPE_MAX_VALUE (TYPE_DOMAIN (type
))) + 1;
8938 HOST_WIDE_INT pos
= 0;
8939 for (HOST_WIDE_INT i
= 0; i
< cnt
; i
++, pos
+= eltsz
)
8942 if (pos
>= len
|| pos
+ eltsz
> len
)
8944 if (can_native_interpret_type_p (TREE_TYPE (type
)))
8946 v
= native_interpret_expr (TREE_TYPE (type
),
8947 ptr
+ off
+ pos
, eltsz
);
8951 else if (TREE_CODE (TREE_TYPE (type
)) == RECORD_TYPE
8952 || TREE_CODE (TREE_TYPE (type
)) == ARRAY_TYPE
)
8953 v
= native_interpret_aggregate (TREE_TYPE (type
), ptr
, off
+ pos
,
8957 CONSTRUCTOR_APPEND_ELT (elts
, size_int (i
), v
);
8959 return build_constructor (type
, elts
);
8961 if (TREE_CODE (type
) != RECORD_TYPE
)
8963 for (tree field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
8965 if (TREE_CODE (field
) != FIELD_DECL
|| DECL_PADDING_P (field
))
8968 HOST_WIDE_INT bitoff
= 0, pos
= 0, sz
= 0;
8971 if (DECL_BIT_FIELD (field
))
8973 fld
= DECL_BIT_FIELD_REPRESENTATIVE (field
);
8974 if (fld
&& INTEGRAL_TYPE_P (TREE_TYPE (fld
)))
8976 poly_int64 bitoffset
;
8977 poly_uint64 field_offset
, fld_offset
;
8978 if (poly_int_tree_p (DECL_FIELD_OFFSET (field
), &field_offset
)
8979 && poly_int_tree_p (DECL_FIELD_OFFSET (fld
), &fld_offset
))
8980 bitoffset
= (field_offset
- fld_offset
) * BITS_PER_UNIT
;
8983 bitoffset
+= (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field
))
8984 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld
)));
8985 diff
= (TYPE_PRECISION (TREE_TYPE (fld
))
8986 - TYPE_PRECISION (TREE_TYPE (field
)));
8987 if (!bitoffset
.is_constant (&bitoff
)
8994 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field
)))
8996 int fieldsize
= TYPE_PRECISION (TREE_TYPE (field
));
8997 int bpos
= tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field
));
8998 bpos
%= BITS_PER_UNIT
;
9000 fieldsize
+= BITS_PER_UNIT
- 1;
9001 fieldsize
/= BITS_PER_UNIT
;
9002 tree repr_type
= find_bitfield_repr_type (fieldsize
, len
);
9003 if (repr_type
== NULL_TREE
)
9005 sz
= int_size_in_bytes (repr_type
);
9006 if (sz
< 0 || sz
> len
)
9008 pos
= int_byte_position (field
);
9009 if (pos
< 0 || pos
> len
|| pos
+ fieldsize
> len
)
9012 if (pos
+ sz
<= len
)
9017 gcc_assert (rpos
<= pos
);
9019 bitoff
= (HOST_WIDE_INT
) (pos
- rpos
) * BITS_PER_UNIT
+ bpos
;
9021 diff
= (TYPE_PRECISION (repr_type
)
9022 - TYPE_PRECISION (TREE_TYPE (field
)));
9023 v
= native_interpret_expr (repr_type
, ptr
+ off
+ pos
, sz
);
9032 sz
= int_size_in_bytes (TREE_TYPE (fld
));
9033 if (sz
< 0 || sz
> len
)
9035 tree byte_pos
= byte_position (fld
);
9036 if (!tree_fits_shwi_p (byte_pos
))
9038 pos
= tree_to_shwi (byte_pos
);
9039 if (pos
< 0 || pos
> len
|| pos
+ sz
> len
)
9042 if (fld
== NULL_TREE
)
9043 /* Already handled above. */;
9044 else if (can_native_interpret_type_p (TREE_TYPE (fld
)))
9046 v
= native_interpret_expr (TREE_TYPE (fld
),
9047 ptr
+ off
+ pos
, sz
);
9051 else if (TREE_CODE (TREE_TYPE (fld
)) == RECORD_TYPE
9052 || TREE_CODE (TREE_TYPE (fld
)) == ARRAY_TYPE
)
9053 v
= native_interpret_aggregate (TREE_TYPE (fld
), ptr
, off
+ pos
, sz
);
9058 if (TREE_CODE (v
) != INTEGER_CST
)
9061 /* FIXME: Figure out how to handle PDP endian bitfields. */
9062 if (BYTES_BIG_ENDIAN
!= WORDS_BIG_ENDIAN
)
9064 if (!BYTES_BIG_ENDIAN
)
9065 v
= wide_int_to_tree (TREE_TYPE (field
),
9066 wi::lrshift (wi::to_wide (v
), bitoff
));
9068 v
= wide_int_to_tree (TREE_TYPE (field
),
9069 wi::lrshift (wi::to_wide (v
),
9072 CONSTRUCTOR_APPEND_ELT (elts
, field
, v
);
9074 return build_constructor (type
, elts
);
/* Routines for manipulation of native_encode_expr encoded data if the encoded
   or extracted constant positions and/or sizes aren't byte aligned.  */

/* Shift left the bytes in PTR of SZ elements by AMNT bits, carrying over the
   bits between adjacent elements.  AMNT should be within
   [0, BITS_PER_UNIT).
   Example, AMNT = 2:
   00011111|11100000 << 2 = 01111111|10000000
   PTR[1]  | PTR[0]         PTR[1]  | PTR[0].  */

void
shift_bytes_in_array_left (unsigned char *ptr, unsigned int sz,
                           unsigned int amnt)
{
  if (amnt == 0)
    return;

  unsigned char carry_over = 0U;
  unsigned char carry_mask = (~0U) << (unsigned char) (BITS_PER_UNIT - amnt);
  unsigned char clear_mask = (~0U) << amnt;

  for (unsigned int i = 0; i < sz; i++)
    {
      unsigned prev_carry_over = carry_over;
      carry_over = (ptr[i] & carry_mask) >> (BITS_PER_UNIT - amnt);

      ptr[i] <<= amnt;
      ptr[i] &= clear_mask;
      ptr[i] |= prev_carry_over;
    }
}
/* Like shift_bytes_in_array_left but for big-endian.
   Shift right the bytes in PTR of SZ elements by AMNT bits, carrying over the
   bits between adjacent elements.  AMNT should be within
   [0, BITS_PER_UNIT).
   Example, AMNT = 2:
   00011111|11100000 >> 2 = 00000111|11111000
   PTR[0]  | PTR[1]         PTR[0]  | PTR[1].  */

void
shift_bytes_in_array_right (unsigned char *ptr, unsigned int sz,
                            unsigned int amnt)
{
  if (amnt == 0)
    return;

  unsigned char carry_over = 0U;
  unsigned char carry_mask = ~(~0U << amnt);

  for (unsigned int i = 0; i < sz; i++)
    {
      unsigned prev_carry_over = carry_over;
      carry_over = ptr[i] & carry_mask;

      carry_over <<= (unsigned char) BITS_PER_UNIT - amnt;
      ptr[i] >>= amnt;
      ptr[i] |= prev_carry_over;
    }
}
/* Try to view-convert VECTOR_CST EXPR to VECTOR_TYPE TYPE by operating
   directly on the VECTOR_CST encoding, in a way that works for variable-
   length vectors.  Return the resulting VECTOR_CST on success or null
   on failure.  */

static tree
fold_view_convert_vector_encoding (tree type, tree expr)
{
  tree expr_type = TREE_TYPE (expr);
  poly_uint64 type_bits, expr_bits;
  if (!poly_int_tree_p (TYPE_SIZE (type), &type_bits)
      || !poly_int_tree_p (TYPE_SIZE (expr_type), &expr_bits))
    return NULL_TREE;

  poly_uint64 type_units = TYPE_VECTOR_SUBPARTS (type);
  poly_uint64 expr_units = TYPE_VECTOR_SUBPARTS (expr_type);
  unsigned int type_elt_bits = vector_element_size (type_bits, type_units);
  unsigned int expr_elt_bits = vector_element_size (expr_bits, expr_units);

  /* We can only preserve the semantics of a stepped pattern if the new
     vector element is an integer of the same size.  */
  if (VECTOR_CST_STEPPED_P (expr)
      && (!INTEGRAL_TYPE_P (type) || type_elt_bits != expr_elt_bits))
    return NULL_TREE;

  /* The number of bits needed to encode one element from every pattern
     of the original vector.  */
  unsigned int expr_sequence_bits
    = VECTOR_CST_NPATTERNS (expr) * expr_elt_bits;

  /* The number of bits needed to encode one element from every pattern
     of the result.  */
  unsigned int type_sequence_bits
    = least_common_multiple (expr_sequence_bits, type_elt_bits);

  /* Don't try to read more bytes than are available, which can happen
     for constant-sized vectors if TYPE has larger elements than EXPR_TYPE.
     The general VIEW_CONVERT handling can cope with that case, so there's
     no point complicating things here.  */
  unsigned int nelts_per_pattern = VECTOR_CST_NELTS_PER_PATTERN (expr);
  unsigned int buffer_bytes = CEIL (nelts_per_pattern * type_sequence_bits,
                                    BITS_PER_UNIT);
  unsigned int buffer_bits = buffer_bytes * BITS_PER_UNIT;
  if (known_gt (buffer_bits, expr_bits))
    return NULL_TREE;

  /* Get enough bytes of EXPR to form the new encoding.  */
  auto_vec<unsigned char, 128> buffer (buffer_bytes);
  buffer.quick_grow (buffer_bytes);
  if (native_encode_vector_part (expr, buffer.address (), buffer_bytes, 0,
                                 buffer_bits / expr_elt_bits)
      != (int) buffer_bytes)
    return NULL_TREE;

  /* Reencode the bytes as TYPE.  */
  unsigned int type_npatterns = type_sequence_bits / type_elt_bits;
  return native_interpret_vector_part (type, &buffer[0], buffer.length (),
                                       type_npatterns, nelts_per_pattern);
}
/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
   TYPE at compile-time.  If we're unable to perform the conversion
   return NULL_TREE.  */

static tree
fold_view_convert_expr (tree type, tree expr)
{
  /* We support up to 512-bit values (for V8DFmode).  */
  unsigned char buffer[64];
  int len;

  /* Check that the host and target are sane.  */
  if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    return NULL_TREE;

  if (VECTOR_TYPE_P (type) && TREE_CODE (expr) == VECTOR_CST)
    if (tree res = fold_view_convert_vector_encoding (type, expr))
      return res;

  len = native_encode_expr (expr, buffer, sizeof (buffer));
  if (len == 0)
    return NULL_TREE;

  return native_interpret_expr (type, buffer, len);
}
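
/* Example (illustrative, assuming IEEE single precision float): folding
   VIEW_CONVERT_EXPR<float>(0x3f800000) encodes the INTEGER_CST into the
   local buffer and reinterprets its 4 bytes as a REAL_CST, producing the
   constant 1.0f.  */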
/* Build an expression for the address of T.  Folds away INDIRECT_REF
   to avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  if (TREE_CODE (t) == INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);

      if (TREE_TYPE (t) != ptrtype)
        t = build1_loc (loc, NOP_EXPR, ptrtype, t);
    }
  else if (TREE_CODE (t) == MEM_REF
           && integer_zerop (TREE_OPERAND (t, 1)))
    {
      t = TREE_OPERAND (t, 0);

      if (TREE_TYPE (t) != ptrtype)
        t = fold_convert_loc (loc, ptrtype, t);
    }
  else if (TREE_CODE (t) == MEM_REF
           && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
    return fold_binary (POINTER_PLUS_EXPR, ptrtype,
                        TREE_OPERAND (t, 0),
                        convert_to_ptrofftype (TREE_OPERAND (t, 1)));
  else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
    {
      t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));

      if (TREE_TYPE (t) != ptrtype)
        t = fold_convert_loc (loc, ptrtype, t);
    }
  else
    t = build1_loc (loc, ADDR_EXPR, ptrtype, t);

  return t;
}
/* Build an expression for the address of T.  */

tree
build_fold_addr_expr_loc (location_t loc, tree t)
{
  tree ptrtype = build_pointer_type (TREE_TYPE (t));

  return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
}
/* Fold a unary expression of code CODE and type TYPE with operand
   OP0.  Return the folded expression if folding is successful.
   Otherwise, return NULL_TREE.  */

tree
fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
{
9289 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
9291 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
9292 && TREE_CODE_LENGTH (code
) == 1);
9297 if (CONVERT_EXPR_CODE_P (code
)
9298 || code
== FLOAT_EXPR
|| code
== ABS_EXPR
|| code
== NEGATE_EXPR
)
9300 /* Don't use STRIP_NOPS, because signedness of argument type
9302 STRIP_SIGN_NOPS (arg0
);
9306 /* Strip any conversions that don't change the mode. This
9307 is safe for every expression, except for a comparison
9308 expression because its signedness is derived from its
9311 Note that this is done as an internal manipulation within
9312 the constant folder, in order to find the simplest
9313 representation of the arguments so that their form can be
9314 studied. In any cases, the appropriate type conversions
9315 should be put back in the tree that will get out of the
9320 if (CONSTANT_CLASS_P (arg0
))
9322 tree tem
= const_unop (code
, type
, arg0
);
9325 if (TREE_TYPE (tem
) != type
)
9326 tem
= fold_convert_loc (loc
, type
, tem
);
9332 tem
= generic_simplify (loc
, code
, type
, op0
);
9336 if (TREE_CODE_CLASS (code
) == tcc_unary
)
9338 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
9339 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
9340 fold_build1_loc (loc
, code
, type
,
9341 fold_convert_loc (loc
, TREE_TYPE (op0
),
9342 TREE_OPERAND (arg0
, 1))));
9343 else if (TREE_CODE (arg0
) == COND_EXPR
)
9345 tree arg01
= TREE_OPERAND (arg0
, 1);
9346 tree arg02
= TREE_OPERAND (arg0
, 2);
9347 if (! VOID_TYPE_P (TREE_TYPE (arg01
)))
9348 arg01
= fold_build1_loc (loc
, code
, type
,
9349 fold_convert_loc (loc
,
9350 TREE_TYPE (op0
), arg01
));
9351 if (! VOID_TYPE_P (TREE_TYPE (arg02
)))
9352 arg02
= fold_build1_loc (loc
, code
, type
,
9353 fold_convert_loc (loc
,
9354 TREE_TYPE (op0
), arg02
));
9355 tem
= fold_build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
9358 /* If this was a conversion, and all we did was to move into
9359 inside the COND_EXPR, bring it back out. But leave it if
9360 it is a conversion from integer to integer and the
9361 result precision is no wider than a word since such a
9362 conversion is cheap and may be optimized away by combine,
9363 while it couldn't if it were outside the COND_EXPR. Then return
9364 so we don't get into an infinite recursion loop taking the
9365 conversion out and then back in. */
9367 if ((CONVERT_EXPR_CODE_P (code
)
9368 || code
== NON_LVALUE_EXPR
)
9369 && TREE_CODE (tem
) == COND_EXPR
9370 && TREE_CODE (TREE_OPERAND (tem
, 1)) == code
9371 && TREE_CODE (TREE_OPERAND (tem
, 2)) == code
9372 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 1))
9373 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 2))
9374 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))
9375 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)))
9376 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
9378 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))))
9379 && TYPE_PRECISION (TREE_TYPE (tem
)) <= BITS_PER_WORD
)
9380 || flag_syntax_only
))
9381 tem
= build1_loc (loc
, code
, type
,
9383 TREE_TYPE (TREE_OPERAND
9384 (TREE_OPERAND (tem
, 1), 0)),
9385 TREE_OPERAND (tem
, 0),
9386 TREE_OPERAND (TREE_OPERAND (tem
, 1), 0),
9387 TREE_OPERAND (TREE_OPERAND (tem
, 2),
9395 case NON_LVALUE_EXPR
:
9396 if (!maybe_lvalue_p (op0
))
9397 return fold_convert_loc (loc
, type
, op0
);
9402 case FIX_TRUNC_EXPR
:
9403 if (COMPARISON_CLASS_P (op0
))
9405 /* If we have (type) (a CMP b) and type is an integral type, return
9406 new expression involving the new type. Canonicalize
9407 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
9409 Do not fold the result as that would not simplify further, also
9410 folding again results in recursions. */
9411 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
9412 return build2_loc (loc
, TREE_CODE (op0
), type
,
9413 TREE_OPERAND (op0
, 0),
9414 TREE_OPERAND (op0
, 1));
9415 else if (!INTEGRAL_TYPE_P (type
) && !VOID_TYPE_P (type
)
9416 && TREE_CODE (type
) != VECTOR_TYPE
)
9417 return build3_loc (loc
, COND_EXPR
, type
, op0
,
9418 constant_boolean_node (true, type
),
9419 constant_boolean_node (false, type
));
9422 /* Handle (T *)&A.B.C for A being of type T and B and C
9423 living at offset zero. This occurs frequently in
9424 C++ upcasting and then accessing the base. */
9425 if (TREE_CODE (op0
) == ADDR_EXPR
9426 && POINTER_TYPE_P (type
)
9427 && handled_component_p (TREE_OPERAND (op0
, 0)))
9429 poly_int64 bitsize
, bitpos
;
9432 int unsignedp
, reversep
, volatilep
;
9434 = get_inner_reference (TREE_OPERAND (op0
, 0), &bitsize
, &bitpos
,
9435 &offset
, &mode
, &unsignedp
, &reversep
,
9437 /* If the reference was to a (constant) zero offset, we can use
9438 the address of the base if it has the same base type
9439 as the result type and the pointer type is unqualified. */
9441 && known_eq (bitpos
, 0)
9442 && (TYPE_MAIN_VARIANT (TREE_TYPE (type
))
9443 == TYPE_MAIN_VARIANT (TREE_TYPE (base
)))
9444 && TYPE_QUALS (type
) == TYPE_UNQUALIFIED
)
9445 return fold_convert_loc (loc
, type
,
9446 build_fold_addr_expr_loc (loc
, base
));
9449 if (TREE_CODE (op0
) == MODIFY_EXPR
9450 && TREE_CONSTANT (TREE_OPERAND (op0
, 1))
9451 /* Detect assigning a bitfield. */
9452 && !(TREE_CODE (TREE_OPERAND (op0
, 0)) == COMPONENT_REF
9454 (TREE_OPERAND (TREE_OPERAND (op0
, 0), 1))))
9456 /* Don't leave an assignment inside a conversion
9457 unless assigning a bitfield. */
9458 tem
= fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 1));
9459 /* First do the assignment, then return converted constant. */
9460 tem
= build2_loc (loc
, COMPOUND_EXPR
, TREE_TYPE (tem
), op0
, tem
);
9461 suppress_warning (tem
/* What warning? */);
9462 TREE_USED (tem
) = 1;
9466 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
9467 constants (if x has signed type, the sign bit cannot be set
9468 in c). This folds extension into the BIT_AND_EXPR.
9469 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
9470 very likely don't have maximal range for their precision and this
9471 transformation effectively doesn't preserve non-maximal ranges. */
9472 if (TREE_CODE (type
) == INTEGER_TYPE
9473 && TREE_CODE (op0
) == BIT_AND_EXPR
9474 && TREE_CODE (TREE_OPERAND (op0
, 1)) == INTEGER_CST
)
9476 tree and_expr
= op0
;
9477 tree and0
= TREE_OPERAND (and_expr
, 0);
9478 tree and1
= TREE_OPERAND (and_expr
, 1);
9481 if (TYPE_UNSIGNED (TREE_TYPE (and_expr
))
9482 || (TYPE_PRECISION (type
)
9483 <= TYPE_PRECISION (TREE_TYPE (and_expr
))))
9485 else if (TYPE_PRECISION (TREE_TYPE (and1
))
9486 <= HOST_BITS_PER_WIDE_INT
9487 && tree_fits_uhwi_p (and1
))
9489 unsigned HOST_WIDE_INT cst
;
9491 cst
= tree_to_uhwi (and1
);
9492 cst
&= HOST_WIDE_INT_M1U
9493 << (TYPE_PRECISION (TREE_TYPE (and1
)) - 1);
9494 change
= (cst
== 0);
9496 && !flag_syntax_only
9497 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0
)))
9500 tree uns
= unsigned_type_for (TREE_TYPE (and0
));
9501 and0
= fold_convert_loc (loc
, uns
, and0
);
9502 and1
= fold_convert_loc (loc
, uns
, and1
);
9507 tem
= force_fit_type (type
, wi::to_widest (and1
), 0,
9508 TREE_OVERFLOW (and1
));
9509 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
9510 fold_convert_loc (loc
, type
, and0
), tem
);
9514 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
9515 cast (T1)X will fold away. We assume that this happens when X itself
9517 if (POINTER_TYPE_P (type
)
9518 && TREE_CODE (arg0
) == POINTER_PLUS_EXPR
9519 && CONVERT_EXPR_P (TREE_OPERAND (arg0
, 0)))
9521 tree arg00
= TREE_OPERAND (arg0
, 0);
9522 tree arg01
= TREE_OPERAND (arg0
, 1);
9524 /* If -fsanitize=alignment, avoid this optimization in GENERIC
9525 when the pointed type needs higher alignment than
9526 the p+ first operand's pointed type. */
9528 && sanitize_flags_p (SANITIZE_ALIGNMENT
)
9529 && (min_align_of_type (TREE_TYPE (type
))
9530 > min_align_of_type (TREE_TYPE (TREE_TYPE (arg00
)))))
9533 /* Similarly, avoid this optimization in GENERIC for -fsanitize=null
9534 when type is a reference type and arg00's type is not,
9535 because arg00 could be validly nullptr and if arg01 doesn't return,
9536 we don't want false positive binding of reference to nullptr. */
9537 if (TREE_CODE (type
) == REFERENCE_TYPE
9539 && sanitize_flags_p (SANITIZE_NULL
)
9540 && TREE_CODE (TREE_TYPE (arg00
)) != REFERENCE_TYPE
)
9543 arg00
= fold_convert_loc (loc
, type
, arg00
);
9544 return fold_build_pointer_plus_loc (loc
, arg00
, arg01
);
9547 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
9548 of the same precision, and X is an integer type not narrower than
9549 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
9550 if (INTEGRAL_TYPE_P (type
)
9551 && TREE_CODE (op0
) == BIT_NOT_EXPR
9552 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
9553 && CONVERT_EXPR_P (TREE_OPERAND (op0
, 0))
9554 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (op0
)))
9556 tem
= TREE_OPERAND (TREE_OPERAND (op0
, 0), 0);
9557 if (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
9558 && TYPE_PRECISION (type
) <= TYPE_PRECISION (TREE_TYPE (tem
)))
9559 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
9560 fold_convert_loc (loc
, type
, tem
));
9563 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
9564 type of X and Y (integer types only). */
9565 if (INTEGRAL_TYPE_P (type
)
9566 && TREE_CODE (op0
) == MULT_EXPR
9567 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
9568 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
9570 /* Be careful not to introduce new overflows. */
9572 if (TYPE_OVERFLOW_WRAPS (type
))
9575 mult_type
= unsigned_type_for (type
);
9577 if (TYPE_PRECISION (mult_type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
9579 tem
= fold_build2_loc (loc
, MULT_EXPR
, mult_type
,
9580 fold_convert_loc (loc
, mult_type
,
9581 TREE_OPERAND (op0
, 0)),
9582 fold_convert_loc (loc
, mult_type
,
9583 TREE_OPERAND (op0
, 1)));
9584 return fold_convert_loc (loc
, type
, tem
);
9590 case VIEW_CONVERT_EXPR
:
9591 if (TREE_CODE (op0
) == MEM_REF
)
9593 if (TYPE_ALIGN (TREE_TYPE (op0
)) != TYPE_ALIGN (type
))
9594 type
= build_aligned_type (type
, TYPE_ALIGN (TREE_TYPE (op0
)));
9595 tem
= fold_build2_loc (loc
, MEM_REF
, type
,
9596 TREE_OPERAND (op0
, 0), TREE_OPERAND (op0
, 1));
9597 REF_REVERSE_STORAGE_ORDER (tem
) = REF_REVERSE_STORAGE_ORDER (op0
);
9604 tem
= fold_negate_expr (loc
, arg0
);
9606 return fold_convert_loc (loc
, type
, tem
);
9610 /* Convert fabs((double)float) into (double)fabsf(float). */
9611 if (TREE_CODE (arg0
) == NOP_EXPR
9612 && TREE_CODE (type
) == REAL_TYPE
)
9614 tree targ0
= strip_float_extensions (arg0
);
9616 return fold_convert_loc (loc
, type
,
9617 fold_build1_loc (loc
, ABS_EXPR
,
9624 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
9625 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
9626 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
9627 fold_convert_loc (loc
, type
,
9628 TREE_OPERAND (arg0
, 0)))))
9629 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
, tem
,
9630 fold_convert_loc (loc
, type
,
9631 TREE_OPERAND (arg0
, 1)));
9632 else if (TREE_CODE (arg0
) == BIT_XOR_EXPR
9633 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
9634 fold_convert_loc (loc
, type
,
9635 TREE_OPERAND (arg0
, 1)))))
9636 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
,
9637 fold_convert_loc (loc
, type
,
9638 TREE_OPERAND (arg0
, 0)), tem
);
9642 case TRUTH_NOT_EXPR
:
9643 /* Note that the operand of this must be an int
9644 and its values must be 0 or 1.
9645 ("true" is a fixed value perhaps depending on the language,
9646 but we don't handle values other than 1 correctly yet.) */
9647 tem
= fold_truth_not_expr (loc
, arg0
);
9650 return fold_convert_loc (loc
, type
, tem
);
9653 /* Fold *&X to X if X is an lvalue. */
9654 if (TREE_CODE (op0
) == ADDR_EXPR
)
9656 tree op00
= TREE_OPERAND (op0
, 0);
9658 || TREE_CODE (op00
) == PARM_DECL
9659 || TREE_CODE (op00
) == RESULT_DECL
)
9660 && !TREE_READONLY (op00
))
9667 } /* switch (code) */
/* If the operation was a conversion do _not_ mark a resulting constant
   with TREE_OVERFLOW if the original constant was not.  These conversions
   have implementation defined behavior and retaining the TREE_OVERFLOW
   flag here would confuse later passes such as VRP.  */

tree
fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
                                tree type, tree op0)
{
  tree res = fold_unary_loc (loc, code, type, op0);
  if (res
      && TREE_CODE (res) == INTEGER_CST
      && TREE_CODE (op0) == INTEGER_CST
      && CONVERT_EXPR_CODE_P (code))
    TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);

  return res;
}
/* Fold a binary bitwise/truth expression of code CODE and type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting expression.
   ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_truth_andor (location_t loc, enum tree_code code, tree type,
                  tree arg0, tree arg1, tree op0, tree op1)
{
9700 /* We only do these simplifications if we are optimizing. */
9704 /* Check for things like (A || B) && (A || C). We can convert this
9705 to A || (B && C). Note that either operator can be any of the four
9706 truth and/or operations and the transformation will still be
9707 valid. Also note that we only care about order for the
9708 ANDIF and ORIF operators. If B contains side effects, this
9709 might change the truth-value of A. */
9710 if (TREE_CODE (arg0
) == TREE_CODE (arg1
)
9711 && (TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
9712 || TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
9713 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
9714 || TREE_CODE (arg0
) == TRUTH_OR_EXPR
)
9715 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0
, 1)))
9717 tree a00
= TREE_OPERAND (arg0
, 0);
9718 tree a01
= TREE_OPERAND (arg0
, 1);
9719 tree a10
= TREE_OPERAND (arg1
, 0);
9720 tree a11
= TREE_OPERAND (arg1
, 1);
9721 int commutative
= ((TREE_CODE (arg0
) == TRUTH_OR_EXPR
9722 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
)
9723 && (code
== TRUTH_AND_EXPR
9724 || code
== TRUTH_OR_EXPR
));
9726 if (operand_equal_p (a00
, a10
, 0))
9727 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a00
,
9728 fold_build2_loc (loc
, code
, type
, a01
, a11
));
9729 else if (commutative
&& operand_equal_p (a00
, a11
, 0))
9730 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a00
,
9731 fold_build2_loc (loc
, code
, type
, a01
, a10
));
9732 else if (commutative
&& operand_equal_p (a01
, a10
, 0))
9733 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a01
,
9734 fold_build2_loc (loc
, code
, type
, a00
, a11
));
/* This case is tricky because we must either have commutative
   operators or else A10 must not have side-effects.  */
9739 else if ((commutative
|| ! TREE_SIDE_EFFECTS (a10
))
9740 && operand_equal_p (a01
, a11
, 0))
9741 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
,
9742 fold_build2_loc (loc
, code
, type
, a00
, a10
),
9746 /* See if we can build a range comparison. */
9747 if ((tem
= fold_range_test (loc
, code
, type
, op0
, op1
)) != 0)
9750 if ((code
== TRUTH_ANDIF_EXPR
&& TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
)
9751 || (code
== TRUTH_ORIF_EXPR
&& TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
))
9753 tem
= merge_truthop_with_opposite_arm (loc
, arg0
, arg1
, true);
9755 return fold_build2_loc (loc
, code
, type
, tem
, arg1
);
9758 if ((code
== TRUTH_ANDIF_EXPR
&& TREE_CODE (arg1
) == TRUTH_ORIF_EXPR
)
9759 || (code
== TRUTH_ORIF_EXPR
&& TREE_CODE (arg1
) == TRUTH_ANDIF_EXPR
))
9761 tem
= merge_truthop_with_opposite_arm (loc
, arg1
, arg0
, false);
9763 return fold_build2_loc (loc
, code
, type
, arg0
, tem
);
9766 /* Check for the possibility of merging component references. If our
9767 lhs is another similar operation, try to merge its rhs with our
9768 rhs. Then try to merge our lhs and rhs. */
9769 if (TREE_CODE (arg0
) == code
9770 && (tem
= fold_truth_andor_1 (loc
, code
, type
,
9771 TREE_OPERAND (arg0
, 1), arg1
)) != 0)
9772 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
9774 if ((tem
= fold_truth_andor_1 (loc
, code
, type
, arg0
, arg1
)) != 0)
9777 bool logical_op_non_short_circuit
= LOGICAL_OP_NON_SHORT_CIRCUIT
;
9778 if (param_logical_op_non_short_circuit
!= -1)
9779 logical_op_non_short_circuit
9780 = param_logical_op_non_short_circuit
;
9781 if (logical_op_non_short_circuit
9782 && !sanitize_coverage_p ()
9783 && (code
== TRUTH_AND_EXPR
9784 || code
== TRUTH_ANDIF_EXPR
9785 || code
== TRUTH_OR_EXPR
9786 || code
== TRUTH_ORIF_EXPR
))
9788 enum tree_code ncode
, icode
;
9790 ncode
= (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_AND_EXPR
)
9791 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
;
9792 icode
= ncode
== TRUTH_AND_EXPR
? TRUTH_ANDIF_EXPR
: TRUTH_ORIF_EXPR
;
9794 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
9795 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
9796 We don't want to pack more than two leafs to a non-IF AND/OR
9798 If tree-code of left-hand operand isn't an AND/OR-IF code and not
9799 equal to IF-CODE, then we don't want to add right-hand operand.
9800 If the inner right-hand side of left-hand operand has
9801 side-effects, or isn't simple, then we can't add to it,
9802 as otherwise we might destroy if-sequence. */
9803 if (TREE_CODE (arg0
) == icode
9804 && simple_condition_p (arg1
)
9805 /* Needed for sequence points to handle trappings, and
9807 && simple_condition_p (TREE_OPERAND (arg0
, 1)))
9809 tem
= fold_build2_loc (loc
, ncode
, type
, TREE_OPERAND (arg0
, 1),
9811 return fold_build2_loc (loc
, icode
, type
, TREE_OPERAND (arg0
, 0),
9814 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
9815 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
9816 else if (TREE_CODE (arg1
) == icode
9817 && simple_condition_p (arg0
)
9818 /* Needed for sequence points to handle trappings, and
9820 && simple_condition_p (TREE_OPERAND (arg1
, 0)))
9822 tem
= fold_build2_loc (loc
, ncode
, type
,
9823 arg0
, TREE_OPERAND (arg1
, 0));
9824 return fold_build2_loc (loc
, icode
, type
, tem
,
9825 TREE_OPERAND (arg1
, 1));
9827 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
For sequence point consistency, we need to check for trapping,
   and side-effects.  */
9831 else if (code
== icode
&& simple_condition_p (arg0
)
9832 && simple_condition_p (arg1
))
9833 return fold_build2_loc (loc
, ncode
, type
, arg0
, arg1
);
/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
   valid if signed overflow is undefined.  */

static tree
maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
                                 tree arg0, tree arg1,
                                 bool *strict_overflow_p)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
9856 /* Match A +- CST code arg1. We can change this only if overflow
9858 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
9859 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
)))
9860 /* In principle pointers also have undefined overflow behavior,
9861 but that causes problems elsewhere. */
9862 && !POINTER_TYPE_P (TREE_TYPE (arg0
))
9863 && (code0
== MINUS_EXPR
9864 || code0
== PLUS_EXPR
)
9865 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
))
9868 /* Identify the constant in arg0 and its sign. */
9869 cst0
= TREE_OPERAND (arg0
, 1);
9870 sgn0
= tree_int_cst_sgn (cst0
);
9872 /* Overflowed constants and zero will cause problems. */
9873 if (integer_zerop (cst0
)
9874 || TREE_OVERFLOW (cst0
))
9877 /* See if we can reduce the magnitude of the constant in
9878 arg0 by changing the comparison code. */
9879 /* A - CST < arg1 -> A - CST-1 <= arg1. */
9881 && code0
== ((sgn0
== -1) ? PLUS_EXPR
: MINUS_EXPR
))
9883 /* A + CST > arg1 -> A + CST-1 >= arg1. */
9884 else if (code
== GT_EXPR
9885 && code0
== ((sgn0
== -1) ? MINUS_EXPR
: PLUS_EXPR
))
9887 /* A + CST <= arg1 -> A + CST-1 < arg1. */
9888 else if (code
== LE_EXPR
9889 && code0
== ((sgn0
== -1) ? MINUS_EXPR
: PLUS_EXPR
))
9891 /* A - CST >= arg1 -> A - CST-1 > arg1. */
9892 else if (code
== GE_EXPR
9893 && code0
== ((sgn0
== -1) ? PLUS_EXPR
: MINUS_EXPR
))
9897 *strict_overflow_p
= true;
9899 /* Now build the constant reduced in magnitude. But not if that
9900 would produce one outside of its types range. */
9901 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0
))
9903 && TYPE_MIN_VALUE (TREE_TYPE (cst0
))
9904 && tree_int_cst_equal (cst0
, TYPE_MIN_VALUE (TREE_TYPE (cst0
))))
9906 && TYPE_MAX_VALUE (TREE_TYPE (cst0
))
9907 && tree_int_cst_equal (cst0
, TYPE_MAX_VALUE (TREE_TYPE (cst0
))))))
9910 t
= int_const_binop (sgn0
== -1 ? PLUS_EXPR
: MINUS_EXPR
,
9911 cst0
, build_int_cst (TREE_TYPE (cst0
), 1));
9912 t
= fold_build2_loc (loc
, code0
, TREE_TYPE (arg0
), TREE_OPERAND (arg0
, 0), t
);
9913 t
= fold_convert (TREE_TYPE (arg1
), t
);
9915 return fold_build2_loc (loc
, code
, type
, t
, arg1
);
/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
   overflow further.  Try to decrease the magnitude of constants involved
   by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
   and put sole constants at the second argument position.
   Returns the canonicalized tree if changed, otherwise NULL_TREE.  */

static tree
maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
                               tree arg0, tree arg1)
{
  tree t;
  bool strict_overflow_p;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
                                  "when reducing constant in comparison");

  /* Try canonicalization by simplifying arg0.  */
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
                                       &strict_overflow_p);
  if (t)
    {
      if (strict_overflow_p)
        fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
      return t;
    }

  /* Try canonicalization by simplifying arg1 using the swapped
     comparison.  */
  code = swap_tree_comparison (code);
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
                                       &strict_overflow_p);
  if (t && strict_overflow_p)
    fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
  return t;
}
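
/* Example (illustrative, only valid when signed overflow is undefined):
   the helpers above rewrite "a - 2 < b" as "a - 1 <= b", shrinking the
   magnitude of the constant on the variable side; when that assumption
   was needed, fold_overflow_warning reports it under -Wstrict-overflow.  */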
/* Return whether BASE + OFFSET + BITPOS may wrap around the address
   space.  This is used to avoid issuing overflow warnings for
   expressions like &p->x which cannot wrap.  */

static bool
pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
{
  if (!POINTER_TYPE_P (TREE_TYPE (base)))
    return true;

  if (maybe_lt (bitpos, 0))
    return true;

  poly_wide_int wi_offset;
  int precision = TYPE_PRECISION (TREE_TYPE (base));
  if (offset == NULL_TREE)
    wi_offset = wi::zero (precision);
  else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
    return true;
  else
    wi_offset = wi::to_poly_wide (offset);

  wi::overflow_type overflow;
  poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
                                  precision);
  poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
  if (overflow)
    return true;

  poly_uint64 total_hwi, size;
  if (!total.to_uhwi (&total_hwi)
      || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
                           &size)
      || known_eq (size, 0U))
    return true;

  if (known_le (total_hwi, size))
    return false;

  /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
     array.  */
  if (TREE_CODE (base) == ADDR_EXPR
      && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
                          &size)
      && maybe_ne (size, 0U)
      && known_le (total_hwi, size))
    return false;

  return true;
}
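
/* Illustrative note: for an expression like "&p->x", BASE is the pointer,
   OFFSET is usually NULL_TREE and BITPOS is the non-negative field offset in
   bits; as long as the resulting byte offset fits within TYPE_SIZE_UNIT of
   the pointed-to type, the function returns false and no wraparound warning
   is emitted.  */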
/* Return a positive integer when the symbol DECL is known to have
   a nonzero address, zero when it's known not to (e.g., it's a weak
   symbol), and a negative integer when the symbol is not yet in the
   symbol table and so whether or not its address is zero is unknown.
   For function local objects always return positive integer.  */
static int
maybe_nonzero_address (tree decl)
{
  /* Normally, don't do anything for variables and functions before symtab is
     built; it is quite possible that DECL will be declared weak later.
     But if folding_initializer, we need a constant answer now, so create
     the symtab entry and prevent later weak declaration.  */
  if (DECL_P (decl) && decl_in_symtab_p (decl))
    if (struct symtab_node *symbol
        = (folding_initializer
           ? symtab_node::get_create (decl)
           : symtab_node::get (decl)))
      return symbol->nonzero_address ();

  /* Function local objects are never NULL.  */
  if (DECL_P (decl)
      && (DECL_CONTEXT (decl)
          && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
          && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
    return 1;

  return -1;
}
/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary should call fold_binary.  Fold a comparison with
   tree code CODE and type TYPE with operands OP0 and OP1.  Return
   the folded comparison or NULL_TREE.  */

static tree
fold_comparison (location_t loc, enum tree_code code, tree type,
                 tree op0, tree op1)
{
  const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
  tree arg0, arg1, tem;

  arg0 = op0;
  arg1 = op1;

  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);
10056 /* For comparisons of pointers we can decompose it to a compile time
10057 comparison of the base objects and the offsets into the object.
10058 This requires at least one operand being an ADDR_EXPR or a
10059 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
10060 if (POINTER_TYPE_P (TREE_TYPE (arg0
))
10061 && (TREE_CODE (arg0
) == ADDR_EXPR
10062 || TREE_CODE (arg1
) == ADDR_EXPR
10063 || TREE_CODE (arg0
) == POINTER_PLUS_EXPR
10064 || TREE_CODE (arg1
) == POINTER_PLUS_EXPR
))
10066 tree base0
, base1
, offset0
= NULL_TREE
, offset1
= NULL_TREE
;
10067 poly_int64 bitsize
, bitpos0
= 0, bitpos1
= 0;
10069 int volatilep
, reversep
, unsignedp
;
10070 bool indirect_base0
= false, indirect_base1
= false;
10072 /* Get base and offset for the access. Strip ADDR_EXPR for
10073 get_inner_reference, but put it back by stripping INDIRECT_REF
10074 off the base object if possible. indirect_baseN will be true
10075 if baseN is not an address but refers to the object itself. */
10077 if (TREE_CODE (arg0
) == ADDR_EXPR
)
10080 = get_inner_reference (TREE_OPERAND (arg0
, 0),
10081 &bitsize
, &bitpos0
, &offset0
, &mode
,
10082 &unsignedp
, &reversep
, &volatilep
);
10083 if (TREE_CODE (base0
) == INDIRECT_REF
)
10084 base0
= TREE_OPERAND (base0
, 0);
10086 indirect_base0
= true;
10088 else if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
)
10090 base0
= TREE_OPERAND (arg0
, 0);
10091 STRIP_SIGN_NOPS (base0
);
10092 if (TREE_CODE (base0
) == ADDR_EXPR
)
10095 = get_inner_reference (TREE_OPERAND (base0
, 0),
10096 &bitsize
, &bitpos0
, &offset0
, &mode
,
10097 &unsignedp
, &reversep
, &volatilep
);
10098 if (TREE_CODE (base0
) == INDIRECT_REF
)
10099 base0
= TREE_OPERAND (base0
, 0);
10101 indirect_base0
= true;
10103 if (offset0
== NULL_TREE
|| integer_zerop (offset0
))
10104 offset0
= TREE_OPERAND (arg0
, 1);
10106 offset0
= size_binop (PLUS_EXPR
, offset0
,
10107 TREE_OPERAND (arg0
, 1));
10108 if (poly_int_tree_p (offset0
))
10110 poly_offset_int tem
= wi::sext (wi::to_poly_offset (offset0
),
10111 TYPE_PRECISION (sizetype
));
10112 tem
<<= LOG2_BITS_PER_UNIT
;
10114 if (tem
.to_shwi (&bitpos0
))
10115 offset0
= NULL_TREE
;
10120 if (TREE_CODE (arg1
) == ADDR_EXPR
)
10123 = get_inner_reference (TREE_OPERAND (arg1
, 0),
10124 &bitsize
, &bitpos1
, &offset1
, &mode
,
10125 &unsignedp
, &reversep
, &volatilep
);
10126 if (TREE_CODE (base1
) == INDIRECT_REF
)
10127 base1
= TREE_OPERAND (base1
, 0);
10129 indirect_base1
= true;
10131 else if (TREE_CODE (arg1
) == POINTER_PLUS_EXPR
)
10133 base1
= TREE_OPERAND (arg1
, 0);
10134 STRIP_SIGN_NOPS (base1
);
10135 if (TREE_CODE (base1
) == ADDR_EXPR
)
10138 = get_inner_reference (TREE_OPERAND (base1
, 0),
10139 &bitsize
, &bitpos1
, &offset1
, &mode
,
10140 &unsignedp
, &reversep
, &volatilep
);
10141 if (TREE_CODE (base1
) == INDIRECT_REF
)
10142 base1
= TREE_OPERAND (base1
, 0);
10144 indirect_base1
= true;
10146 if (offset1
== NULL_TREE
|| integer_zerop (offset1
))
10147 offset1
= TREE_OPERAND (arg1
, 1);
10149 offset1
= size_binop (PLUS_EXPR
, offset1
,
10150 TREE_OPERAND (arg1
, 1));
10151 if (poly_int_tree_p (offset1
))
10153 poly_offset_int tem
= wi::sext (wi::to_poly_offset (offset1
),
10154 TYPE_PRECISION (sizetype
));
10155 tem
<<= LOG2_BITS_PER_UNIT
;
10157 if (tem
.to_shwi (&bitpos1
))
10158 offset1
= NULL_TREE
;
10162 /* If we have equivalent bases we might be able to simplify. */
10163 if (indirect_base0
== indirect_base1
10164 && operand_equal_p (base0
, base1
,
10165 indirect_base0
? OEP_ADDRESS_OF
: 0))
10167 /* We can fold this expression to a constant if the non-constant
10168 offset parts are equal. */
10169 if ((offset0
== offset1
10170 || (offset0
&& offset1
10171 && operand_equal_p (offset0
, offset1
, 0)))
10174 && (DECL_P (base0
) || CONSTANT_CLASS_P (base0
)))
10175 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))))
10178 && maybe_ne (bitpos0
, bitpos1
)
10179 && (pointer_may_wrap_p (base0
, offset0
, bitpos0
)
10180 || pointer_may_wrap_p (base1
, offset1
, bitpos1
)))
10181 fold_overflow_warning (("assuming pointer wraparound does not "
10182 "occur when comparing P +- C1 with "
10184 WARN_STRICT_OVERFLOW_CONDITIONAL
);
10189 if (known_eq (bitpos0
, bitpos1
))
10190 return constant_boolean_node (true, type
);
10191 if (known_ne (bitpos0
, bitpos1
))
10192 return constant_boolean_node (false, type
);
10195 if (known_ne (bitpos0
, bitpos1
))
10196 return constant_boolean_node (true, type
);
10197 if (known_eq (bitpos0
, bitpos1
))
10198 return constant_boolean_node (false, type
);
10201 if (known_lt (bitpos0
, bitpos1
))
10202 return constant_boolean_node (true, type
);
10203 if (known_ge (bitpos0
, bitpos1
))
10204 return constant_boolean_node (false, type
);
10207 if (known_le (bitpos0
, bitpos1
))
10208 return constant_boolean_node (true, type
);
10209 if (known_gt (bitpos0
, bitpos1
))
10210 return constant_boolean_node (false, type
);
10213 if (known_ge (bitpos0
, bitpos1
))
10214 return constant_boolean_node (true, type
);
10215 if (known_lt (bitpos0
, bitpos1
))
10216 return constant_boolean_node (false, type
);
10219 if (known_gt (bitpos0
, bitpos1
))
10220 return constant_boolean_node (true, type
);
10221 if (known_le (bitpos0
, bitpos1
))
10222 return constant_boolean_node (false, type
);
10227 /* We can simplify the comparison to a comparison of the variable
10228 offset parts if the constant offset parts are equal.
10229 Be careful to use signed sizetype here because otherwise we
10230 mess with array offsets in the wrong way. This is possible
10231 because pointer arithmetic is restricted to retain within an
10232 object and overflow on pointer differences is undefined as of
10233 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
10234 else if (known_eq (bitpos0
, bitpos1
)
10237 && (DECL_P (base0
) || CONSTANT_CLASS_P (base0
)))
10238 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))))
/* By converting to signed sizetype we cover middle-end pointer
   arithmetic which operates on unsigned pointer types of size
   type size and ARRAY_REF offsets which are properly sign or
   zero extended from their type in case it is narrower than
   sizetype.  */
10245 if (offset0
== NULL_TREE
)
10246 offset0
= build_int_cst (ssizetype
, 0);
10248 offset0
= fold_convert_loc (loc
, ssizetype
, offset0
);
10249 if (offset1
== NULL_TREE
)
10250 offset1
= build_int_cst (ssizetype
, 0);
10252 offset1
= fold_convert_loc (loc
, ssizetype
, offset1
);
10255 && (pointer_may_wrap_p (base0
, offset0
, bitpos0
)
10256 || pointer_may_wrap_p (base1
, offset1
, bitpos1
)))
10257 fold_overflow_warning (("assuming pointer wraparound does not "
10258 "occur when comparing P +- C1 with "
10260 WARN_STRICT_OVERFLOW_COMPARISON
);
10262 return fold_build2_loc (loc
, code
, type
, offset0
, offset1
);
/* For equal offsets we can simplify to a comparison of the
   addresses.  */
10267 else if (known_eq (bitpos0
, bitpos1
)
10269 ? base0
!= TREE_OPERAND (arg0
, 0) : base0
!= arg0
)
10271 ? base1
!= TREE_OPERAND (arg1
, 0) : base1
!= arg1
)
10272 && ((offset0
== offset1
)
10273 || (offset0
&& offset1
10274 && operand_equal_p (offset0
, offset1
, 0))))
10276 if (indirect_base0
)
10277 base0
= build_fold_addr_expr_loc (loc
, base0
);
10278 if (indirect_base1
)
10279 base1
= build_fold_addr_expr_loc (loc
, base1
);
10280 return fold_build2_loc (loc
, code
, type
, base0
, base1
);
/* Comparison between an ordinary (non-weak) symbol and a null
   pointer can be eliminated since such symbols must have a non
   null address.  In C, relational expressions between pointers
   to objects and null pointers are undefined.  The results
   below follow the C++ rules with the additional property that
   every object pointer compares greater than a null pointer.  */
10289 else if (((DECL_P (base0
)
10290 && maybe_nonzero_address (base0
) > 0
/* Avoid folding references to struct members at offset 0 to
   prevent tests like '&ptr->firstmember == 0' from getting
   eliminated.  When ptr is null, although the -> expression
   is strictly speaking invalid, GCC retains it as a matter
   of QoI.  See PR c/44555.  */
10296 && (offset0
== NULL_TREE
&& known_ne (bitpos0
, 0)))
10297 || CONSTANT_CLASS_P (base0
))
/* The caller guarantees that when one of the arguments is
   constant (i.e., null in this case) it is second.  */
10301 && integer_zerop (arg1
))
10308 return constant_boolean_node (false, type
);
10312 return constant_boolean_node (true, type
);
10314 gcc_unreachable ();
/* Transform comparisons of the form X +- C1 CMP Y +- C2 to
   X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
   the resulting offset is smaller in absolute value than the
   original one and has the same sign.  */
10323 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
10324 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))
10325 && (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
10326 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
10327 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1)))
10328 && (TREE_CODE (arg1
) == PLUS_EXPR
|| TREE_CODE (arg1
) == MINUS_EXPR
)
10329 && (TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
10330 && !TREE_OVERFLOW (TREE_OPERAND (arg1
, 1))))
10332 tree const1
= TREE_OPERAND (arg0
, 1);
10333 tree const2
= TREE_OPERAND (arg1
, 1);
10334 tree variable1
= TREE_OPERAND (arg0
, 0);
10335 tree variable2
= TREE_OPERAND (arg1
, 0);
10337 const char * const warnmsg
= G_("assuming signed overflow does not "
10338 "occur when combining constants around "
/* Put the constant on the side where it doesn't overflow and is
   of lower absolute value and of the same sign as before.  */
10343 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
10344 ? MINUS_EXPR
: PLUS_EXPR
,
10346 if (!TREE_OVERFLOW (cst
)
10347 && tree_int_cst_compare (const2
, cst
) == tree_int_cst_sgn (const2
)
10348 && tree_int_cst_sgn (cst
) == tree_int_cst_sgn (const2
))
10350 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
10351 return fold_build2_loc (loc
, code
, type
,
10353 fold_build2_loc (loc
, TREE_CODE (arg1
),
10358 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
10359 ? MINUS_EXPR
: PLUS_EXPR
,
10361 if (!TREE_OVERFLOW (cst
)
10362 && tree_int_cst_compare (const1
, cst
) == tree_int_cst_sgn (const1
)
10363 && tree_int_cst_sgn (cst
) == tree_int_cst_sgn (const1
))
10365 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
10366 return fold_build2_loc (loc
, code
, type
,
10367 fold_build2_loc (loc
, TREE_CODE (arg0
),
10374 tem
= maybe_canonicalize_comparison (loc
, code
, type
, arg0
, arg1
);
/* If we are comparing an expression that just has comparisons
   of two integer values, arithmetic expressions of those comparisons,
   and constants, we can simplify it.  There are only three cases
   to check: the two values can either be equal, the first can be
   greater, or the second can be greater.  Fold the expression for
   those three values.  Since each value must be 0 or 1, we have
   eight possibilities, each of which corresponds to the constant 0
   or 1 or one of the six possible comparisons.

   This handles common cases like (a > b) == 0 but also handles
   expressions like ((x > y) - (y > x)) > 0, which supposedly
   occur in macroized code.  */
10391 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) != INTEGER_CST
)
10393 tree cval1
= 0, cval2
= 0;
10395 if (twoval_comparison_p (arg0
, &cval1
, &cval2
)
10396 /* Don't handle degenerate cases here; they should already
10397 have been handled anyway. */
10398 && cval1
!= 0 && cval2
!= 0
10399 && ! (TREE_CONSTANT (cval1
) && TREE_CONSTANT (cval2
))
10400 && TREE_TYPE (cval1
) == TREE_TYPE (cval2
)
10401 && INTEGRAL_TYPE_P (TREE_TYPE (cval1
))
10402 && TYPE_MAX_VALUE (TREE_TYPE (cval1
))
10403 && TYPE_MAX_VALUE (TREE_TYPE (cval2
))
10404 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1
)),
10405 TYPE_MAX_VALUE (TREE_TYPE (cval2
)), 0))
10407 tree maxval
= TYPE_MAX_VALUE (TREE_TYPE (cval1
));
10408 tree minval
= TYPE_MIN_VALUE (TREE_TYPE (cval1
));
10410 /* We can't just pass T to eval_subst in case cval1 or cval2
10411 was the same as ARG1. */
10414 = fold_build2_loc (loc
, code
, type
,
10415 eval_subst (loc
, arg0
, cval1
, maxval
,
10419 = fold_build2_loc (loc
, code
, type
,
10420 eval_subst (loc
, arg0
, cval1
, maxval
,
10424 = fold_build2_loc (loc
, code
, type
,
10425 eval_subst (loc
, arg0
, cval1
, minval
,
10429 /* All three of these results should be 0 or 1. Confirm they are.
10430 Then use those values to select the proper code to use. */
10432 if (TREE_CODE (high_result
) == INTEGER_CST
10433 && TREE_CODE (equal_result
) == INTEGER_CST
10434 && TREE_CODE (low_result
) == INTEGER_CST
)
10436 /* Make a 3-bit mask with the high-order bit being the
10437 value for `>', the next for '=', and the low for '<'. */
10438 switch ((integer_onep (high_result
) * 4)
10439 + (integer_onep (equal_result
) * 2)
10440 + integer_onep (low_result
))
10443 /* Always false. */
10444 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
10465 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
10468 return fold_build2_loc (loc
, code
, type
, cval1
, cval2
);
/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */
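/* Illustrative sketch, not part of the original sources: for
   z = a + b*i the product z * conj(z) equals (a*a + b*b) + 0*i,
   so the folded result is COMPLEX_EXPR <a*a + b*b, 0>.  */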
10482 fold_mult_zconjz (location_t loc
, tree type
, tree expr
)
10484 tree itype
= TREE_TYPE (type
);
10485 tree rpart
, ipart
, tem
;
10487 if (TREE_CODE (expr
) == COMPLEX_EXPR
)
10489 rpart
= TREE_OPERAND (expr
, 0);
10490 ipart
= TREE_OPERAND (expr
, 1);
10492 else if (TREE_CODE (expr
) == COMPLEX_CST
)
10494 rpart
= TREE_REALPART (expr
);
10495 ipart
= TREE_IMAGPART (expr
);
10499 expr
= save_expr (expr
);
10500 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, itype
, expr
);
10501 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, itype
, expr
);
10504 rpart
= save_expr (rpart
);
10505 ipart
= save_expr (ipart
);
10506 tem
= fold_build2_loc (loc
, PLUS_EXPR
, itype
,
10507 fold_build2_loc (loc
, MULT_EXPR
, itype
, rpart
, rpart
),
10508 fold_build2_loc (loc
, MULT_EXPR
, itype
, ipart
, ipart
));
10509 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, tem
,
10510 build_zero_cst (itype
));
/* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
   CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
   true if successful.  */
10519 vec_cst_ctor_to_array (tree arg
, unsigned int nelts
, tree
*elts
)
10521 unsigned HOST_WIDE_INT i
, nunits
;
10523 if (TREE_CODE (arg
) == VECTOR_CST
10524 && VECTOR_CST_NELTS (arg
).is_constant (&nunits
))
10526 for (i
= 0; i
< nunits
; ++i
)
10527 elts
[i
] = VECTOR_CST_ELT (arg
, i
);
10529 else if (TREE_CODE (arg
) == CONSTRUCTOR
)
10531 constructor_elt
*elt
;
10533 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg
), i
, elt
)
10534 if (i
>= nelts
|| TREE_CODE (TREE_TYPE (elt
->value
)) == VECTOR_TYPE
)
10537 elts
[i
] = elt
->value
;
10541 for (; i
< nelts
; i
++)
10543 = fold_convert (TREE_TYPE (TREE_TYPE (arg
)), integer_zero_node
);
/* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
   selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
   NULL_TREE otherwise.  */
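/* Illustrative sketch, not part of the original sources: with two
   V4SI constants {1,2,3,4} and {5,6,7,8} and SEL = {0,5,2,7}, indices
   below 4 select from ARG0 and indices 4..7 select from ARG1, so the
   folded result is {1,6,3,8}.  */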
10552 fold_vec_perm (tree type
, tree arg0
, tree arg1
, const vec_perm_indices
&sel
)
10555 unsigned HOST_WIDE_INT nelts
;
10556 bool need_ctor
= false;
10558 if (!sel
.length ().is_constant (&nelts
))
10560 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type
), nelts
)
10561 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)), nelts
)
10562 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)), nelts
));
10563 if (TREE_TYPE (TREE_TYPE (arg0
)) != TREE_TYPE (type
)
10564 || TREE_TYPE (TREE_TYPE (arg1
)) != TREE_TYPE (type
))
10567 tree
*in_elts
= XALLOCAVEC (tree
, nelts
* 2);
10568 if (!vec_cst_ctor_to_array (arg0
, nelts
, in_elts
)
10569 || !vec_cst_ctor_to_array (arg1
, nelts
, in_elts
+ nelts
))
10572 tree_vector_builder
out_elts (type
, nelts
, 1);
10573 for (i
= 0; i
< nelts
; i
++)
10575 HOST_WIDE_INT index
;
10576 if (!sel
[i
].is_constant (&index
))
10578 if (!CONSTANT_CLASS_P (in_elts
[index
]))
10580 out_elts
.quick_push (unshare_expr (in_elts
[index
]));
10585 vec
<constructor_elt
, va_gc
> *v
;
10586 vec_alloc (v
, nelts
);
10587 for (i
= 0; i
< nelts
; i
++)
10588 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, out_elts
[i
]);
10589 return build_constructor (type
, v
);
10592 return out_elts
.build ();
/* Try to fold a pointer difference of type TYPE between two address
   expressions of array references AREF0 and AREF1 using location LOC.
   Return a simplified expression for the difference or NULL_TREE.  */
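/* Illustrative sketch, not part of the original sources: for
   &a[i] - &a[j] over an element type of size S, the expression built
   below is (i - j) * S plus whatever offset the bases contribute.  */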
10600 fold_addr_of_array_ref_difference (location_t loc
, tree type
,
10601 tree aref0
, tree aref1
,
10602 bool use_pointer_diff
)
10604 tree base0
= TREE_OPERAND (aref0
, 0);
10605 tree base1
= TREE_OPERAND (aref1
, 0);
10606 tree base_offset
= build_int_cst (type
, 0);
/* If the bases are array references as well, recurse.  If the bases
   are pointer indirections, compute the difference of the pointers.
   If the bases are equal, we are set.  */
10611 if ((TREE_CODE (base0
) == ARRAY_REF
10612 && TREE_CODE (base1
) == ARRAY_REF
10614 = fold_addr_of_array_ref_difference (loc
, type
, base0
, base1
,
10615 use_pointer_diff
)))
10616 || (INDIRECT_REF_P (base0
)
10617 && INDIRECT_REF_P (base1
)
10620 ? fold_binary_loc (loc
, POINTER_DIFF_EXPR
, type
,
10621 TREE_OPERAND (base0
, 0),
10622 TREE_OPERAND (base1
, 0))
10623 : fold_binary_loc (loc
, MINUS_EXPR
, type
,
10624 fold_convert (type
,
10625 TREE_OPERAND (base0
, 0)),
10626 fold_convert (type
,
10627 TREE_OPERAND (base1
, 0)))))
10628 || operand_equal_p (base0
, base1
, OEP_ADDRESS_OF
))
10630 tree op0
= fold_convert_loc (loc
, type
, TREE_OPERAND (aref0
, 1));
10631 tree op1
= fold_convert_loc (loc
, type
, TREE_OPERAND (aref1
, 1));
10632 tree esz
= fold_convert_loc (loc
, type
, array_ref_element_size (aref0
));
10633 tree diff
= fold_build2_loc (loc
, MINUS_EXPR
, type
, op0
, op1
);
10634 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
10636 fold_build2_loc (loc
, MULT_EXPR
, type
,
/* If the real or vector real constant CST of type TYPE has an exact
   inverse, return it, else return NULL.  */
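/* Illustrative sketch, not part of the original sources: for CST = 2.0
   the exact inverse 0.5 is returned; for CST = 3.0 the result is NULL,
   since 1/3 is not exactly representable in binary floating point.  */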
10646 exact_inverse (tree type
, tree cst
)
10652 switch (TREE_CODE (cst
))
10655 r
= TREE_REAL_CST (cst
);
10657 if (exact_real_inverse (TYPE_MODE (type
), &r
))
10658 return build_real (type
, r
);
10664 unit_type
= TREE_TYPE (type
);
10665 mode
= TYPE_MODE (unit_type
);
10667 tree_vector_builder elts
;
10668 if (!elts
.new_unary_operation (type
, cst
, false))
10670 unsigned int count
= elts
.encoded_nelts ();
10671 for (unsigned int i
= 0; i
< count
; ++i
)
10673 r
= TREE_REAL_CST (VECTOR_CST_ELT (cst
, i
));
10674 if (!exact_real_inverse (mode
, &r
))
10676 elts
.quick_push (build_real (unit_type
, r
));
10679 return elts
.build ();
/* Mask out the tz least significant bits of X of type TYPE where
   tz is the number of trailing zeroes in Y.  */
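/* Illustrative sketch, not part of the original sources: with
   Y = 0b11000 (three trailing zeroes) and X = 0b10111, the low three
   bits of X are cleared and the result is 0b10000.  */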
10690 mask_with_tz (tree type
, const wide_int
&x
, const wide_int
&y
)
10692 int tz
= wi::ctz (y
);
10694 return wi::mask (tz
, true, TYPE_PRECISION (type
)) & x
;
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */
10707 tree_expr_nonzero_warnv_p (tree t
, bool *strict_overflow_p
)
10709 tree type
= TREE_TYPE (t
);
10710 enum tree_code code
;
10712 /* Doing something useful for floating point would need more work. */
10713 if (!INTEGRAL_TYPE_P (type
) && !POINTER_TYPE_P (type
))
10716 code
= TREE_CODE (t
);
10717 switch (TREE_CODE_CLASS (code
))
10720 return tree_unary_nonzero_warnv_p (code
, type
, TREE_OPERAND (t
, 0),
10721 strict_overflow_p
);
10723 case tcc_comparison
:
10724 return tree_binary_nonzero_warnv_p (code
, type
,
10725 TREE_OPERAND (t
, 0),
10726 TREE_OPERAND (t
, 1),
10727 strict_overflow_p
);
10729 case tcc_declaration
:
10730 case tcc_reference
:
10731 return tree_single_nonzero_warnv_p (t
, strict_overflow_p
);
10739 case TRUTH_NOT_EXPR
:
10740 return tree_unary_nonzero_warnv_p (code
, type
, TREE_OPERAND (t
, 0),
10741 strict_overflow_p
);
10743 case TRUTH_AND_EXPR
:
10744 case TRUTH_OR_EXPR
:
10745 case TRUTH_XOR_EXPR
:
10746 return tree_binary_nonzero_warnv_p (code
, type
,
10747 TREE_OPERAND (t
, 0),
10748 TREE_OPERAND (t
, 1),
10749 strict_overflow_p
);
10755 case WITH_SIZE_EXPR
:
10757 return tree_single_nonzero_warnv_p (t
, strict_overflow_p
);
10759 case COMPOUND_EXPR
:
10762 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 1),
10763 strict_overflow_p
);
10766 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 0),
10767 strict_overflow_p
);
10771 tree fndecl
= get_callee_fndecl (t
);
10772 if (!fndecl
) return false;
10773 if (flag_delete_null_pointer_checks
&& !flag_check_new
10774 && DECL_IS_OPERATOR_NEW_P (fndecl
)
10775 && !TREE_NOTHROW (fndecl
))
10777 if (flag_delete_null_pointer_checks
10778 && lookup_attribute ("returns_nonnull",
10779 TYPE_ATTRIBUTES (TREE_TYPE (fndecl
))))
10781 return alloca_call_p (t
);
/* Return true when T is an address and is known to be nonzero.
   Handle warnings about undefined signed overflow.  */
10794 tree_expr_nonzero_p (tree t
)
10796 bool ret
, strict_overflow_p
;
10798 strict_overflow_p
= false;
10799 ret
= tree_expr_nonzero_warnv_p (t
, &strict_overflow_p
);
10800 if (strict_overflow_p
)
10801 fold_overflow_warning (("assuming signed overflow does not occur when "
10802 "determining that expression is always "
10804 WARN_STRICT_OVERFLOW_MISC
);
/* Return true if T is known not to be equal to an integer W.  */
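/* Illustrative sketch, not part of the original sources: if the known
   nonzero bits of T are 0xfe (so bit 0 is known to be clear), then T
   cannot equal 5, because 5 has bit 0 set.  */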
10811 expr_not_equal_to (tree t
, const wide_int
&w
)
10814 switch (TREE_CODE (t
))
10817 return wi::to_wide (t
) != w
;
10820 if (!INTEGRAL_TYPE_P (TREE_TYPE (t
)))
10824 get_range_query (cfun
)->range_of_expr (vr
, t
);
10826 get_global_range_query ()->range_of_expr (vr
, t
);
10828 if (!vr
.undefined_p ()
10829 && !vr
.contains_p (wide_int_to_tree (TREE_TYPE (t
), w
)))
/* If T has some known zero bits and W has any of those bits set,
   then T is known not to be equal to W.  */
10833 if (wi::ne_p (wi::zext (wi::bit_and_not (w
, get_nonzero_bits (t
)),
10834 TYPE_PRECISION (TREE_TYPE (t
))), 0))
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  LOC is the location of the resulting expression.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */
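/* Usage sketch, not part of the original sources:

     tree sum = fold_binary_loc (input_location, PLUS_EXPR,
				 integer_type_node,
				 build_int_cst (integer_type_node, 2),
				 build_int_cst (integer_type_node, 3));

   would yield the INTEGER_CST 5, while a combination that cannot be
   simplified yields NULL_TREE.  */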
10849 fold_binary_loc (location_t loc
, enum tree_code code
, tree type
,
10850 tree op0
, tree op1
)
10852 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
10853 tree arg0
, arg1
, tem
;
10854 tree t1
= NULL_TREE
;
10855 bool strict_overflow_p
;
10858 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
10859 && TREE_CODE_LENGTH (code
) == 2
10860 && op0
!= NULL_TREE
10861 && op1
!= NULL_TREE
);
/* Strip any conversions that don't change the mode.  This is
   safe for every expression, except for a comparison expression
   because its signedness is derived from its operands.  So, in
   the latter case, only strip conversions that don't change the
   signedness.  MIN_EXPR/MAX_EXPR also need signedness of arguments
   matched.

   Note that this is done as an internal manipulation within the
   constant folder, in order to find the simplest representation
   of the arguments so that their form can be studied.  In any
   case, the appropriate type conversions should be put back in
   the tree that will get out of the constant folder.  */
10879 if (kind
== tcc_comparison
|| code
== MIN_EXPR
|| code
== MAX_EXPR
)
10881 STRIP_SIGN_NOPS (arg0
);
10882 STRIP_SIGN_NOPS (arg1
);
/* Note that TREE_CONSTANT isn't enough: static var addresses are
   constant but we can't do arithmetic on them.  */
10892 if (CONSTANT_CLASS_P (arg0
) && CONSTANT_CLASS_P (arg1
))
10894 tem
= const_binop (code
, type
, arg0
, arg1
);
10895 if (tem
!= NULL_TREE
)
10897 if (TREE_TYPE (tem
) != type
)
10898 tem
= fold_convert_loc (loc
, type
, tem
);
/* If this is a commutative operation, and ARG0 is a constant, move it
   to ARG1 to reduce the number of tests below.  */
10905 if (commutative_tree_code (code
)
10906 && tree_swap_operands_p (arg0
, arg1
))
10907 return fold_build2_loc (loc
, code
, type
, op1
, op0
);
/* Likewise if this is a comparison, and ARG0 is a constant, move it
   to ARG1 to reduce the number of tests below.  */
10911 if (kind
== tcc_comparison
10912 && tree_swap_operands_p (arg0
, arg1
))
10913 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
, op1
, op0
);
10915 tem
= generic_simplify (loc
, code
, type
, op0
, op1
);
/* ARG0 is the first operand of EXPR, and ARG1 is the second operand.

   First check for cases where an arithmetic operation is applied to a
   compound, conditional, or comparison operation.  Push the arithmetic
   operation inside the compound or conditional to see if any folding
   can then be done.  Convert comparison to conditional for this purpose.
   This also optimizes non-constant cases that used to be done in
   expand_expr.

   Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
   one of the operands is a comparison and the other is a comparison, a
   BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
   code below would make the expression more complex.  Change it to a
   TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
   TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */
10935 if ((code
== BIT_AND_EXPR
|| code
== BIT_IOR_EXPR
10936 || code
== EQ_EXPR
|| code
== NE_EXPR
)
10937 && !VECTOR_TYPE_P (TREE_TYPE (arg0
))
10938 && ((truth_value_p (TREE_CODE (arg0
))
10939 && (truth_value_p (TREE_CODE (arg1
))
10940 || (TREE_CODE (arg1
) == BIT_AND_EXPR
10941 && integer_onep (TREE_OPERAND (arg1
, 1)))))
10942 || (truth_value_p (TREE_CODE (arg1
))
10943 && (truth_value_p (TREE_CODE (arg0
))
10944 || (TREE_CODE (arg0
) == BIT_AND_EXPR
10945 && integer_onep (TREE_OPERAND (arg0
, 1)))))))
10947 tem
= fold_build2_loc (loc
, code
== BIT_AND_EXPR
? TRUTH_AND_EXPR
10948 : code
== BIT_IOR_EXPR
? TRUTH_OR_EXPR
10951 fold_convert_loc (loc
, boolean_type_node
, arg0
),
10952 fold_convert_loc (loc
, boolean_type_node
, arg1
));
10954 if (code
== EQ_EXPR
)
10955 tem
= invert_truthvalue_loc (loc
, tem
);
10957 return fold_convert_loc (loc
, type
, tem
);
10960 if (TREE_CODE_CLASS (code
) == tcc_binary
10961 || TREE_CODE_CLASS (code
) == tcc_comparison
)
10963 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
10965 tem
= fold_build2_loc (loc
, code
, type
,
10966 fold_convert_loc (loc
, TREE_TYPE (op0
),
10967 TREE_OPERAND (arg0
, 1)), op1
);
10968 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
10971 if (TREE_CODE (arg1
) == COMPOUND_EXPR
)
10973 tem
= fold_build2_loc (loc
, code
, type
, op0
,
10974 fold_convert_loc (loc
, TREE_TYPE (op1
),
10975 TREE_OPERAND (arg1
, 1)));
10976 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg1
, 0),
10980 if (TREE_CODE (arg0
) == COND_EXPR
10981 || TREE_CODE (arg0
) == VEC_COND_EXPR
10982 || COMPARISON_CLASS_P (arg0
))
10984 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
10986 /*cond_first_p=*/1);
10987 if (tem
!= NULL_TREE
)
10991 if (TREE_CODE (arg1
) == COND_EXPR
10992 || TREE_CODE (arg1
) == VEC_COND_EXPR
10993 || COMPARISON_CLASS_P (arg1
))
10995 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
10997 /*cond_first_p=*/0);
10998 if (tem
!= NULL_TREE
)
11006 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
11007 if (TREE_CODE (arg0
) == ADDR_EXPR
11008 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == MEM_REF
)
11010 tree iref
= TREE_OPERAND (arg0
, 0);
11011 return fold_build2 (MEM_REF
, type
,
11012 TREE_OPERAND (iref
, 0),
11013 int_const_binop (PLUS_EXPR
, arg1
,
11014 TREE_OPERAND (iref
, 1)));
11017 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
11018 if (TREE_CODE (arg0
) == ADDR_EXPR
11019 && handled_component_p (TREE_OPERAND (arg0
, 0)))
11022 poly_int64 coffset
;
11023 base
= get_addr_base_and_unit_offset (TREE_OPERAND (arg0
, 0),
11027 return fold_build2 (MEM_REF
, type
,
11028 build1 (ADDR_EXPR
, TREE_TYPE (arg0
), base
),
11029 int_const_binop (PLUS_EXPR
, arg1
,
11030 size_int (coffset
)));
11035 case POINTER_PLUS_EXPR
:
11036 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
11037 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
11038 && INTEGRAL_TYPE_P (TREE_TYPE (arg0
)))
11039 return fold_convert_loc (loc
, type
,
11040 fold_build2_loc (loc
, PLUS_EXPR
, sizetype
,
11041 fold_convert_loc (loc
, sizetype
,
11043 fold_convert_loc (loc
, sizetype
,
11049 if (INTEGRAL_TYPE_P (type
) || VECTOR_INTEGER_TYPE_P (type
))
11051 /* X + (X / CST) * -CST is X % CST. */
11052 if (TREE_CODE (arg1
) == MULT_EXPR
11053 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == TRUNC_DIV_EXPR
11054 && operand_equal_p (arg0
,
11055 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0), 0))
11057 tree cst0
= TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1);
11058 tree cst1
= TREE_OPERAND (arg1
, 1);
11059 tree sum
= fold_binary_loc (loc
, PLUS_EXPR
, TREE_TYPE (cst1
),
11061 if (sum
&& integer_zerop (sum
))
11062 return fold_convert_loc (loc
, type
,
11063 fold_build2_loc (loc
, TRUNC_MOD_EXPR
,
11064 TREE_TYPE (arg0
), arg0
,
/* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
   one.  Make sure the type is not saturating and has the signedness of
   the stripped operands, as fold_plusminus_mult_expr will re-associate.
   ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
11073 if ((TREE_CODE (arg0
) == MULT_EXPR
11074 || TREE_CODE (arg1
) == MULT_EXPR
)
11075 && !TYPE_SATURATING (type
)
11076 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg0
))
11077 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg1
))
11078 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
11080 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
11085 if (! FLOAT_TYPE_P (type
))
/* Reassociate (plus (plus (mult) (foo)) (mult)) as
   (plus (plus (mult) (mult)) (foo)) so that we can
   take advantage of the factoring cases below.  */
11090 if (ANY_INTEGRAL_TYPE_P (type
)
11091 && TYPE_OVERFLOW_WRAPS (type
)
11092 && (((TREE_CODE (arg0
) == PLUS_EXPR
11093 || TREE_CODE (arg0
) == MINUS_EXPR
)
11094 && TREE_CODE (arg1
) == MULT_EXPR
)
11095 || ((TREE_CODE (arg1
) == PLUS_EXPR
11096 || TREE_CODE (arg1
) == MINUS_EXPR
)
11097 && TREE_CODE (arg0
) == MULT_EXPR
)))
11099 tree parg0
, parg1
, parg
, marg
;
11100 enum tree_code pcode
;
11102 if (TREE_CODE (arg1
) == MULT_EXPR
)
11103 parg
= arg0
, marg
= arg1
;
11105 parg
= arg1
, marg
= arg0
;
11106 pcode
= TREE_CODE (parg
);
11107 parg0
= TREE_OPERAND (parg
, 0);
11108 parg1
= TREE_OPERAND (parg
, 1);
11109 STRIP_NOPS (parg0
);
11110 STRIP_NOPS (parg1
);
11112 if (TREE_CODE (parg0
) == MULT_EXPR
11113 && TREE_CODE (parg1
) != MULT_EXPR
)
11114 return fold_build2_loc (loc
, pcode
, type
,
11115 fold_build2_loc (loc
, PLUS_EXPR
, type
,
11116 fold_convert_loc (loc
, type
,
11118 fold_convert_loc (loc
, type
,
11120 fold_convert_loc (loc
, type
, parg1
));
11121 if (TREE_CODE (parg0
) != MULT_EXPR
11122 && TREE_CODE (parg1
) == MULT_EXPR
)
11124 fold_build2_loc (loc
, PLUS_EXPR
, type
,
11125 fold_convert_loc (loc
, type
, parg0
),
11126 fold_build2_loc (loc
, pcode
, type
,
11127 fold_convert_loc (loc
, type
, marg
),
11128 fold_convert_loc (loc
, type
,
/* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
   to __complex__ ( x, y ).  This is not the same for SNaNs or
   if signed zeros are involved.  */
11137 if (!HONOR_SNANS (arg0
)
11138 && !HONOR_SIGNED_ZEROS (arg0
)
11139 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
11141 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
11142 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
11143 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
11144 bool arg0rz
= false, arg0iz
= false;
11145 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
11146 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
11148 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
11149 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
11150 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
11152 tree rp
= arg1r
? arg1r
11153 : build1 (REALPART_EXPR
, rtype
, arg1
);
11154 tree ip
= arg0i
? arg0i
11155 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
11156 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
11158 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
11160 tree rp
= arg0r
? arg0r
11161 : build1 (REALPART_EXPR
, rtype
, arg0
);
11162 tree ip
= arg1i
? arg1i
11163 : build1 (IMAGPART_EXPR
, rtype
, arg1
);
11164 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
/* Convert a + (b*c + d*e) into (a + b*c) + d*e.
   We associate floats only if the user has specified
   -fassociative-math.  */
11172 if (flag_associative_math
11173 && TREE_CODE (arg1
) == PLUS_EXPR
11174 && TREE_CODE (arg0
) != MULT_EXPR
)
11176 tree tree10
= TREE_OPERAND (arg1
, 0);
11177 tree tree11
= TREE_OPERAND (arg1
, 1);
11178 if (TREE_CODE (tree11
) == MULT_EXPR
11179 && TREE_CODE (tree10
) == MULT_EXPR
)
11182 tree0
= fold_build2_loc (loc
, PLUS_EXPR
, type
, arg0
, tree10
);
11183 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tree0
, tree11
);
/* Convert (b*c + d*e) + a into b*c + (d*e + a).
   We associate floats only if the user has specified
   -fassociative-math.  */
11189 if (flag_associative_math
11190 && TREE_CODE (arg0
) == PLUS_EXPR
11191 && TREE_CODE (arg1
) != MULT_EXPR
)
11193 tree tree00
= TREE_OPERAND (arg0
, 0);
11194 tree tree01
= TREE_OPERAND (arg0
, 1);
11195 if (TREE_CODE (tree01
) == MULT_EXPR
11196 && TREE_CODE (tree00
) == MULT_EXPR
)
11199 tree0
= fold_build2_loc (loc
, PLUS_EXPR
, type
, tree01
, arg1
);
11200 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tree00
, tree0
);
/* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
   is a rotate of A by C1 bits.  */
/* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
   is a rotate of A by B bits.
   Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
   though in this case CODE must be | and not + or ^, otherwise
   it doesn't return A when B is 0.  */
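/* Illustrative sketch, not part of the original sources: for a 32-bit
   unsigned A, (A << 5) + (A >> 27) is recognized as a rotate of A left
   by 5 bits.  */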
11214 enum tree_code code0
, code1
;
11216 code0
= TREE_CODE (arg0
);
11217 code1
= TREE_CODE (arg1
);
11218 if (((code0
== RSHIFT_EXPR
&& code1
== LSHIFT_EXPR
)
11219 || (code1
== RSHIFT_EXPR
&& code0
== LSHIFT_EXPR
))
11220 && operand_equal_p (TREE_OPERAND (arg0
, 0),
11221 TREE_OPERAND (arg1
, 0), 0)
11222 && (rtype
= TREE_TYPE (TREE_OPERAND (arg0
, 0)),
11223 TYPE_UNSIGNED (rtype
))
/* Only create rotates in complete modes.  Other cases are not
   expanded properly.  */
11226 && (element_precision (rtype
)
11227 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype
))))
11229 tree tree01
, tree11
;
11230 tree orig_tree01
, orig_tree11
;
11231 enum tree_code code01
, code11
;
11233 tree01
= orig_tree01
= TREE_OPERAND (arg0
, 1);
11234 tree11
= orig_tree11
= TREE_OPERAND (arg1
, 1);
11235 STRIP_NOPS (tree01
);
11236 STRIP_NOPS (tree11
);
11237 code01
= TREE_CODE (tree01
);
11238 code11
= TREE_CODE (tree11
);
11239 if (code11
!= MINUS_EXPR
11240 && (code01
== MINUS_EXPR
|| code01
== BIT_AND_EXPR
))
11242 std::swap (code0
, code1
);
11243 std::swap (code01
, code11
);
11244 std::swap (tree01
, tree11
);
11245 std::swap (orig_tree01
, orig_tree11
);
11247 if (code01
== INTEGER_CST
11248 && code11
== INTEGER_CST
11249 && (wi::to_widest (tree01
) + wi::to_widest (tree11
)
11250 == element_precision (rtype
)))
11252 tem
= build2_loc (loc
, LROTATE_EXPR
,
11253 rtype
, TREE_OPERAND (arg0
, 0),
11254 code0
== LSHIFT_EXPR
11255 ? orig_tree01
: orig_tree11
);
11256 return fold_convert_loc (loc
, type
, tem
);
11258 else if (code11
== MINUS_EXPR
)
11260 tree tree110
, tree111
;
11261 tree110
= TREE_OPERAND (tree11
, 0);
11262 tree111
= TREE_OPERAND (tree11
, 1);
11263 STRIP_NOPS (tree110
);
11264 STRIP_NOPS (tree111
);
11265 if (TREE_CODE (tree110
) == INTEGER_CST
11266 && compare_tree_int (tree110
,
11267 element_precision (rtype
)) == 0
11268 && operand_equal_p (tree01
, tree111
, 0))
11270 tem
= build2_loc (loc
, (code0
== LSHIFT_EXPR
11271 ? LROTATE_EXPR
: RROTATE_EXPR
),
11272 rtype
, TREE_OPERAND (arg0
, 0),
11274 return fold_convert_loc (loc
, type
, tem
);
11277 else if (code
== BIT_IOR_EXPR
11278 && code11
== BIT_AND_EXPR
11279 && pow2p_hwi (element_precision (rtype
)))
11281 tree tree110
, tree111
;
11282 tree110
= TREE_OPERAND (tree11
, 0);
11283 tree111
= TREE_OPERAND (tree11
, 1);
11284 STRIP_NOPS (tree110
);
11285 STRIP_NOPS (tree111
);
11286 if (TREE_CODE (tree110
) == NEGATE_EXPR
11287 && TREE_CODE (tree111
) == INTEGER_CST
11288 && compare_tree_int (tree111
,
11289 element_precision (rtype
) - 1) == 0
11290 && operand_equal_p (tree01
, TREE_OPERAND (tree110
, 0), 0))
11292 tem
= build2_loc (loc
, (code0
== LSHIFT_EXPR
11293 ? LROTATE_EXPR
: RROTATE_EXPR
),
11294 rtype
, TREE_OPERAND (arg0
, 0),
11296 return fold_convert_loc (loc
, type
, tem
);
/* In most languages, can't associate operations on floats through
   parentheses.  Rather than remember where the parentheses were, we
   don't associate floats at all, unless the user has specified
   -fassociative-math.
   And, we need to make sure type is not saturating.  */
11309 if ((! FLOAT_TYPE_P (type
) || flag_associative_math
)
11310 && !TYPE_SATURATING (type
))
11312 tree var0
, minus_var0
, con0
, minus_con0
, lit0
, minus_lit0
;
11313 tree var1
, minus_var1
, con1
, minus_con1
, lit1
, minus_lit1
;
/* Split both trees into variables, constants, and literals.  Then
   associate each group together, the constants with literals,
   then the result with variables.  This increases the chances of
   literals being recombined later and of generating relocatable
   expressions for the sum of a constant and literal.  */
11322 var0
= split_tree (arg0
, type
, code
,
11323 &minus_var0
, &con0
, &minus_con0
,
11324 &lit0
, &minus_lit0
, 0);
11325 var1
= split_tree (arg1
, type
, code
,
11326 &minus_var1
, &con1
, &minus_con1
,
11327 &lit1
, &minus_lit1
, code
== MINUS_EXPR
);
11329 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
11330 if (code
== MINUS_EXPR
)
11333 /* With undefined overflow prefer doing association in a type
11334 which wraps on overflow, if that is one of the operand types. */
11335 if ((POINTER_TYPE_P (type
) || INTEGRAL_TYPE_P (type
))
11336 && !TYPE_OVERFLOW_WRAPS (type
))
11338 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
11339 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
11340 atype
= TREE_TYPE (arg0
);
11341 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
11342 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1
)))
11343 atype
= TREE_TYPE (arg1
);
11344 gcc_assert (TYPE_PRECISION (atype
) == TYPE_PRECISION (type
));
11347 /* With undefined overflow we can only associate constants with one
11348 variable, and constants whose association doesn't overflow. */
11349 if ((POINTER_TYPE_P (atype
) || INTEGRAL_TYPE_P (atype
))
11350 && !TYPE_OVERFLOW_WRAPS (atype
))
11352 if ((var0
&& var1
) || (minus_var0
&& minus_var1
))
/* ??? If split_tree would handle NEGATE_EXPR we could
   simply reject these cases and the allowed cases would
   be the var0/minus_var1 ones.  */
11357 tree tmp0
= var0
? var0
: minus_var0
;
11358 tree tmp1
= var1
? var1
: minus_var1
;
11359 bool one_neg
= false;
11361 if (TREE_CODE (tmp0
) == NEGATE_EXPR
)
11363 tmp0
= TREE_OPERAND (tmp0
, 0);
11364 one_neg
= !one_neg
;
11366 if (CONVERT_EXPR_P (tmp0
)
11367 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0
, 0)))
11368 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0
, 0)))
11369 <= TYPE_PRECISION (atype
)))
11370 tmp0
= TREE_OPERAND (tmp0
, 0);
11371 if (TREE_CODE (tmp1
) == NEGATE_EXPR
)
11373 tmp1
= TREE_OPERAND (tmp1
, 0);
11374 one_neg
= !one_neg
;
11376 if (CONVERT_EXPR_P (tmp1
)
11377 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1
, 0)))
11378 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1
, 0)))
11379 <= TYPE_PRECISION (atype
)))
11380 tmp1
= TREE_OPERAND (tmp1
, 0);
11381 /* The only case we can still associate with two variables
11382 is if they cancel out. */
11384 || !operand_equal_p (tmp0
, tmp1
, 0))
11387 else if ((var0
&& minus_var1
11388 && ! operand_equal_p (var0
, minus_var1
, 0))
11389 || (minus_var0
&& var1
11390 && ! operand_equal_p (minus_var0
, var1
, 0)))
/* Only do something if we found more than two objects.  Otherwise,
   nothing has changed and we risk infinite recursion.  */
11397 && ((var0
!= 0) + (var1
!= 0)
11398 + (minus_var0
!= 0) + (minus_var1
!= 0)
11399 + (con0
!= 0) + (con1
!= 0)
11400 + (minus_con0
!= 0) + (minus_con1
!= 0)
11401 + (lit0
!= 0) + (lit1
!= 0)
11402 + (minus_lit0
!= 0) + (minus_lit1
!= 0)) > 2)
11404 var0
= associate_trees (loc
, var0
, var1
, code
, atype
);
11405 minus_var0
= associate_trees (loc
, minus_var0
, minus_var1
,
11407 con0
= associate_trees (loc
, con0
, con1
, code
, atype
);
11408 minus_con0
= associate_trees (loc
, minus_con0
, minus_con1
,
11410 lit0
= associate_trees (loc
, lit0
, lit1
, code
, atype
);
11411 minus_lit0
= associate_trees (loc
, minus_lit0
, minus_lit1
,
11414 if (minus_var0
&& var0
)
11416 var0
= associate_trees (loc
, var0
, minus_var0
,
11417 MINUS_EXPR
, atype
);
11420 if (minus_con0
&& con0
)
11422 con0
= associate_trees (loc
, con0
, minus_con0
,
11423 MINUS_EXPR
, atype
);
/* Preserve the MINUS_EXPR if the negative part of the literal is
   greater than the positive part.  Otherwise, the multiplicative
   folding code (i.e. extract_muldiv) may be fooled in case
   unsigned constants are subtracted, like in the following
   example: ((X*2 + 4) - 8U)/2.  */
11432 if (minus_lit0
&& lit0
)
11434 if (TREE_CODE (lit0
) == INTEGER_CST
11435 && TREE_CODE (minus_lit0
) == INTEGER_CST
11436 && tree_int_cst_lt (lit0
, minus_lit0
)
11437 /* But avoid ending up with only negated parts. */
11440 minus_lit0
= associate_trees (loc
, minus_lit0
, lit0
,
11441 MINUS_EXPR
, atype
);
11446 lit0
= associate_trees (loc
, lit0
, minus_lit0
,
11447 MINUS_EXPR
, atype
);
11452 /* Don't introduce overflows through reassociation. */
11453 if ((lit0
&& TREE_OVERFLOW_P (lit0
))
11454 || (minus_lit0
&& TREE_OVERFLOW_P (minus_lit0
)))
11457 /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
11458 con0
= associate_trees (loc
, con0
, lit0
, code
, atype
);
11460 minus_con0
= associate_trees (loc
, minus_con0
, minus_lit0
,
11464 /* Eliminate minus_con0. */
11468 con0
= associate_trees (loc
, con0
, minus_con0
,
11469 MINUS_EXPR
, atype
);
11471 var0
= associate_trees (loc
, var0
, minus_con0
,
11472 MINUS_EXPR
, atype
);
11474 gcc_unreachable ();
11478 /* Eliminate minus_var0. */
11482 con0
= associate_trees (loc
, con0
, minus_var0
,
11483 MINUS_EXPR
, atype
);
11485 gcc_unreachable ();
11490 fold_convert_loc (loc
, type
, associate_trees (loc
, var0
, con0
,
11497 case POINTER_DIFF_EXPR
:
11499 /* Fold &a[i] - &a[j] to i-j. */
11500 if (TREE_CODE (arg0
) == ADDR_EXPR
11501 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == ARRAY_REF
11502 && TREE_CODE (arg1
) == ADDR_EXPR
11503 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == ARRAY_REF
)
11505 tree tem
= fold_addr_of_array_ref_difference (loc
, type
,
11506 TREE_OPERAND (arg0
, 0),
11507 TREE_OPERAND (arg1
, 0),
11509 == POINTER_DIFF_EXPR
);
11514 /* Further transformations are not for pointers. */
11515 if (code
== POINTER_DIFF_EXPR
)
11518 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
11519 if (TREE_CODE (arg0
) == NEGATE_EXPR
11520 && negate_expr_p (op1
)
11521 /* If arg0 is e.g. unsigned int and type is int, then this could
11522 introduce UB, because if A is INT_MIN at runtime, the original
11523 expression can be well defined while the latter is not.
11525 && !(ANY_INTEGRAL_TYPE_P (type
)
11526 && TYPE_OVERFLOW_UNDEFINED (type
)
11527 && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
11528 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))))
11529 return fold_build2_loc (loc
, MINUS_EXPR
, type
, negate_expr (op1
),
11530 fold_convert_loc (loc
, type
,
11531 TREE_OPERAND (arg0
, 0)));
/* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
   __complex__ ( x, -y ).  This is not the same for SNaNs or if
   signed zeros are involved.  */
11536 if (!HONOR_SNANS (arg0
)
11537 && !HONOR_SIGNED_ZEROS (arg0
)
11538 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
11540 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
11541 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
11542 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
11543 bool arg0rz
= false, arg0iz
= false;
11544 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
11545 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
11547 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
11548 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
11549 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
11551 tree rp
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
11553 : build1 (REALPART_EXPR
, rtype
, arg1
));
11554 tree ip
= arg0i
? arg0i
11555 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
11556 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
11558 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
11560 tree rp
= arg0r
? arg0r
11561 : build1 (REALPART_EXPR
, rtype
, arg0
);
11562 tree ip
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
11564 : build1 (IMAGPART_EXPR
, rtype
, arg1
));
11565 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
11570 /* A - B -> A + (-B) if B is easily negatable. */
11571 if (negate_expr_p (op1
)
11572 && ! TYPE_OVERFLOW_SANITIZED (type
)
11573 && ((FLOAT_TYPE_P (type
)
11574 /* Avoid this transformation if B is a positive REAL_CST. */
11575 && (TREE_CODE (op1
) != REAL_CST
11576 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1
))))
11577 || INTEGRAL_TYPE_P (type
)))
11578 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
11579 fold_convert_loc (loc
, type
, arg0
),
11580 negate_expr (op1
));
/* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
   one.  Make sure the type is not saturating and has the signedness of
   the stripped operands, as fold_plusminus_mult_expr will re-associate.
   ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
11586 if ((TREE_CODE (arg0
) == MULT_EXPR
11587 || TREE_CODE (arg1
) == MULT_EXPR
)
11588 && !TYPE_SATURATING (type
)
11589 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg0
))
11590 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg1
))
11591 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
11593 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
11601 if (! FLOAT_TYPE_P (type
))
11603 /* Transform x * -C into -x * C if x is easily negatable. */
11604 if (TREE_CODE (op1
) == INTEGER_CST
11605 && tree_int_cst_sgn (op1
) == -1
11606 && negate_expr_p (op0
)
11607 && negate_expr_p (op1
)
11608 && (tem
= negate_expr (op1
)) != op1
11609 && ! TREE_OVERFLOW (tem
))
11610 return fold_build2_loc (loc
, MULT_EXPR
, type
,
11611 fold_convert_loc (loc
, type
,
11612 negate_expr (op0
)), tem
);
11614 strict_overflow_p
= false;
11615 if (TREE_CODE (arg1
) == INTEGER_CST
11616 && (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
11617 &strict_overflow_p
)) != 0)
11619 if (strict_overflow_p
)
11620 fold_overflow_warning (("assuming signed overflow does not "
11621 "occur when simplifying "
11623 WARN_STRICT_OVERFLOW_MISC
);
11624 return fold_convert_loc (loc
, type
, tem
);
11627 /* Optimize z * conj(z) for integer complex numbers. */
11628 if (TREE_CODE (arg0
) == CONJ_EXPR
11629 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11630 return fold_mult_zconjz (loc
, type
, arg1
);
11631 if (TREE_CODE (arg1
) == CONJ_EXPR
11632 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11633 return fold_mult_zconjz (loc
, type
, arg0
);
/* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
   This is not the same for NaNs or if signed zeros are
   involved.  */
11640 if (!HONOR_NANS (arg0
)
11641 && !HONOR_SIGNED_ZEROS (arg0
)
11642 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
))
11643 && TREE_CODE (arg1
) == COMPLEX_CST
11644 && real_zerop (TREE_REALPART (arg1
)))
11646 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
11647 if (real_onep (TREE_IMAGPART (arg1
)))
11649 fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
11650 negate_expr (fold_build1_loc (loc
, IMAGPART_EXPR
,
11652 fold_build1_loc (loc
, REALPART_EXPR
, rtype
, arg0
));
11653 else if (real_minus_onep (TREE_IMAGPART (arg1
)))
11655 fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
11656 fold_build1_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
),
11657 negate_expr (fold_build1_loc (loc
, REALPART_EXPR
,
/* Optimize z * conj(z) for floating point complex numbers.
   Guarded by flag_unsafe_math_optimizations as non-finite
   imaginary components don't produce scalar results.  */
11664 if (flag_unsafe_math_optimizations
11665 && TREE_CODE (arg0
) == CONJ_EXPR
11666 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11667 return fold_mult_zconjz (loc
, type
, arg1
);
11668 if (flag_unsafe_math_optimizations
11669 && TREE_CODE (arg1
) == CONJ_EXPR
11670 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11671 return fold_mult_zconjz (loc
, type
, arg0
);
11676 /* Canonicalize (X & C1) | C2. */
11677 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11678 && TREE_CODE (arg1
) == INTEGER_CST
11679 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11681 int width
= TYPE_PRECISION (type
), w
;
11682 wide_int c1
= wi::to_wide (TREE_OPERAND (arg0
, 1));
11683 wide_int c2
= wi::to_wide (arg1
);
11685 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11686 if ((c1
& c2
) == c1
)
11687 return omit_one_operand_loc (loc
, type
, arg1
,
11688 TREE_OPERAND (arg0
, 0));
11690 wide_int msk
= wi::mask (width
, false,
11691 TYPE_PRECISION (TREE_TYPE (arg1
)));
11693 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11694 if (wi::bit_and_not (msk
, c1
| c2
) == 0)
11696 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11697 return fold_build2_loc (loc
, BIT_IOR_EXPR
, type
, tem
, arg1
);
/* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
   unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
   mode which allows further optimizations.  */
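/* Illustrative sketch, not part of the original sources: for
   (X & 0x3c) | 0x0f, C1 & ~C2 is 0x30, so the expression is
   canonicalized to (X & 0x30) | 0x0f.  */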
11705 wide_int c3
= wi::bit_and_not (c1
, c2
);
11706 for (w
= BITS_PER_UNIT
; w
<= width
; w
<<= 1)
11708 wide_int mask
= wi::mask (w
, false,
11709 TYPE_PRECISION (type
));
11710 if (((c1
| c2
) & mask
) == mask
11711 && wi::bit_and_not (c1
, mask
) == 0)
11720 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11721 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
, tem
,
11722 wide_int_to_tree (type
, c3
));
11723 return fold_build2_loc (loc
, BIT_IOR_EXPR
, type
, tem
, arg1
);
11727 /* See if this can be simplified into a rotate first. If that
11728 is unsuccessful continue in the association code. */
11732 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11733 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11734 && INTEGRAL_TYPE_P (type
)
11735 && integer_onep (TREE_OPERAND (arg0
, 1))
11736 && integer_onep (arg1
))
11737 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
,
11738 build_zero_cst (TREE_TYPE (arg0
)));
11740 /* See if this can be simplified into a rotate first. If that
11741 is unsuccessful continue in the association code. */
11745 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11746 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11747 && INTEGRAL_TYPE_P (type
)
11748 && integer_onep (TREE_OPERAND (arg0
, 1))
11749 && integer_onep (arg1
))
11752 tem
= TREE_OPERAND (arg0
, 0);
11753 tem2
= fold_convert_loc (loc
, TREE_TYPE (tem
), arg1
);
11754 tem2
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
),
11756 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem2
,
11757 build_zero_cst (TREE_TYPE (tem
)));
11759 /* Fold ~X & 1 as (X & 1) == 0. */
11760 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11761 && INTEGRAL_TYPE_P (type
)
11762 && integer_onep (arg1
))
11765 tem
= TREE_OPERAND (arg0
, 0);
11766 tem2
= fold_convert_loc (loc
, TREE_TYPE (tem
), arg1
);
11767 tem2
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
),
11769 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem2
,
11770 build_zero_cst (TREE_TYPE (tem
)));
11772 /* Fold !X & 1 as X == 0. */
11773 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
11774 && integer_onep (arg1
))
11776 tem
= TREE_OPERAND (arg0
, 0);
11777 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem
,
11778 build_zero_cst (TREE_TYPE (tem
)));
/* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
   multiple of 1 << CST.  */
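/* Illustrative sketch, not part of the original sources: (X * 8) & -4
   folds to X * 8, because X * 8 is always a multiple of 4 and the mask
   -4 only clears bits that are already zero.  */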
11783 if (TREE_CODE (arg1
) == INTEGER_CST
)
11785 wi::tree_to_wide_ref cst1
= wi::to_wide (arg1
);
11786 wide_int ncst1
= -cst1
;
11787 if ((cst1
& ncst1
) == ncst1
11788 && multiple_of_p (type
, arg0
,
11789 wide_int_to_tree (TREE_TYPE (arg1
), ncst1
)))
11790 return fold_convert_loc (loc
, type
, arg0
);
/* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
   bits from CST2.  */
11795 if (TREE_CODE (arg1
) == INTEGER_CST
11796 && TREE_CODE (arg0
) == MULT_EXPR
11797 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11799 wi::tree_to_wide_ref warg1
= wi::to_wide (arg1
);
11801 = mask_with_tz (type
, warg1
, wi::to_wide (TREE_OPERAND (arg0
, 1)));
11804 return omit_two_operands_loc (loc
, type
, build_zero_cst (type
),
11806 else if (masked
!= warg1
)
/* Avoid the transform if arg1 is a mask of some
   mode which allows further optimizations.  */
11810 int pop
= wi::popcount (warg1
);
11811 if (!(pop
>= BITS_PER_UNIT
11813 && wi::mask (pop
, false, warg1
.get_precision ()) == warg1
))
11814 return fold_build2_loc (loc
, code
, type
, op0
,
11815 wide_int_to_tree (type
, masked
));
11819 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11820 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) == NOP_EXPR
11821 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0
, 0))))
11823 prec
= element_precision (TREE_TYPE (TREE_OPERAND (arg0
, 0)));
11825 wide_int mask
= wide_int::from (wi::to_wide (arg1
), prec
, UNSIGNED
);
11828 fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
/* Don't touch a floating-point divide by zero unless the mode
   of the constant can represent infinity.  */
11836 if (TREE_CODE (arg1
) == REAL_CST
11837 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1
)))
11838 && real_zerop (arg1
))
11841 /* (-A) / (-B) -> A / B */
11842 if (TREE_CODE (arg0
) == NEGATE_EXPR
&& negate_expr_p (arg1
))
11843 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11844 TREE_OPERAND (arg0
, 0),
11845 negate_expr (arg1
));
11846 if (TREE_CODE (arg1
) == NEGATE_EXPR
&& negate_expr_p (arg0
))
11847 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11848 negate_expr (arg0
),
11849 TREE_OPERAND (arg1
, 0));
11852 case TRUNC_DIV_EXPR
:
11855 case FLOOR_DIV_EXPR
:
/* Simplify A / (B << N) where A and B are positive and B is
   a power of 2, to A >> (N + log2(B)).  */
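/* Illustrative sketch, not part of the original sources: for unsigned
   A, A / (4 << N) becomes A >> (N + 2), since log2(4) == 2.  */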
11858 strict_overflow_p
= false;
11859 if (TREE_CODE (arg1
) == LSHIFT_EXPR
11860 && (TYPE_UNSIGNED (type
)
11861 || tree_expr_nonnegative_warnv_p (op0
, &strict_overflow_p
)))
11863 tree sval
= TREE_OPERAND (arg1
, 0);
11864 if (integer_pow2p (sval
) && tree_int_cst_sgn (sval
) > 0)
11866 tree sh_cnt
= TREE_OPERAND (arg1
, 1);
11867 tree pow2
= build_int_cst (TREE_TYPE (sh_cnt
),
11868 wi::exact_log2 (wi::to_wide (sval
)));
11870 if (strict_overflow_p
)
11871 fold_overflow_warning (("assuming signed overflow does not "
11872 "occur when simplifying A / (B << N)"),
11873 WARN_STRICT_OVERFLOW_MISC
);
11875 sh_cnt
= fold_build2_loc (loc
, PLUS_EXPR
, TREE_TYPE (sh_cnt
),
11877 return fold_build2_loc (loc
, RSHIFT_EXPR
, type
,
11878 fold_convert_loc (loc
, type
, arg0
), sh_cnt
);
11884 case ROUND_DIV_EXPR
:
11885 case CEIL_DIV_EXPR
:
11886 case EXACT_DIV_EXPR
:
11887 if (integer_zerop (arg1
))
/* Convert -A / -B to A / B when the type is signed and overflow is
   undefined.  */
11892 if ((!ANY_INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
11893 && TREE_CODE (op0
) == NEGATE_EXPR
11894 && negate_expr_p (op1
))
11896 if (ANY_INTEGRAL_TYPE_P (type
))
11897 fold_overflow_warning (("assuming signed overflow does not occur "
11898 "when distributing negation across "
11900 WARN_STRICT_OVERFLOW_MISC
);
11901 return fold_build2_loc (loc
, code
, type
,
11902 fold_convert_loc (loc
, type
,
11903 TREE_OPERAND (arg0
, 0)),
11904 negate_expr (op1
));
11906 if ((!ANY_INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
11907 && TREE_CODE (arg1
) == NEGATE_EXPR
11908 && negate_expr_p (op0
))
11910 if (ANY_INTEGRAL_TYPE_P (type
))
11911 fold_overflow_warning (("assuming signed overflow does not occur "
11912 "when distributing negation across "
11914 WARN_STRICT_OVERFLOW_MISC
);
11915 return fold_build2_loc (loc
, code
, type
,
11917 fold_convert_loc (loc
, type
,
11918 TREE_OPERAND (arg1
, 0)));
/* If arg0 is a multiple of arg1, then rewrite to the fastest div
   operation, EXACT_DIV_EXPR.

   Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
   At one time others generated faster code, it's not clear if they do
   after the last round of changes to the DIV code in expmed.cc.  */
11927 if ((code
== CEIL_DIV_EXPR
|| code
== FLOOR_DIV_EXPR
)
11928 && multiple_of_p (type
, arg0
, arg1
))
11929 return fold_build2_loc (loc
, EXACT_DIV_EXPR
, type
,
11930 fold_convert (type
, arg0
),
11931 fold_convert (type
, arg1
));
11933 strict_overflow_p
= false;
11934 if (TREE_CODE (arg1
) == INTEGER_CST
11935 && (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
11936 &strict_overflow_p
)) != 0)
11938 if (strict_overflow_p
)
11939 fold_overflow_warning (("assuming signed overflow does not occur "
11940 "when simplifying division"),
11941 WARN_STRICT_OVERFLOW_MISC
);
11942 return fold_convert_loc (loc
, type
, tem
);
11947 case CEIL_MOD_EXPR
:
11948 case FLOOR_MOD_EXPR
:
11949 case ROUND_MOD_EXPR
:
11950 case TRUNC_MOD_EXPR
:
11951 strict_overflow_p
= false;
11952 if (TREE_CODE (arg1
) == INTEGER_CST
11953 && (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
11954 &strict_overflow_p
)) != 0)
11956 if (strict_overflow_p
)
11957 fold_overflow_warning (("assuming signed overflow does not occur "
11958 "when simplifying modulus"),
11959 WARN_STRICT_OVERFLOW_MISC
);
11960 return fold_convert_loc (loc
, type
, tem
);
/* Since negative shift count is not well-defined,
   don't try to compute it in the compiler.  */
11971 if (TREE_CODE (arg1
) == INTEGER_CST
&& tree_int_cst_sgn (arg1
) < 0)
11974 prec
= element_precision (type
);
/* If we have a rotate of a bit operation with the rotate count and
   the second operand of the bit operation both constant,
   permute the two operations.  */
11979 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
11980 && (TREE_CODE (arg0
) == BIT_AND_EXPR
11981 || TREE_CODE (arg0
) == BIT_IOR_EXPR
11982 || TREE_CODE (arg0
) == BIT_XOR_EXPR
)
11983 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11985 tree arg00
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11986 tree arg01
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11987 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
,
11988 fold_build2_loc (loc
, code
, type
,
11990 fold_build2_loc (loc
, code
, type
,
/* Two consecutive rotates adding up to some integer
   multiple of the precision of the type can be ignored.  */
11996 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
11997 && TREE_CODE (arg0
) == RROTATE_EXPR
11998 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
11999 && wi::umod_trunc (wi::to_wide (arg1
)
12000 + wi::to_wide (TREE_OPERAND (arg0
, 1)),
12002 return fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
12010 case TRUTH_ANDIF_EXPR
:
/* Note that the operands of this must be ints
   and their values must be 0 or 1.
   ("true" is a fixed value perhaps depending on the language.)  */
12014 /* If first arg is constant zero, return it. */
12015 if (integer_zerop (arg0
))
12016 return fold_convert_loc (loc
, type
, arg0
);
12018 case TRUTH_AND_EXPR
:
12019 /* If either arg is constant true, drop it. */
12020 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
12021 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
12022 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
)
12023 /* Preserve sequence points. */
12024 && (code
!= TRUTH_ANDIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
12025 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12026 /* If second arg is constant zero, result is zero, but first arg
12027 must be evaluated. */
12028 if (integer_zerop (arg1
))
12029 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
/* Likewise for first arg, but note that only the TRUTH_AND_EXPR
   case will be handled here.  */
12032 if (integer_zerop (arg0
))
12033 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12035 /* !X && X is always false. */
12036 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
12037 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
12038 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg1
);
12039 /* X && !X is always false. */
12040 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
12041 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
12042 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
12044 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12045 means A >= Y && A != MAX, but in this case we know that
12048 if (!TREE_SIDE_EFFECTS (arg0
)
12049 && !TREE_SIDE_EFFECTS (arg1
))
12051 tem
= fold_to_nonsharp_ineq_using_bound (loc
, arg0
, arg1
);
12052 if (tem
&& !operand_equal_p (tem
, arg0
, 0))
12053 return fold_convert (type
,
12054 fold_build2_loc (loc
, code
, TREE_TYPE (arg1
),
12057 tem
= fold_to_nonsharp_ineq_using_bound (loc
, arg1
, arg0
);
12058 if (tem
&& !operand_equal_p (tem
, arg1
, 0))
12059 return fold_convert (type
,
12060 fold_build2_loc (loc
, code
, TREE_TYPE (arg0
),
12064 if ((tem
= fold_truth_andor (loc
, code
, type
, arg0
, arg1
, op0
, op1
))
    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or true.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return fold_convert_loc (loc, type, arg0);
      /* FALLTHRU */
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant true, result is true, but we must
	 evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
	 TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X || X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
      /* X || !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* (X && !Y) || (!X && Y) is X ^ Y */
      if (TREE_CODE (arg0) == TRUTH_AND_EXPR
	  && TREE_CODE (arg1) == TRUTH_AND_EXPR)
	{
	  tree a0, a1, l0, l1, n0, n1;

	  a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));

	  l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));

	  n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
	  n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);

	  if ((operand_equal_p (n0, a0, 0)
	       && operand_equal_p (n1, a1, 0))
	      || (operand_equal_p (n0, a1, 0)
		  && operand_equal_p (n1, a0, 0)))
	    return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
	}

      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
	  != NULL_TREE)
	return tem;

      return NULL_TREE;
    case TRUTH_XOR_EXPR:
      /* If the second arg is constant zero, drop it.  */
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If the second arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg1))
	{
	  tem = invert_truthvalue_loc (loc, arg0);
	  return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	}
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* !X ^ X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg1);

      /* X ^ !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      return NULL_TREE;
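
      /* Worked example (added commentary, not part of the original source):
	 for a truth value b, "b ^ b" folds to 0, "b ^ 1" to "!b", and
	 "b ^ !b" to 1, which is exactly what the transformations above
	 implement.  */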
    case EQ_EXPR:
    case NE_EXPR:
      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;

      /* bool_var != 1 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	  && code == NE_EXPR)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
						  TREE_TYPE (arg0), arg0));

      /* bool_var == 0 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	  && code == EQ_EXPR)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
						  TREE_TYPE (arg0), arg0));

      /* !exp != 0 becomes !exp.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
	  && code == NE_EXPR)
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
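
      /* Worked example (added commentary, not part of the original source):
	 for "bool b;", the comparisons "b != 1" and "b == 0" both fold to
	 "!b" by the two transformations above.  */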
      /* If this is an EQ or NE comparison with zero and ARG0 is
	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
	 two operations, but the latter can be done in one less insn
	 on machines that have only two-operand insns or on which a
	 constant cannot be the first operand.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_zerop (arg1))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  if (TREE_CODE (arg00) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg00, 0)))
	    {
	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
					  arg01, TREE_OPERAND (arg00, 1));
	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				     build_one_cst (TREE_TYPE (arg0)));
	      return fold_build2_loc (loc, code, type,
				      fold_convert_loc (loc, TREE_TYPE (arg1),
							tem), arg1);
	    }
	  else if (TREE_CODE (arg01) == LSHIFT_EXPR
		   && integer_onep (TREE_OPERAND (arg01, 0)))
	    {
	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
					  arg00, TREE_OPERAND (arg01, 1));
	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				     build_one_cst (TREE_TYPE (arg0)));
	      return fold_build2_loc (loc, code, type,
				      fold_convert_loc (loc, TREE_TYPE (arg1),
							tem), arg1);
	    }
	}

      /* If this is a comparison of a field, we may be able to simplify it.  */
      if ((TREE_CODE (arg0) == COMPONENT_REF
	   || TREE_CODE (arg0) == BIT_FIELD_REF)
	  /* Handle the constant case even without -O
	     to make sure the warnings are given.  */
	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
	{
	  t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
	  if (t1)
	    return t1;
	}

      /* Optimize comparisons of strlen vs zero to a compare of the
	 first character of the string vs zero.  To wit,
		strlen(ptr) == 0   =>  *ptr == 0
		strlen(ptr) != 0   =>  *ptr != 0
	 Other cases should reduce to one of these two (or a constant)
	 due to the return value of strlen being unsigned.  */
      if (TREE_CODE (arg0) == CALL_EXPR && integer_zerop (arg1))
	{
	  tree fndecl = get_callee_fndecl (arg0);

	  if (fndecl
	      && fndecl_built_in_p (fndecl, BUILT_IN_STRLEN)
	      && call_expr_nargs (arg0) == 1
	      && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
		  == POINTER_TYPE))
	    {
	      tree ptrtype
		= build_pointer_type (build_qualified_type (char_type_node,
							    TYPE_QUAL_CONST));
	      tree ptr = fold_convert_loc (loc, ptrtype,
					   CALL_EXPR_ARG (arg0, 0));
	      tree iref = build_fold_indirect_ref_loc (loc, ptr);
	      return fold_build2_loc (loc, code, type, iref,
				      build_int_cst (TREE_TYPE (iref), 0));
	    }
	}
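
      /* Worked example (added commentary, not part of the original source):
	 "if (strlen (s) != 0)" becomes "if (*s != 0)", testing only the
	 first character instead of scanning the whole string.  */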
      /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
	 of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
      if (TREE_CODE (arg0) == RSHIFT_EXPR
	  && integer_zerop (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree itype = TREE_TYPE (arg00);
	  if (wi::to_wide (arg01) == element_precision (itype) - 1)
	    {
	      if (TYPE_UNSIGNED (itype))
		{
		  itype = signed_type_for (itype);
		  arg00 = fold_convert_loc (loc, itype, arg00);
		}
	      return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
				      type, arg00, build_zero_cst (itype));
	    }
	}
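
      /* Worked example (added commentary, not part of the original source):
	 for a 32-bit signed int x, "(x >> 31) != 0" tests only the sign
	 bit and therefore folds to "x < 0"; likewise "(x >> 31) == 0"
	 folds to "x >= 0".  */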
      /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
	 (X & C) == 0 when C is a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
				 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
				 TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type, tem,
				  fold_convert_loc (loc, TREE_TYPE (arg0),
						    arg1));
	}

      /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
	 constant C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
				  arg00, build_int_cst (TREE_TYPE (arg00), 0));
	}

      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
	 when C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
				 arg000, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
				  tem, build_int_cst (TREE_TYPE (tem), 0));
	}

      if (integer_zerop (arg1)
	  && tree_expr_nonzero_p (arg0))
	{
	  tree res = constant_boolean_node (code == NE_EXPR, type);
	  return omit_one_operand_loc (loc, type, res, arg0);
	}
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == BIT_XOR_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
	     operand_equal_p guarantees no side-effects so we don't need
	     to use omit_one_operand on Z.  */
	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2_loc (loc, code, type, arg00,
				    fold_convert_loc (loc, TREE_TYPE (arg00),
						      arg10));
	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2_loc (loc, code, type, arg00,
				    fold_convert_loc (loc, TREE_TYPE (arg00),
						      arg11));
	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2_loc (loc, code, type, arg01,
				    fold_convert_loc (loc, TREE_TYPE (arg01),
						      arg10));
	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2_loc (loc, code, type, arg01,
				    fold_convert_loc (loc, TREE_TYPE (arg01),
						      arg11));

	  /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
	  if (TREE_CODE (arg01) == INTEGER_CST
	      && TREE_CODE (arg11) == INTEGER_CST)
	    {
	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
				     fold_convert_loc (loc, itype, arg11));
	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
	      return fold_build2_loc (loc, code, type, tem,
				      fold_convert_loc (loc, itype, arg10));
	    }
	}
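
      /* Worked example (added commentary, not part of the original source):
	 "(x ^ 5) == (y ^ 5)" simplifies to "x == y", and
	 "(x ^ 5) == (y ^ 3)" to "(x ^ (5 ^ 3)) == y", i.e. "(x ^ 6) == y".  */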
      /* Attempt to simplify equality/inequality comparisons of complex
	 values.  Only lower the comparison if the result is known or
	 can be simplified to a single scalar comparison.  */
      if ((TREE_CODE (arg0) == COMPLEX_EXPR
	   || TREE_CODE (arg0) == COMPLEX_CST)
	  && (TREE_CODE (arg1) == COMPLEX_EXPR
	      || TREE_CODE (arg1) == COMPLEX_CST))
	{
	  tree real0, imag0, real1, imag1;
	  tree rcond, icond;

	  if (TREE_CODE (arg0) == COMPLEX_EXPR)
	    {
	      real0 = TREE_OPERAND (arg0, 0);
	      imag0 = TREE_OPERAND (arg0, 1);
	    }
	  else
	    {
	      real0 = TREE_REALPART (arg0);
	      imag0 = TREE_IMAGPART (arg0);
	    }

	  if (TREE_CODE (arg1) == COMPLEX_EXPR)
	    {
	      real1 = TREE_OPERAND (arg1, 0);
	      imag1 = TREE_OPERAND (arg1, 1);
	    }
	  else
	    {
	      real1 = TREE_REALPART (arg1);
	      imag1 = TREE_IMAGPART (arg1);
	    }

	  rcond = fold_binary_loc (loc, code, type, real0, real1);
	  if (rcond && TREE_CODE (rcond) == INTEGER_CST)
	    {
	      if (integer_zerop (rcond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands_loc (loc, type, boolean_false_node,
						  imag0, imag1);
		  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands_loc (loc, type, boolean_true_node,
						  imag0, imag1);
		  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
		}
	    }

	  icond = fold_binary_loc (loc, code, type, imag0, imag1);
	  if (icond && TREE_CODE (icond) == INTEGER_CST)
	    {
	      if (integer_zerop (icond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands_loc (loc, type, boolean_false_node,
						  real0, real1);
		  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands_loc (loc, type, boolean_true_node,
						  real0, real1);
		  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
		}
	    }
	}
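
      /* Worked example (added commentary, not part of the original source):
	 for COMPLEX_EXPR <a, b> == COMPLEX_EXPR <c, d>, if the real
	 comparison a == c folds to a known false, the whole equality is
	 false; if it folds to true, only "b == d" remains to be tested.  */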
      return NULL_TREE;

    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;

      /* Transform comparisons of the form X +- C CMP X.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	  && !HONOR_SNANS (arg0))
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  enum tree_code code0 = TREE_CODE (arg0);
	  int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;

	  /* (X - c) > X becomes false.  */
	  if (code == GT_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    return constant_boolean_node (0, type);

	  /* Likewise (X + c) < X becomes false.  */
	  if (code == LT_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    return constant_boolean_node (0, type);

	  /* Convert (X - c) <= X to true.  */
	  if (!HONOR_NANS (arg1)
	      && code == LE_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    return constant_boolean_node (1, type);

	  /* Convert (X + c) >= X to true.  */
	  if (!HONOR_NANS (arg1)
	      && code == GE_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    return constant_boolean_node (1, type);
	}
      /* If we are comparing an ABS_EXPR with a constant, we can
	 convert all the cases into explicit comparisons, but they may
	 well not be faster than doing the ABS and one comparison.
	 But ABS (X) <= C is a range comparison, which becomes a subtraction
	 and a comparison, and is probably faster.  */
      if (code == LE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == ABS_EXPR
	  && ! TREE_SIDE_EFFECTS (arg0)
	  && (tem = negate_expr (arg1)) != 0
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				build2 (GE_EXPR, type,
					TREE_OPERAND (arg0, 0), tem),
				build2 (LE_EXPR, type,
					TREE_OPERAND (arg0, 0), arg1));

      /* Convert ABS_EXPR<x> >= 0 to true.  */
      strict_overflow_p = false;
      if (code == GE_EXPR
	  && (integer_zerop (arg1)
	      || (! HONOR_NANS (arg0)
		  && real_zerop (arg1)))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand_loc (loc, type,
				       constant_boolean_node (true, type),
				       arg0);
	}

      /* Convert ABS_EXPR<x> < 0 to false.  */
      strict_overflow_p = false;
      if (code == LT_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand_loc (loc, type,
				       constant_boolean_node (false, type),
				       arg0);
	}
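
      /* Worked example (added commentary, not part of the original source):
	 "abs (x) <= 7" becomes "x >= -7 && x <= 7" by the range
	 transformation above, while "abs (x) >= 0" and "abs (x) < 0"
	 fold directly to true and false respectively.  */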
      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
	 and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TREE_CODE (arg1) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (arg1, 0)))
	return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
			   build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
				   TREE_OPERAND (arg1, 1)),
			   build_zero_cst (TREE_TYPE (arg0)));

      /* Similarly for X < (cast) (1 << Y).  But cast can't be narrowing,
	 otherwise Y might be >= # of bits in X's type and thus e.g.
	 (unsigned char) (1 << Y) for Y 15 might be 0.
	 If the cast is widening, then 1 << Y should have unsigned type,
	 otherwise if Y is number of bits in the signed shift type minus 1,
	 we can't optimize this.  E.g. (unsigned long long) (1 << Y) for Y
	 31 might be 0xffffffff80000000.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	      || VECTOR_INTEGER_TYPE_P (TREE_TYPE (arg0)))
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && CONVERT_EXPR_P (arg1)
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
	  && (element_precision (TREE_TYPE (arg1))
	      >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
	  && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
	      || (element_precision (TREE_TYPE (arg1))
		  == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
	  && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
	{
	  tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
			TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
	  return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
			     fold_convert_loc (loc, TREE_TYPE (arg0), tem),
			     build_zero_cst (TREE_TYPE (arg0)));
	}
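
      /* Worked example (added commentary, not part of the original source):
	 for unsigned x, "x < (1U << y)" folds to "(x >> y) == 0" and
	 "x >= (1U << y)" folds to "(x >> y) != 0".  */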
      return NULL_TREE;

    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
	tree targ0 = strip_float_extensions (arg0);
	tree targ1 = strip_float_extensions (arg1);
	tree newtype = TREE_TYPE (targ0);

	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	  newtype = TREE_TYPE (targ1);

	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, newtype, targ0),
				  fold_convert_loc (loc, newtype, targ1));
      }
      return NULL_TREE;

    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
	 nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
	return NULL_TREE;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1_loc (loc, NOP_EXPR, type, arg1)
				 : fold_convert_loc (loc, type, arg1);
      return tem;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
   ((A & N) + B) & M -> (A + B) & M
   Similarly if (N & M) == 0,
   ((A | N) + B) & M -> (A + B) & M
   and for - instead of + (or unary - instead of +)
   and/or ^ instead of |.
   If B is constant and (B & M) == 0, fold into A & M.

   This function is a helper for match.pd patterns.  Return non-NULL
   type in which the simplified operation should be performed only
   if any optimization is possible.

   ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
   then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
   Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
   the +/-.  */

tree
fold_bit_and_mask (tree type, tree arg1, enum tree_code code,
		   tree arg00, enum tree_code code00, tree arg000, tree arg001,
		   tree arg01, enum tree_code code01, tree arg010, tree arg011,
		   tree *pmop)
{
  gcc_assert (TREE_CODE (arg1) == INTEGER_CST);
  gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR || code == NEGATE_EXPR);
  wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
  if (cst1 == 0
      || (cst1 & (cst1 + 1)) != 0
      || !INTEGRAL_TYPE_P (type)
      || (!TYPE_OVERFLOW_WRAPS (type)
	  && TREE_CODE (type) != INTEGER_TYPE)
      || (wi::max_value (type) & cst1) != cst1)
    return NULL_TREE;

  enum tree_code codes[2] = { code00, code01 };
  tree arg0xx[4] = { arg000, arg001, arg010, arg011 };
  int which;
  wide_int cst0;

  /* Now we know that arg0 is (C + D) or (C - D) or -C and
     arg1 (M) is == (1LL << cst) - 1.
     Store C into PMOP[0] and D into PMOP[1].  */
  pmop[0] = arg00;
  pmop[1] = arg01;
  which = code != NEGATE_EXPR;

  for (; which >= 0; which--)
    switch (codes[which])
      {
      case BIT_AND_EXPR:
      case BIT_IOR_EXPR:
      case BIT_XOR_EXPR:
	gcc_assert (TREE_CODE (arg0xx[2 * which + 1]) == INTEGER_CST);
	cst0 = wi::to_wide (arg0xx[2 * which + 1]) & cst1;
	if (codes[which] == BIT_AND_EXPR)
	  {
	    if (cst0 != cst1)
	      break;
	  }
	else if (cst0 != 0)
	  break;
	/* If C or D is of the form (A & N) where
	   (N & M) == M, or of the form (A | N) or
	   (A ^ N) where (N & M) == 0, replace it with A.  */
	pmop[which] = arg0xx[2 * which];
	break;
      case ERROR_MARK:
	if (TREE_CODE (pmop[which]) != INTEGER_CST)
	  break;
	/* If C or D is a N where (N & M) == 0, it can be
	   omitted (replaced with 0).  */
	if ((code == PLUS_EXPR
	     || (code == MINUS_EXPR && which == 0))
	    && (cst1 & wi::to_wide (pmop[which])) == 0)
	  pmop[which] = build_int_cst (type, 0);
	/* Similarly, with C - N where (-N & M) == 0.  */
	if (code == MINUS_EXPR
	    && which == 1
	    && (cst1 & -wi::to_wide (pmop[which])) == 0)
	  pmop[which] = build_int_cst (type, 0);
	break;
      default:
	gcc_unreachable ();
      }

  /* Only build anything new if we optimized one or both arguments above.  */
  if (pmop[0] == arg00 && pmop[1] == arg01)
    return NULL_TREE;

  if (TYPE_OVERFLOW_WRAPS (type))
    return type;
  else
    return unsigned_type_for (type);
}
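
/* Worked example (added commentary, not part of the original source):
   with M = 0xff and N = 0x100 we have (N & M) == 0, so for
   ((a | 0x100) + b) & 0xff the helper above reports that the | can be
   dropped and the addition performed as (a + b) & 0xff, using an unsigned
   copy of the type when the original type does not wrap on overflow.  */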
/* Used by contains_label_[p1].  */

struct contains_label_data
{
  hash_set<tree> *pset;
  bool inside_switch_p;
};

/* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
   a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
   return NULL_TREE.  Do not check the subtrees of GOTO_EXPR.  */

static tree
contains_label_1 (tree *tp, int *walk_subtrees, void *data)
{
  contains_label_data *d = (contains_label_data *) data;
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      return *tp;

    case CASE_LABEL_EXPR:
      if (!d->inside_switch_p)
	return *tp;
      return NULL_TREE;

    case SWITCH_EXPR:
      if (!d->inside_switch_p)
	{
	  if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
	    return *tp;
	  d->inside_switch_p = true;
	  if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
	    return *tp;
	  d->inside_switch_p = false;
	  *walk_subtrees = 0;
	}
      return NULL_TREE;

    case GOTO_EXPR:
      *walk_subtrees = 0;
      return NULL_TREE;

    default:
      return NULL_TREE;
    }
}

/* Return whether the sub-tree ST contains a label which is accessible from
   outside the sub-tree.  */

static bool
contains_label_p (tree st)
{
  hash_set<tree> pset;
  contains_label_data data = { &pset, false };
  return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
}
/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_ternary_loc (location_t loc, enum tree_code code, tree type,
		  tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 3);

  /* If this is a commutative operation, and OP0 is a constant, move it
     to OP1 to reduce the number of tests below.  */
  if (commutative_ternary_tree_code (code)
      && tree_swap_operands_p (op0, op1))
    return fold_build3_loc (loc, code, type, op1, op0, op2);

  tem = generic_simplify (loc, code, type, op0, op1, op2);
  if (tem)
    return tem;

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any cases,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */
12833 case COMPONENT_REF
:
12834 if (TREE_CODE (arg0
) == CONSTRUCTOR
12835 && ! type_contains_placeholder_p (TREE_TYPE (arg0
)))
12837 unsigned HOST_WIDE_INT idx
;
12839 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0
), idx
, field
, value
)
12846 case VEC_COND_EXPR
:
12847 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12848 so all simple results must be passed through pedantic_non_lvalue. */
12849 if (TREE_CODE (arg0
) == INTEGER_CST
)
12851 tree unused_op
= integer_zerop (arg0
) ? op1
: op2
;
12852 tem
= integer_zerop (arg0
) ? op2
: op1
;
12853 /* Only optimize constant conditions when the selected branch
12854 has the same type as the COND_EXPR. This avoids optimizing
12855 away "c ? x : throw", where the throw has a void type.
12856 Avoid throwing away that operand which contains label. */
12857 if ((!TREE_SIDE_EFFECTS (unused_op
)
12858 || !contains_label_p (unused_op
))
12859 && (! VOID_TYPE_P (TREE_TYPE (tem
))
12860 || VOID_TYPE_P (type
)))
12861 return protected_set_expr_location_unshare (tem
, loc
);
12864 else if (TREE_CODE (arg0
) == VECTOR_CST
)
12866 unsigned HOST_WIDE_INT nelts
;
12867 if ((TREE_CODE (arg1
) == VECTOR_CST
12868 || TREE_CODE (arg1
) == CONSTRUCTOR
)
12869 && (TREE_CODE (arg2
) == VECTOR_CST
12870 || TREE_CODE (arg2
) == CONSTRUCTOR
)
12871 && TYPE_VECTOR_SUBPARTS (type
).is_constant (&nelts
))
12873 vec_perm_builder
sel (nelts
, nelts
, 1);
12874 for (unsigned int i
= 0; i
< nelts
; i
++)
12876 tree val
= VECTOR_CST_ELT (arg0
, i
);
12877 if (integer_all_onesp (val
))
12878 sel
.quick_push (i
);
12879 else if (integer_zerop (val
))
12880 sel
.quick_push (nelts
+ i
);
12881 else /* Currently unreachable. */
12884 vec_perm_indices
indices (sel
, 2, nelts
);
12885 tree t
= fold_vec_perm (type
, arg1
, arg2
, indices
);
12886 if (t
!= NULL_TREE
)
12891 /* If we have A op B ? A : C, we may be able to convert this to a
12892 simpler expression, depending on the operation and the values
12893 of B and C. Signed zeros prevent all of these transformations,
12894 for reasons given above each one.
12896 Also try swapping the arguments and inverting the conditional. */
12897 if (COMPARISON_CLASS_P (arg0
)
12898 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0), op1
)
12899 && !HONOR_SIGNED_ZEROS (op1
))
12901 tem
= fold_cond_expr_with_comparison (loc
, type
, TREE_CODE (arg0
),
12902 TREE_OPERAND (arg0
, 0),
12903 TREE_OPERAND (arg0
, 1),
12909 if (COMPARISON_CLASS_P (arg0
)
12910 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0), op2
)
12911 && !HONOR_SIGNED_ZEROS (op2
))
12913 enum tree_code comp_code
= TREE_CODE (arg0
);
12914 tree arg00
= TREE_OPERAND (arg0
, 0);
12915 tree arg01
= TREE_OPERAND (arg0
, 1);
12916 comp_code
= invert_tree_comparison (comp_code
, HONOR_NANS (arg00
));
12917 if (comp_code
!= ERROR_MARK
)
12918 tem
= fold_cond_expr_with_comparison (loc
, type
, comp_code
,
12926 /* If the second operand is simpler than the third, swap them
12927 since that produces better jump optimization results. */
12928 if (truth_value_p (TREE_CODE (arg0
))
12929 && tree_swap_operands_p (op1
, op2
))
12931 location_t loc0
= expr_location_or (arg0
, loc
);
12932 /* See if this can be inverted. If it can't, possibly because
12933 it was a floating-point inequality comparison, don't do
12935 tem
= fold_invert_truthvalue (loc0
, arg0
);
12937 return fold_build3_loc (loc
, code
, type
, tem
, op2
, op1
);
12940 /* Convert A ? 1 : 0 to simply A. */
12941 if ((code
== VEC_COND_EXPR
? integer_all_onesp (op1
)
12942 : (integer_onep (op1
)
12943 && !VECTOR_TYPE_P (type
)))
12944 && integer_zerop (op2
)
12945 /* If we try to convert OP0 to our type, the
12946 call to fold will try to move the conversion inside
12947 a COND, which will recurse. In that case, the COND_EXPR
12948 is probably the best choice, so leave it alone. */
12949 && type
== TREE_TYPE (arg0
))
12950 return protected_set_expr_location_unshare (arg0
, loc
);
12952 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12953 over COND_EXPR in cases such as floating point comparisons. */
12954 if (integer_zerop (op1
)
12955 && code
== COND_EXPR
12956 && integer_onep (op2
)
12957 && !VECTOR_TYPE_P (type
)
12958 && truth_value_p (TREE_CODE (arg0
)))
12959 return fold_convert_loc (loc
, type
,
12960 invert_truthvalue_loc (loc
, arg0
));
12962 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12963 if (TREE_CODE (arg0
) == LT_EXPR
12964 && integer_zerop (TREE_OPERAND (arg0
, 1))
12965 && integer_zerop (op2
)
12966 && (tem
= sign_bit_p (TREE_OPERAND (arg0
, 0), arg1
)))
12968 /* sign_bit_p looks through both zero and sign extensions,
12969 but for this optimization only sign extensions are
12971 tree tem2
= TREE_OPERAND (arg0
, 0);
12972 while (tem
!= tem2
)
12974 if (TREE_CODE (tem2
) != NOP_EXPR
12975 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2
, 0))))
12980 tem2
= TREE_OPERAND (tem2
, 0);
12982 /* sign_bit_p only checks ARG1 bits within A's precision.
12983 If <sign bit of A> has wider type than A, bits outside
12984 of A's precision in <sign bit of A> need to be checked.
12985 If they are all 0, this optimization needs to be done
12986 in unsigned A's type, if they are all 1 in signed A's type,
12987 otherwise this can't be done. */
12989 && TYPE_PRECISION (TREE_TYPE (tem
))
12990 < TYPE_PRECISION (TREE_TYPE (arg1
))
12991 && TYPE_PRECISION (TREE_TYPE (tem
))
12992 < TYPE_PRECISION (type
))
12994 int inner_width
, outer_width
;
12997 inner_width
= TYPE_PRECISION (TREE_TYPE (tem
));
12998 outer_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
12999 if (outer_width
> TYPE_PRECISION (type
))
13000 outer_width
= TYPE_PRECISION (type
);
13002 wide_int mask
= wi::shifted_mask
13003 (inner_width
, outer_width
- inner_width
, false,
13004 TYPE_PRECISION (TREE_TYPE (arg1
)));
13006 wide_int common
= mask
& wi::to_wide (arg1
);
13007 if (common
== mask
)
13009 tem_type
= signed_type_for (TREE_TYPE (tem
));
13010 tem
= fold_convert_loc (loc
, tem_type
, tem
);
13012 else if (common
== 0)
13014 tem_type
= unsigned_type_for (TREE_TYPE (tem
));
13015 tem
= fold_convert_loc (loc
, tem_type
, tem
);
13023 fold_convert_loc (loc
, type
,
13024 fold_build2_loc (loc
, BIT_AND_EXPR
,
13025 TREE_TYPE (tem
), tem
,
13026 fold_convert_loc (loc
,
13031 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13032 already handled above. */
13033 if (TREE_CODE (arg0
) == BIT_AND_EXPR
13034 && integer_onep (TREE_OPERAND (arg0
, 1))
13035 && integer_zerop (op2
)
13036 && integer_pow2p (arg1
))
13038 tree tem
= TREE_OPERAND (arg0
, 0);
13040 if (TREE_CODE (tem
) == RSHIFT_EXPR
13041 && tree_fits_uhwi_p (TREE_OPERAND (tem
, 1))
13042 && (unsigned HOST_WIDE_INT
) tree_log2 (arg1
)
13043 == tree_to_uhwi (TREE_OPERAND (tem
, 1)))
13044 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
13045 fold_convert_loc (loc
, type
,
13046 TREE_OPERAND (tem
, 0)),
13050 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13051 is probably obsolete because the first operand should be a
13052 truth value (that's why we have the two cases above), but let's
13053 leave it in until we can confirm this for all front-ends. */
13054 if (integer_zerop (op2
)
13055 && TREE_CODE (arg0
) == NE_EXPR
13056 && integer_zerop (TREE_OPERAND (arg0
, 1))
13057 && integer_pow2p (arg1
)
13058 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
13059 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
13060 arg1
, OEP_ONLY_CONST
)
13061 /* operand_equal_p compares just value, not precision, so e.g.
13062 arg1 could be 8-bit -128 and be power of two, but BIT_AND_EXPR
13063 second operand 32-bit -128, which is not a power of two (or vice
13065 && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1)))
13066 return fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
13068 /* Disable the transformations below for vectors, since
13069 fold_binary_op_with_conditional_arg may undo them immediately,
13070 yielding an infinite loop. */
13071 if (code
== VEC_COND_EXPR
)
13074 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13075 if (integer_zerop (op2
)
13076 && truth_value_p (TREE_CODE (arg0
))
13077 && truth_value_p (TREE_CODE (arg1
))
13078 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
13079 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
? BIT_AND_EXPR
13080 : TRUTH_ANDIF_EXPR
,
13081 type
, fold_convert_loc (loc
, type
, arg0
), op1
);
13083 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13084 if (code
== VEC_COND_EXPR
? integer_all_onesp (op2
) : integer_onep (op2
)
13085 && truth_value_p (TREE_CODE (arg0
))
13086 && truth_value_p (TREE_CODE (arg1
))
13087 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
13089 location_t loc0
= expr_location_or (arg0
, loc
);
13090 /* Only perform transformation if ARG0 is easily inverted. */
13091 tem
= fold_invert_truthvalue (loc0
, arg0
);
13093 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
13096 type
, fold_convert_loc (loc
, type
, tem
),
13100 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13101 if (integer_zerop (arg1
)
13102 && truth_value_p (TREE_CODE (arg0
))
13103 && truth_value_p (TREE_CODE (op2
))
13104 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
13106 location_t loc0
= expr_location_or (arg0
, loc
);
13107 /* Only perform transformation if ARG0 is easily inverted. */
13108 tem
= fold_invert_truthvalue (loc0
, arg0
);
13110 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
13111 ? BIT_AND_EXPR
: TRUTH_ANDIF_EXPR
,
13112 type
, fold_convert_loc (loc
, type
, tem
),
13116 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13117 if (code
== VEC_COND_EXPR
? integer_all_onesp (arg1
) : integer_onep (arg1
)
13118 && truth_value_p (TREE_CODE (arg0
))
13119 && truth_value_p (TREE_CODE (op2
))
13120 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
13121 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
13122 ? BIT_IOR_EXPR
: TRUTH_ORIF_EXPR
,
13123 type
, fold_convert_loc (loc
, type
, arg0
), op2
);
13128 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13129 of fold_ternary on them. */
13130 gcc_unreachable ();
13132 case BIT_FIELD_REF
:
13133 if (TREE_CODE (arg0
) == VECTOR_CST
13134 && (type
== TREE_TYPE (TREE_TYPE (arg0
))
13135 || (VECTOR_TYPE_P (type
)
13136 && TREE_TYPE (type
) == TREE_TYPE (TREE_TYPE (arg0
))))
13137 && tree_fits_uhwi_p (op1
)
13138 && tree_fits_uhwi_p (op2
))
13140 tree eltype
= TREE_TYPE (TREE_TYPE (arg0
));
13141 unsigned HOST_WIDE_INT width
13142 = (TREE_CODE (eltype
) == BOOLEAN_TYPE
13143 ? TYPE_PRECISION (eltype
) : tree_to_uhwi (TYPE_SIZE (eltype
)));
13144 unsigned HOST_WIDE_INT n
= tree_to_uhwi (arg1
);
13145 unsigned HOST_WIDE_INT idx
= tree_to_uhwi (op2
);
13148 && (idx
% width
) == 0
13149 && (n
% width
) == 0
13150 && known_le ((idx
+ n
) / width
,
13151 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
))))
13156 if (TREE_CODE (arg0
) == VECTOR_CST
)
13160 tem
= VECTOR_CST_ELT (arg0
, idx
);
13161 if (VECTOR_TYPE_P (type
))
13162 tem
= fold_build1 (VIEW_CONVERT_EXPR
, type
, tem
);
13166 tree_vector_builder
vals (type
, n
, 1);
13167 for (unsigned i
= 0; i
< n
; ++i
)
13168 vals
.quick_push (VECTOR_CST_ELT (arg0
, idx
+ i
));
13169 return vals
.build ();
13174 /* On constants we can use native encode/interpret to constant
13175 fold (nearly) all BIT_FIELD_REFs. */
13176 if (CONSTANT_CLASS_P (arg0
)
13177 && can_native_interpret_type_p (type
)
13178 && BITS_PER_UNIT
== 8
13179 && tree_fits_uhwi_p (op1
)
13180 && tree_fits_uhwi_p (op2
))
13182 unsigned HOST_WIDE_INT bitpos
= tree_to_uhwi (op2
);
13183 unsigned HOST_WIDE_INT bitsize
= tree_to_uhwi (op1
);
13184 /* Limit us to a reasonable amount of work. To relax the
13185 other limitations we need bit-shifting of the buffer
13186 and rounding up the size. */
13187 if (bitpos
% BITS_PER_UNIT
== 0
13188 && bitsize
% BITS_PER_UNIT
== 0
13189 && bitsize
<= MAX_BITSIZE_MODE_ANY_MODE
)
13191 unsigned char b
[MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
];
13192 unsigned HOST_WIDE_INT len
13193 = native_encode_expr (arg0
, b
, bitsize
/ BITS_PER_UNIT
,
13194 bitpos
/ BITS_PER_UNIT
);
13196 && len
* BITS_PER_UNIT
>= bitsize
)
13198 tree v
= native_interpret_expr (type
, b
,
13199 bitsize
/ BITS_PER_UNIT
);
13208 case VEC_PERM_EXPR
:
13209 /* Perform constant folding of BIT_INSERT_EXPR. */
13210 if (TREE_CODE (arg2
) == VECTOR_CST
13211 && TREE_CODE (op0
) == VECTOR_CST
13212 && TREE_CODE (op1
) == VECTOR_CST
)
13214 /* Build a vector of integers from the tree mask. */
13215 vec_perm_builder builder
;
13216 if (!tree_to_vec_perm_builder (&builder
, arg2
))
13219 /* Create a vec_perm_indices for the integer vector. */
13220 poly_uint64 nelts
= TYPE_VECTOR_SUBPARTS (type
);
13221 bool single_arg
= (op0
== op1
);
13222 vec_perm_indices
sel (builder
, single_arg
? 1 : 2, nelts
);
13223 return fold_vec_perm (type
, op0
, op1
, sel
);
13227 case BIT_INSERT_EXPR
:
13228 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
13229 if (TREE_CODE (arg0
) == INTEGER_CST
13230 && TREE_CODE (arg1
) == INTEGER_CST
)
13232 unsigned HOST_WIDE_INT bitpos
= tree_to_uhwi (op2
);
13233 unsigned bitsize
= TYPE_PRECISION (TREE_TYPE (arg1
));
13234 wide_int tem
= (wi::to_wide (arg0
)
13235 & wi::shifted_mask (bitpos
, bitsize
, true,
13236 TYPE_PRECISION (type
)));
13238 = wi::lshift (wi::zext (wi::to_wide (arg1
, TYPE_PRECISION (type
)),
13240 return wide_int_to_tree (type
, wi::bit_or (tem
, tem2
));
13242 else if (TREE_CODE (arg0
) == VECTOR_CST
13243 && CONSTANT_CLASS_P (arg1
)
13244 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0
)),
13247 unsigned HOST_WIDE_INT bitpos
= tree_to_uhwi (op2
);
13248 unsigned HOST_WIDE_INT elsize
13249 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1
)));
13250 if (bitpos
% elsize
== 0)
13252 unsigned k
= bitpos
/ elsize
;
13253 unsigned HOST_WIDE_INT nelts
;
13254 if (operand_equal_p (VECTOR_CST_ELT (arg0
, k
), arg1
, 0))
13256 else if (VECTOR_CST_NELTS (arg0
).is_constant (&nelts
))
13258 tree_vector_builder
elts (type
, nelts
, 1);
13259 elts
.quick_grow (nelts
);
13260 for (unsigned HOST_WIDE_INT i
= 0; i
< nelts
; ++i
)
13261 elts
[i
] = (i
== k
? arg1
: VECTOR_CST_ELT (arg0
, i
));
13262 return elts
.build ();
13270 } /* switch (code) */
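
/* Worked example (added commentary, not part of the original source):
   BIT_INSERT_EXPR <0x00ff00ff, 0xaa, 8> on 32-bit constants masks out
   bits 8..15 of the first operand and ORs in 0xaa << 8, giving
   0x00ffaaff, which is what the INTEGER_CST branch above computes with
   wide_int arithmetic.  */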
13273 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
13274 of an array (or vector). *CTOR_IDX if non-NULL is updated with the
13275 constructor element index of the value returned. If the element is
13276 not found NULL_TREE is returned and *CTOR_IDX is updated to
13277 the index of the element after the ACCESS_INDEX position (which
13278 may be outside of the CTOR array). */
13281 get_array_ctor_element_at_index (tree ctor
, offset_int access_index
,
13282 unsigned *ctor_idx
)
13284 tree index_type
= NULL_TREE
;
13285 signop index_sgn
= UNSIGNED
;
13286 offset_int low_bound
= 0;
13288 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
)
13290 tree domain_type
= TYPE_DOMAIN (TREE_TYPE (ctor
));
13291 if (domain_type
&& TYPE_MIN_VALUE (domain_type
))
13293 /* Static constructors for variably sized objects makes no sense. */
13294 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type
)) == INTEGER_CST
);
13295 index_type
= TREE_TYPE (TYPE_MIN_VALUE (domain_type
));
13296 /* ??? When it is obvious that the range is signed, treat it so. */
13297 if (TYPE_UNSIGNED (index_type
)
13298 && TYPE_MAX_VALUE (domain_type
)
13299 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type
),
13300 TYPE_MIN_VALUE (domain_type
)))
13302 index_sgn
= SIGNED
;
13304 = offset_int::from (wi::to_wide (TYPE_MIN_VALUE (domain_type
)),
13309 index_sgn
= TYPE_SIGN (index_type
);
13310 low_bound
= wi::to_offset (TYPE_MIN_VALUE (domain_type
));
13316 access_index
= wi::ext (access_index
, TYPE_PRECISION (index_type
),
13319 offset_int index
= low_bound
;
13321 index
= wi::ext (index
, TYPE_PRECISION (index_type
), index_sgn
);
13323 offset_int max_index
= index
;
13326 bool first_p
= true;
13328 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
, cval
)
13330 /* Array constructor might explicitly set index, or specify a range,
13331 or leave index NULL meaning that it is next index after previous
13335 if (TREE_CODE (cfield
) == INTEGER_CST
)
13337 = offset_int::from (wi::to_wide (cfield
), index_sgn
);
13340 gcc_assert (TREE_CODE (cfield
) == RANGE_EXPR
);
13341 index
= offset_int::from (wi::to_wide (TREE_OPERAND (cfield
, 0)),
13344 = offset_int::from (wi::to_wide (TREE_OPERAND (cfield
, 1)),
13346 gcc_checking_assert (wi::le_p (index
, max_index
, index_sgn
));
13351 index
= max_index
+ 1;
13353 index
= wi::ext (index
, TYPE_PRECISION (index_type
), index_sgn
);
13354 gcc_checking_assert (wi::gt_p (index
, max_index
, index_sgn
));
13360 /* Do we have a match?  */
13361 if (wi::cmp (access_index
, index
, index_sgn
) >= 0)
13363 if (wi::cmp (access_index
, max_index
, index_sgn
) <= 0)
13370 else if (in_gimple_form
)
13371 /* We're past the element we search for. Note during parsing
13372 the elements might not be sorted.
13373 ??? We should use a binary search and a flag on the
13374 CONSTRUCTOR as to whether elements are sorted in declaration
13383 /* Perform constant folding and related simplification of EXPR.
13384 The related simplifications include x*1 => x, x*0 => 0, etc.,
13385 and application of the associative law.
13386 NOP_EXPR conversions may be removed freely (as long as we
13387 are careful not to change the type of the overall expression).
13388 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13389 but we can constant-fold them if they have constant operands. */
13391 #ifdef ENABLE_FOLD_CHECKING
13392 # define fold(x) fold_1 (x)
13393 static tree
fold_1 (tree
);
13399 const tree t
= expr
;
13400 enum tree_code code
= TREE_CODE (t
);
13401 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
13403 location_t loc
= EXPR_LOCATION (expr
);
13405 /* Return right away if a constant. */
13406 if (kind
== tcc_constant
)
13409 /* CALL_EXPR-like objects with variable numbers of operands are
13410 treated specially. */
13411 if (kind
== tcc_vl_exp
)
13413 if (code
== CALL_EXPR
)
13415 tem
= fold_call_expr (loc
, expr
, false);
13416 return tem
? tem
: expr
;
13421 if (IS_EXPR_CODE_CLASS (kind
))
13423 tree type
= TREE_TYPE (t
);
13424 tree op0
, op1
, op2
;
13426 switch (TREE_CODE_LENGTH (code
))
13429 op0
= TREE_OPERAND (t
, 0);
13430 tem
= fold_unary_loc (loc
, code
, type
, op0
);
13431 return tem
? tem
: expr
;
13433 op0
= TREE_OPERAND (t
, 0);
13434 op1
= TREE_OPERAND (t
, 1);
13435 tem
= fold_binary_loc (loc
, code
, type
, op0
, op1
);
13436 return tem
? tem
: expr
;
13438 op0
= TREE_OPERAND (t
, 0);
13439 op1
= TREE_OPERAND (t
, 1);
13440 op2
= TREE_OPERAND (t
, 2);
13441 tem
= fold_ternary_loc (loc
, code
, type
, op0
, op1
, op2
);
13442 return tem
? tem
: expr
;
13452 tree op0
= TREE_OPERAND (t
, 0);
13453 tree op1
= TREE_OPERAND (t
, 1);
13455 if (TREE_CODE (op1
) == INTEGER_CST
13456 && TREE_CODE (op0
) == CONSTRUCTOR
13457 && ! type_contains_placeholder_p (TREE_TYPE (op0
)))
13459 tree val
= get_array_ctor_element_at_index (op0
,
13460 wi::to_offset (op1
));
13468 /* Return a VECTOR_CST if possible. */
13471 tree type
= TREE_TYPE (t
);
13472 if (TREE_CODE (type
) != VECTOR_TYPE
)
13477 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t
), i
, val
)
13478 if (! CONSTANT_CLASS_P (val
))
13481 return build_vector_from_ctor (type
, CONSTRUCTOR_ELTS (t
));
13485 return fold (DECL_INITIAL (t
));
13489 } /* switch (code) */
13492 #ifdef ENABLE_FOLD_CHECKING
13495 static void fold_checksum_tree (const_tree
, struct md5_ctx
*,
13496 hash_table
<nofree_ptr_hash
<const tree_node
> > *);
13497 static void fold_check_failed (const_tree
, const_tree
);
13498 void print_fold_checksum (const_tree
);
13500 /* When --enable-checking=fold, compute a digest of expr before
13501 and after actual fold call to see if fold did not accidentally
13502 change original expr. */
13508 struct md5_ctx ctx
;
13509 unsigned char checksum_before
[16], checksum_after
[16];
13510 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
13512 md5_init_ctx (&ctx
);
13513 fold_checksum_tree (expr
, &ctx
, &ht
);
13514 md5_finish_ctx (&ctx
, checksum_before
);
13517 ret
= fold_1 (expr
);
13519 md5_init_ctx (&ctx
);
13520 fold_checksum_tree (expr
, &ctx
, &ht
);
13521 md5_finish_ctx (&ctx
, checksum_after
);
13523 if (memcmp (checksum_before
, checksum_after
, 16))
13524 fold_check_failed (expr
, ret
);
13530 print_fold_checksum (const_tree expr
)
13532 struct md5_ctx ctx
;
13533 unsigned char checksum
[16], cnt
;
13534 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
13536 md5_init_ctx (&ctx
);
13537 fold_checksum_tree (expr
, &ctx
, &ht
);
13538 md5_finish_ctx (&ctx
, checksum
);
13539 for (cnt
= 0; cnt
< 16; ++cnt
)
13540 fprintf (stderr
, "%02x", checksum
[cnt
]);
13541 putc ('\n', stderr
);
13545 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED
, const_tree ret ATTRIBUTE_UNUSED
)
13547 internal_error ("fold check: original tree changed by fold");
13551 fold_checksum_tree (const_tree expr
, struct md5_ctx
*ctx
,
13552 hash_table
<nofree_ptr_hash
<const tree_node
> > *ht
)
13554 const tree_node
**slot
;
13555 enum tree_code code
;
13556 union tree_node
*buf
;
13562 slot
= ht
->find_slot (expr
, INSERT
);
13566 code
= TREE_CODE (expr
);
13567 if (TREE_CODE_CLASS (code
) == tcc_declaration
13568 && HAS_DECL_ASSEMBLER_NAME_P (expr
))
13570 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
13571 size_t sz
= tree_size (expr
);
13572 buf
= XALLOCAVAR (union tree_node
, sz
);
13573 memcpy ((char *) buf
, expr
, sz
);
13574 SET_DECL_ASSEMBLER_NAME ((tree
) buf
, NULL
);
13575 buf
->decl_with_vis
.symtab_node
= NULL
;
13576 buf
->base
.nowarning_flag
= 0;
13579 else if (TREE_CODE_CLASS (code
) == tcc_type
13580 && (TYPE_POINTER_TO (expr
)
13581 || TYPE_REFERENCE_TO (expr
)
13582 || TYPE_CACHED_VALUES_P (expr
)
13583 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr
)
13584 || TYPE_NEXT_VARIANT (expr
)
13585 || TYPE_ALIAS_SET_KNOWN_P (expr
)))
13587 /* Allow these fields to be modified. */
13589 size_t sz
= tree_size (expr
);
13590 buf
= XALLOCAVAR (union tree_node
, sz
);
13591 memcpy ((char *) buf
, expr
, sz
);
13592 expr
= tmp
= (tree
) buf
;
13593 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp
) = 0;
13594 TYPE_POINTER_TO (tmp
) = NULL
;
13595 TYPE_REFERENCE_TO (tmp
) = NULL
;
13596 TYPE_NEXT_VARIANT (tmp
) = NULL
;
13597 TYPE_ALIAS_SET (tmp
) = -1;
13598 if (TYPE_CACHED_VALUES_P (tmp
))
13600 TYPE_CACHED_VALUES_P (tmp
) = 0;
13601 TYPE_CACHED_VALUES (tmp
) = NULL
;
13604 else if (warning_suppressed_p (expr
) && (DECL_P (expr
) || EXPR_P (expr
)))
13606 /* Allow the no-warning bit to be set. Perhaps we shouldn't allow
13607 that and change builtins.cc etc. instead - see PR89543. */
13608 size_t sz
= tree_size (expr
);
13609 buf
= XALLOCAVAR (union tree_node
, sz
);
13610 memcpy ((char *) buf
, expr
, sz
);
13611 buf
->base
.nowarning_flag
= 0;
13614 md5_process_bytes (expr
, tree_size (expr
), ctx
);
13615 if (CODE_CONTAINS_STRUCT (code
, TS_TYPED
))
13616 fold_checksum_tree (TREE_TYPE (expr
), ctx
, ht
);
13617 if (TREE_CODE_CLASS (code
) != tcc_type
13618 && TREE_CODE_CLASS (code
) != tcc_declaration
13619 && code
!= TREE_LIST
13620 && code
!= SSA_NAME
13621 && CODE_CONTAINS_STRUCT (code
, TS_COMMON
))
13622 fold_checksum_tree (TREE_CHAIN (expr
), ctx
, ht
);
13623 switch (TREE_CODE_CLASS (code
))
13629 md5_process_bytes (TREE_STRING_POINTER (expr
),
13630 TREE_STRING_LENGTH (expr
), ctx
);
13633 fold_checksum_tree (TREE_REALPART (expr
), ctx
, ht
);
13634 fold_checksum_tree (TREE_IMAGPART (expr
), ctx
, ht
);
13637 len
= vector_cst_encoded_nelts (expr
);
13638 for (i
= 0; i
< len
; ++i
)
13639 fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr
, i
), ctx
, ht
);
13645 case tcc_exceptional
:
13649 fold_checksum_tree (TREE_PURPOSE (expr
), ctx
, ht
);
13650 fold_checksum_tree (TREE_VALUE (expr
), ctx
, ht
);
13651 expr
= TREE_CHAIN (expr
);
13652 goto recursive_label
;
13655 for (i
= 0; i
< TREE_VEC_LENGTH (expr
); ++i
)
13656 fold_checksum_tree (TREE_VEC_ELT (expr
, i
), ctx
, ht
);
13662 case tcc_expression
:
13663 case tcc_reference
:
13664 case tcc_comparison
:
13667 case tcc_statement
:
13669 len
= TREE_OPERAND_LENGTH (expr
);
13670 for (i
= 0; i
< len
; ++i
)
13671 fold_checksum_tree (TREE_OPERAND (expr
, i
), ctx
, ht
);
13673 case tcc_declaration
:
13674 fold_checksum_tree (DECL_NAME (expr
), ctx
, ht
);
13675 fold_checksum_tree (DECL_CONTEXT (expr
), ctx
, ht
);
13676 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_COMMON
))
13678 fold_checksum_tree (DECL_SIZE (expr
), ctx
, ht
);
13679 fold_checksum_tree (DECL_SIZE_UNIT (expr
), ctx
, ht
);
13680 fold_checksum_tree (DECL_INITIAL (expr
), ctx
, ht
);
13681 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr
), ctx
, ht
);
13682 fold_checksum_tree (DECL_ATTRIBUTES (expr
), ctx
, ht
);
13685 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_NON_COMMON
))
13687 if (TREE_CODE (expr
) == FUNCTION_DECL
)
13689 fold_checksum_tree (DECL_VINDEX (expr
), ctx
, ht
);
13690 fold_checksum_tree (DECL_ARGUMENTS (expr
), ctx
, ht
);
13692 fold_checksum_tree (DECL_RESULT_FLD (expr
), ctx
, ht
);
13696 if (TREE_CODE (expr
) == ENUMERAL_TYPE
)
13697 fold_checksum_tree (TYPE_VALUES (expr
), ctx
, ht
);
13698 fold_checksum_tree (TYPE_SIZE (expr
), ctx
, ht
);
13699 fold_checksum_tree (TYPE_SIZE_UNIT (expr
), ctx
, ht
);
13700 fold_checksum_tree (TYPE_ATTRIBUTES (expr
), ctx
, ht
);
13701 fold_checksum_tree (TYPE_NAME (expr
), ctx
, ht
);
13702 if (INTEGRAL_TYPE_P (expr
)
13703 || SCALAR_FLOAT_TYPE_P (expr
))
13705 fold_checksum_tree (TYPE_MIN_VALUE (expr
), ctx
, ht
);
13706 fold_checksum_tree (TYPE_MAX_VALUE (expr
), ctx
, ht
);
13708 fold_checksum_tree (TYPE_MAIN_VARIANT (expr
), ctx
, ht
);
13709 if (TREE_CODE (expr
) == RECORD_TYPE
13710 || TREE_CODE (expr
) == UNION_TYPE
13711 || TREE_CODE (expr
) == QUAL_UNION_TYPE
)
13712 fold_checksum_tree (TYPE_BINFO (expr
), ctx
, ht
);
13713 fold_checksum_tree (TYPE_CONTEXT (expr
), ctx
, ht
);
13720 /* Helper function for outputting the checksum of a tree T. When
13721 debugging with gdb, you can "define mynext" to be "next" followed
13722 by "call debug_fold_checksum (op0)", then just trace down till the
13725 DEBUG_FUNCTION
void
13726 debug_fold_checksum (const_tree t
)
13729 unsigned char checksum
[16];
13730 struct md5_ctx ctx
;
13731 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
13733 md5_init_ctx (&ctx
);
13734 fold_checksum_tree (t
, &ctx
, &ht
);
13735 md5_finish_ctx (&ctx
, checksum
);
13738 for (i
= 0; i
< 16; i
++)
13739 fprintf (stderr
, "%d ", checksum
[i
]);
13741 fprintf (stderr
, "\n");
13746 /* Fold a unary tree expression with code CODE of type TYPE with an
13747 operand OP0. LOC is the location of the resulting expression.
13748 Return a folded expression if successful. Otherwise, return a tree
13749 expression with code CODE of type TYPE with an operand OP0. */
13752 fold_build1_loc (location_t loc
,
13753 enum tree_code code
, tree type
, tree op0 MEM_STAT_DECL
)
13756 #ifdef ENABLE_FOLD_CHECKING
13757 unsigned char checksum_before
[16], checksum_after
[16];
13758 struct md5_ctx ctx
;
13759 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
13761 md5_init_ctx (&ctx
);
13762 fold_checksum_tree (op0
, &ctx
, &ht
);
13763 md5_finish_ctx (&ctx
, checksum_before
);
13767 tem
= fold_unary_loc (loc
, code
, type
, op0
);
13769 tem
= build1_loc (loc
, code
, type
, op0 PASS_MEM_STAT
);
13771 #ifdef ENABLE_FOLD_CHECKING
13772 md5_init_ctx (&ctx
);
13773 fold_checksum_tree (op0
, &ctx
, &ht
);
13774 md5_finish_ctx (&ctx
, checksum_after
);
13776 if (memcmp (checksum_before
, checksum_after
, 16))
13777 fold_check_failed (op0
, tem
);
13782 /* Fold a binary tree expression with code CODE of type TYPE with
13783 operands OP0 and OP1. LOC is the location of the resulting
13784 expression. Return a folded expression if successful. Otherwise,
13785 return a tree expression with code CODE of type TYPE with operands
13789 fold_build2_loc (location_t loc
,
13790 enum tree_code code
, tree type
, tree op0
, tree op1
13794 #ifdef ENABLE_FOLD_CHECKING
13795 unsigned char checksum_before_op0
[16],
13796 checksum_before_op1
[16],
13797 checksum_after_op0
[16],
13798 checksum_after_op1
[16];
13799 struct md5_ctx ctx
;
13800 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
13802 md5_init_ctx (&ctx
);
13803 fold_checksum_tree (op0
, &ctx
, &ht
);
13804 md5_finish_ctx (&ctx
, checksum_before_op0
);
13807 md5_init_ctx (&ctx
);
13808 fold_checksum_tree (op1
, &ctx
, &ht
);
13809 md5_finish_ctx (&ctx
, checksum_before_op1
);
13813 tem
= fold_binary_loc (loc
, code
, type
, op0
, op1
);
13815 tem
= build2_loc (loc
, code
, type
, op0
, op1 PASS_MEM_STAT
);
13817 #ifdef ENABLE_FOLD_CHECKING
13818 md5_init_ctx (&ctx
);
13819 fold_checksum_tree (op0
, &ctx
, &ht
);
13820 md5_finish_ctx (&ctx
, checksum_after_op0
);
13823 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
13824 fold_check_failed (op0
, tem
);
13826 md5_init_ctx (&ctx
);
13827 fold_checksum_tree (op1
, &ctx
, &ht
);
13828 md5_finish_ctx (&ctx
, checksum_after_op1
);
13830 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
13831 fold_check_failed (op1
, tem
);
13836 /* Fold a ternary tree expression with code CODE of type TYPE with
13837 operands OP0, OP1, and OP2. Return a folded expression if
13838 successful. Otherwise, return a tree expression with code CODE of
13839 type TYPE with operands OP0, OP1, and OP2. */
13842 fold_build3_loc (location_t loc
, enum tree_code code
, tree type
,
13843 tree op0
, tree op1
, tree op2 MEM_STAT_DECL
)
13846 #ifdef ENABLE_FOLD_CHECKING
13847 unsigned char checksum_before_op0
[16],
13848 checksum_before_op1
[16],
13849 checksum_before_op2
[16],
13850 checksum_after_op0
[16],
13851 checksum_after_op1
[16],
13852 checksum_after_op2
[16];
13853 struct md5_ctx ctx
;
13854 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
13856 md5_init_ctx (&ctx
);
13857 fold_checksum_tree (op0
, &ctx
, &ht
);
13858 md5_finish_ctx (&ctx
, checksum_before_op0
);
13861 md5_init_ctx (&ctx
);
13862 fold_checksum_tree (op1
, &ctx
, &ht
);
13863 md5_finish_ctx (&ctx
, checksum_before_op1
);
13866 md5_init_ctx (&ctx
);
13867 fold_checksum_tree (op2
, &ctx
, &ht
);
13868 md5_finish_ctx (&ctx
, checksum_before_op2
);
13872 gcc_assert (TREE_CODE_CLASS (code
) != tcc_vl_exp
);
13873 tem
= fold_ternary_loc (loc
, code
, type
, op0
, op1
, op2
);
13875 tem
= build3_loc (loc
, code
, type
, op0
, op1
, op2 PASS_MEM_STAT
);
13877 #ifdef ENABLE_FOLD_CHECKING
13878 md5_init_ctx (&ctx
);
13879 fold_checksum_tree (op0
, &ctx
, &ht
);
13880 md5_finish_ctx (&ctx
, checksum_after_op0
);
13883 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
13884 fold_check_failed (op0
, tem
);
13886 md5_init_ctx (&ctx
);
13887 fold_checksum_tree (op1
, &ctx
, &ht
);
13888 md5_finish_ctx (&ctx
, checksum_after_op1
);
13891 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
13892 fold_check_failed (op1
, tem
);
13894 md5_init_ctx (&ctx
);
13895 fold_checksum_tree (op2
, &ctx
, &ht
);
13896 md5_finish_ctx (&ctx
, checksum_after_op2
);
13898 if (memcmp (checksum_before_op2
, checksum_after_op2
, 16))
13899 fold_check_failed (op2
, tem
);
13904 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
13905 arguments in ARGARRAY, and a null static chain.
13906 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13907 of type TYPE from the given operands as constructed by build_call_array. */
13910 fold_build_call_array_loc (location_t loc
, tree type
, tree fn
,
13911 int nargs
, tree
*argarray
)
13914 #ifdef ENABLE_FOLD_CHECKING
13915 unsigned char checksum_before_fn
[16],
13916 checksum_before_arglist
[16],
13917 checksum_after_fn
[16],
13918 checksum_after_arglist
[16];
13919 struct md5_ctx ctx
;
13920 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
13923 md5_init_ctx (&ctx
);
13924 fold_checksum_tree (fn
, &ctx
, &ht
);
13925 md5_finish_ctx (&ctx
, checksum_before_fn
);
13928 md5_init_ctx (&ctx
);
13929 for (i
= 0; i
< nargs
; i
++)
13930 fold_checksum_tree (argarray
[i
], &ctx
, &ht
);
13931 md5_finish_ctx (&ctx
, checksum_before_arglist
);
13935 tem
= fold_builtin_call_array (loc
, type
, fn
, nargs
, argarray
);
13937 tem
= build_call_array_loc (loc
, type
, fn
, nargs
, argarray
);
13939 #ifdef ENABLE_FOLD_CHECKING
13940 md5_init_ctx (&ctx
);
13941 fold_checksum_tree (fn
, &ctx
, &ht
);
13942 md5_finish_ctx (&ctx
, checksum_after_fn
);
13945 if (memcmp (checksum_before_fn
, checksum_after_fn
, 16))
13946 fold_check_failed (fn
, tem
);
13948 md5_init_ctx (&ctx
);
13949 for (i
= 0; i
< nargs
; i
++)
13950 fold_checksum_tree (argarray
[i
], &ctx
, &ht
);
13951 md5_finish_ctx (&ctx
, checksum_after_arglist
);
13953 if (memcmp (checksum_before_arglist
, checksum_after_arglist
, 16))
13954 fold_check_failed (NULL_TREE
, tem
);
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */
#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;
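/* Illustrative aside (not part of GCC): START_FOLD_INIT/END_FOLD_INIT
   bracket a fold call so that trap- and rounding-sensitive flags are
   cleared while folding an initializer and restored afterwards.  A
   minimal standalone sketch of the same save/clear/fold/restore shape,
   with hypothetical toy_* flags, guarded with #if 0.  */
#if 0
#include <assert.h>

static int toy_flag_trapping_math = 1;   /* stands in for flag_trapping_math */
static int toy_folding_initializer = 0;  /* stands in for folding_initializer */

static int
toy_fold_initializer (int value)
{
  /* Save, clear, fold, restore -- the shape of START/END_FOLD_INIT.  */
  int saved_trapping_math = toy_flag_trapping_math;
  int saved_folding_initializer = toy_folding_initializer;
  toy_flag_trapping_math = 0;
  toy_folding_initializer = 1;

  int result = value * 2;   /* stands in for fold (expr) */

  toy_flag_trapping_math = saved_trapping_math;
  toy_folding_initializer = saved_folding_initializer;
  return result;
}

static void
toy_fold_initializer_check (void)
{
  assert (toy_fold_initializer (21) == 42);
  assert (toy_flag_trapping_math == 1);    /* restored */
  assert (toy_folding_initializer == 0);   /* restored */
}
#endif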
13983 fold_init (tree expr
)
13988 result
= fold (expr
);
13995 fold_build1_initializer_loc (location_t loc
, enum tree_code code
,
13996 tree type
, tree op
)
14001 result
= fold_build1_loc (loc
, code
, type
, op
);
14008 fold_build2_initializer_loc (location_t loc
, enum tree_code code
,
14009 tree type
, tree op0
, tree op1
)
14014 result
= fold_build2_loc (loc
, code
, type
, op0
, op1
);
14021 fold_build_call_array_initializer_loc (location_t loc
, tree type
, tree fn
,
14022 int nargs
, tree
*argarray
)
14027 result
= fold_build_call_array_loc (loc
, type
, fn
, nargs
, argarray
);
14034 fold_binary_initializer_loc (location_t loc
, tree_code code
, tree type
,
14035 tree lhs
, tree rhs
)
14040 result
= fold_binary_loc (loc
, code
, type
, lhs
, rhs
);
14046 #undef START_FOLD_INIT
14047 #undef END_FOLD_INIT
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).

   NOWRAP specifies whether all outer operations in TYPE should
   be considered not wrapping.  Any type conversion within TOP acts
   as a barrier and we will fall back to NOWRAP being false.
   NOWRAP is mostly used to treat expressions in TYPE_SIZE and friends
   as not wrapping even though they are generally using unsigned arithmetic.  */
14096 multiple_of_p (tree type
, const_tree top
, const_tree bottom
, bool nowrap
)
14101 if (operand_equal_p (top
, bottom
, 0))
14104 if (TREE_CODE (type
) != INTEGER_TYPE
)
14107 switch (TREE_CODE (top
))
14110 /* Bitwise and provides a power of two multiple. If the mask is
14111 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14112 if (!integer_pow2p (bottom
))
14114 return (multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
, nowrap
)
14115 || multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
, nowrap
));
/* If the multiplication can wrap we cannot recurse further unless
   the bottom is a power of two which is where wrapping does not
   matter.  */
14122 && !TYPE_OVERFLOW_UNDEFINED (type
)
14123 && !integer_pow2p (bottom
))
14125 if (TREE_CODE (bottom
) == INTEGER_CST
)
14127 op1
= TREE_OPERAND (top
, 0);
14128 op2
= TREE_OPERAND (top
, 1);
14129 if (TREE_CODE (op1
) == INTEGER_CST
)
14130 std::swap (op1
, op2
);
14131 if (TREE_CODE (op2
) == INTEGER_CST
)
14133 if (multiple_of_p (type
, op2
, bottom
, nowrap
))
14135 /* Handle multiple_of_p ((x * 2 + 2) * 4, 8). */
14136 if (multiple_of_p (type
, bottom
, op2
, nowrap
))
14138 widest_int w
= wi::sdiv_trunc (wi::to_widest (bottom
),
14139 wi::to_widest (op2
));
14140 if (wi::fits_to_tree_p (w
, TREE_TYPE (bottom
)))
14142 op2
= wide_int_to_tree (TREE_TYPE (bottom
), w
);
14143 return multiple_of_p (type
, op1
, op2
, nowrap
);
14146 return multiple_of_p (type
, op1
, bottom
, nowrap
);
14149 return (multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
, nowrap
)
14150 || multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
, nowrap
));
14153 /* Handle X << CST as X * (1 << CST) and only process the constant. */
14154 if (TREE_CODE (TREE_OPERAND (top
, 1)) == INTEGER_CST
)
14156 op1
= TREE_OPERAND (top
, 1);
14157 if (wi::to_widest (op1
) < TYPE_PRECISION (type
))
14160 = wi::one (TYPE_PRECISION (type
)) << wi::to_wide (op1
);
14161 return multiple_of_p (type
,
14162 wide_int_to_tree (type
, mul_op
), bottom
,
/* If the addition or subtraction can wrap we cannot recurse further
   unless bottom is a power of two which is where wrapping does not
   matter.  */
14174 && !TYPE_OVERFLOW_UNDEFINED (type
)
14175 && !integer_pow2p (bottom
))
/* Handle cases like op0 + 0xfffffffd as op0 - 3 if the expression has
   unsigned type.  For example, (X / 3) + 0xfffffffd is a multiple of 3,
   but 0xfffffffd is not.  */
14181 op1
= TREE_OPERAND (top
, 1);
14182 if (TREE_CODE (top
) == PLUS_EXPR
14184 && TYPE_UNSIGNED (type
)
14185 && TREE_CODE (op1
) == INTEGER_CST
&& tree_int_cst_sign_bit (op1
))
14186 op1
= fold_build1 (NEGATE_EXPR
, type
, op1
);
14188 /* It is impossible to prove if op0 +- op1 is multiple of bottom
14189 precisely, so be conservative here checking if both op0 and op1
14190 are multiple of bottom. Note we check the second operand first
14191 since it's usually simpler. */
14192 return (multiple_of_p (type
, op1
, bottom
, nowrap
)
14193 && multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
, nowrap
));
14196 /* Can't handle conversions from non-integral or wider integral type. */
14197 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top
, 0))) != INTEGER_TYPE
)
14198 || (TYPE_PRECISION (type
)
14199 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top
, 0)))))
14201 /* NOWRAP only extends to operations in the outermost type so
14202 make sure to strip it off here. */
14203 return multiple_of_p (TREE_TYPE (TREE_OPERAND (top
, 0)),
14204 TREE_OPERAND (top
, 0), bottom
, false);
14207 return multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
, nowrap
);
14210 return (multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
, nowrap
)
14211 && multiple_of_p (type
, TREE_OPERAND (top
, 2), bottom
, nowrap
));
14214 if (TREE_CODE (bottom
) != INTEGER_CST
|| integer_zerop (bottom
))
14216 return wi::multiple_of_p (wi::to_widest (top
), wi::to_widest (bottom
),
14220 if (TREE_CODE (bottom
) == INTEGER_CST
14221 && (stmt
= SSA_NAME_DEF_STMT (top
)) != NULL
14222 && gimple_code (stmt
) == GIMPLE_ASSIGN
)
14224 enum tree_code code
= gimple_assign_rhs_code (stmt
);
/* Check for special cases to see if top is defined as multiple
   of bottom:

     top = (X & ~(bottom - 1)) ; bottom is power of 2

   or

     Y = X % bottom
     top = X - Y.  */
14235 if (code
== BIT_AND_EXPR
14236 && (op2
= gimple_assign_rhs2 (stmt
)) != NULL_TREE
14237 && TREE_CODE (op2
) == INTEGER_CST
14238 && integer_pow2p (bottom
)
14239 && wi::multiple_of_p (wi::to_widest (op2
),
14240 wi::to_widest (bottom
), UNSIGNED
))
14243 op1
= gimple_assign_rhs1 (stmt
);
14244 if (code
== MINUS_EXPR
14245 && (op2
= gimple_assign_rhs2 (stmt
)) != NULL_TREE
14246 && TREE_CODE (op2
) == SSA_NAME
14247 && (stmt
= SSA_NAME_DEF_STMT (op2
)) != NULL
14248 && gimple_code (stmt
) == GIMPLE_ASSIGN
14249 && (code
= gimple_assign_rhs_code (stmt
)) == TRUNC_MOD_EXPR
14250 && operand_equal_p (op1
, gimple_assign_rhs1 (stmt
), 0)
14251 && operand_equal_p (bottom
, gimple_assign_rhs2 (stmt
), 0))
14258 if (POLY_INT_CST_P (top
) && poly_int_tree_p (bottom
))
14259 return multiple_p (wi::to_poly_widest (top
),
14260 wi::to_poly_widest (bottom
));
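/* Illustrative aside (not part of GCC): multiple_of_p above answers "is TOP
   a structural multiple of BOTTOM" by recursing through multiplications,
   additions and shifts.  A minimal standalone sketch of that recursion on a
   hypothetical toy expression type follows; GCC's real version works on
   `tree', handles conversions, SSA names and overflow semantics, and is far
   more careful.  The toy_* names are invented and the block is guarded with
   #if 0.  */
#if 0
#include <assert.h>

enum toy_code { TOY_CONST, TOY_MULT, TOY_PLUS, TOY_LSHIFT };

struct toy_expr
{
  enum toy_code code;
  long value;                        /* constant value, or the shift count */
  const struct toy_expr *op0, *op1;
};

/* Conservative check: return 1 only if we can prove TOP is a multiple of
   BOTTOM, 0 otherwise (mirroring the "cannot easily determine" rule).  */
static int
toy_multiple_of (const struct toy_expr *top, long bottom)
{
  switch (top->code)
    {
    case TOY_CONST:
      return top->value % bottom == 0;
    case TOY_MULT:
      /* A * B is a multiple of C if either factor is.  */
      return toy_multiple_of (top->op0, bottom)
	     || toy_multiple_of (top->op1, bottom);
    case TOY_PLUS:
      /* Be conservative: require both addends to be multiples.  */
      return toy_multiple_of (top->op0, bottom)
	     && toy_multiple_of (top->op1, bottom);
    case TOY_LSHIFT:
      {
	/* X << N is X * (1 << N); fold the constant and recurse.  */
	struct toy_expr scaled = { TOY_CONST, 1L << top->value, 0, 0 };
	struct toy_expr mult = { TOY_MULT, 0, top->op0, &scaled };
	return toy_multiple_of (&mult, bottom);
      }
    }
  return 0;
}

static void
toy_multiple_of_check (void)
{
  struct toy_expr i = { TOY_CONST, 5, 0, 0 };
  struct toy_expr j8 = { TOY_CONST, 24, 0, 0 };   /* "J * 8" already folded */
  struct toy_expr prod = { TOY_MULT, 0, &i, &j8 };
  assert (toy_multiple_of (&prod, 8));            /* I * (J*8) is a multiple of 8 */
  struct toy_expr shifted = { TOY_LSHIFT, 3, &i, 0 };
  assert (toy_multiple_of (&shifted, 8));         /* I << 3 is a multiple of 8 */
}
#endif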
14266 /* Return true if expression X cannot be (or contain) a NaN or infinity.
14267 This function returns true for integer expressions, and returns
14268 false if uncertain. */
14271 tree_expr_finite_p (const_tree x
)
14273 machine_mode mode
= element_mode (x
);
14274 if (!HONOR_NANS (mode
) && !HONOR_INFINITIES (mode
))
14276 switch (TREE_CODE (x
))
14279 return real_isfinite (TREE_REAL_CST_PTR (x
));
14281 return tree_expr_finite_p (TREE_REALPART (x
))
14282 && tree_expr_finite_p (TREE_IMAGPART (x
));
14287 case NON_LVALUE_EXPR
:
14290 return tree_expr_finite_p (TREE_OPERAND (x
, 0));
14293 return tree_expr_finite_p (TREE_OPERAND (x
, 0))
14294 && tree_expr_finite_p (TREE_OPERAND (x
, 1));
14296 return tree_expr_finite_p (TREE_OPERAND (x
, 1))
14297 && tree_expr_finite_p (TREE_OPERAND (x
, 2));
14299 switch (get_call_combined_fn (x
))
14303 return tree_expr_finite_p (CALL_EXPR_ARG (x
, 0));
14308 return tree_expr_finite_p (CALL_EXPR_ARG (x
, 0))
14309 && tree_expr_finite_p (CALL_EXPR_ARG (x
, 1));
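/* Illustrative aside (not part of GCC): tree_expr_finite_p and the
   tree_expr_maybe_* predicates below follow a conservative convention:
   "definitely" forms return false when uncertain, "maybe" forms return true
   when uncertain.  A minimal standalone sketch of that convention for a
   single double value, with hypothetical toy_* names, guarded with #if 0.  */
#if 0
#include <assert.h>
#include <math.h>
#include <stdbool.h>

/* "Definitely" predicate: false when we cannot prove the property.  */
static bool
toy_definitely_finite (bool known_constant, double value)
{
  if (!known_constant)
    return false;              /* uncertain => false */
  return isfinite (value);
}

/* "Maybe" predicate: true when we cannot rule the property out.  */
static bool
toy_maybe_nan (bool known_constant, double value)
{
  if (!known_constant)
    return true;               /* uncertain => true */
  return isnan (value);
}

static void
toy_predicate_check (void)
{
  assert (toy_definitely_finite (true, 1.5));
  assert (!toy_definitely_finite (false, 1.5));   /* unknown operand */
  assert (!toy_maybe_nan (true, 1.5));
  assert (toy_maybe_nan (false, 1.5));            /* unknown operand */
}
#endif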
14319 /* Return true if expression X evaluates to an infinity.
14320 This function returns false for integer expressions. */
14323 tree_expr_infinite_p (const_tree x
)
14325 if (!HONOR_INFINITIES (x
))
14327 switch (TREE_CODE (x
))
14330 return real_isinf (TREE_REAL_CST_PTR (x
));
14333 case NON_LVALUE_EXPR
:
14335 return tree_expr_infinite_p (TREE_OPERAND (x
, 0));
14337 return tree_expr_infinite_p (TREE_OPERAND (x
, 1))
14338 && tree_expr_infinite_p (TREE_OPERAND (x
, 2));
14344 /* Return true if expression X could evaluate to an infinity.
14345 This function returns false for integer expressions, and returns
14346 true if uncertain. */
14349 tree_expr_maybe_infinite_p (const_tree x
)
14351 if (!HONOR_INFINITIES (x
))
14353 switch (TREE_CODE (x
))
14356 return real_isinf (TREE_REAL_CST_PTR (x
));
14361 return tree_expr_maybe_infinite_p (TREE_OPERAND (x
, 0));
14363 return tree_expr_maybe_infinite_p (TREE_OPERAND (x
, 1))
14364 || tree_expr_maybe_infinite_p (TREE_OPERAND (x
, 2));
14370 /* Return true if expression X evaluates to a signaling NaN.
14371 This function returns false for integer expressions. */
14374 tree_expr_signaling_nan_p (const_tree x
)
14376 if (!HONOR_SNANS (x
))
14378 switch (TREE_CODE (x
))
14381 return real_issignaling_nan (TREE_REAL_CST_PTR (x
));
14382 case NON_LVALUE_EXPR
:
14384 return tree_expr_signaling_nan_p (TREE_OPERAND (x
, 0));
14386 return tree_expr_signaling_nan_p (TREE_OPERAND (x
, 1))
14387 && tree_expr_signaling_nan_p (TREE_OPERAND (x
, 2));
14393 /* Return true if expression X could evaluate to a signaling NaN.
14394 This function returns false for integer expressions, and returns
14395 true if uncertain. */
14398 tree_expr_maybe_signaling_nan_p (const_tree x
)
14400 if (!HONOR_SNANS (x
))
14402 switch (TREE_CODE (x
))
14405 return real_issignaling_nan (TREE_REAL_CST_PTR (x
));
14411 case NON_LVALUE_EXPR
:
14413 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x
, 0));
14416 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x
, 0))
14417 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x
, 1));
14419 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x
, 1))
14420 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x
, 2));
14422 switch (get_call_combined_fn (x
))
14426 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x
, 0));
14431 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x
, 0))
14432 || tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x
, 1));
14441 /* Return true if expression X evaluates to a NaN.
14442 This function returns false for integer expressions. */
14445 tree_expr_nan_p (const_tree x
)
14447 if (!HONOR_NANS (x
))
14449 switch (TREE_CODE (x
))
14452 return real_isnan (TREE_REAL_CST_PTR (x
));
14453 case NON_LVALUE_EXPR
:
14455 return tree_expr_nan_p (TREE_OPERAND (x
, 0));
14457 return tree_expr_nan_p (TREE_OPERAND (x
, 1))
14458 && tree_expr_nan_p (TREE_OPERAND (x
, 2));
14464 /* Return true if expression X could evaluate to a NaN.
14465 This function returns false for integer expressions, and returns
14466 true if uncertain. */
14469 tree_expr_maybe_nan_p (const_tree x
)
14471 if (!HONOR_NANS (x
))
14473 switch (TREE_CODE (x
))
14476 return real_isnan (TREE_REAL_CST_PTR (x
));
14482 return !tree_expr_finite_p (TREE_OPERAND (x
, 0))
14483 || !tree_expr_finite_p (TREE_OPERAND (x
, 1));
14487 case NON_LVALUE_EXPR
:
14489 return tree_expr_maybe_nan_p (TREE_OPERAND (x
, 0));
14492 return tree_expr_maybe_nan_p (TREE_OPERAND (x
, 0))
14493 || tree_expr_maybe_nan_p (TREE_OPERAND (x
, 1));
14495 return tree_expr_maybe_nan_p (TREE_OPERAND (x
, 1))
14496 || tree_expr_maybe_nan_p (TREE_OPERAND (x
, 2));
14498 switch (get_call_combined_fn (x
))
14502 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x
, 0));
14507 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x
, 0))
14508 || tree_expr_maybe_nan_p (CALL_EXPR_ARG (x
, 1));
14517 /* Return true if expression X could evaluate to -0.0.
14518 This function returns true if uncertain. */
14521 tree_expr_maybe_real_minus_zero_p (const_tree x
)
14523 if (!HONOR_SIGNED_ZEROS (x
))
14525 switch (TREE_CODE (x
))
14528 return REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (x
));
14533 case NON_LVALUE_EXPR
:
14535 return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x
, 0));
14537 return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x
, 1))
14538 || tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x
, 2));
14540 switch (get_call_combined_fn (x
))
/* Ideally !(tree_expr_nonzero_p (X) || tree_expr_nonnegative_p (X))
   but currently those predicates require tree and not const_tree.  */

#define tree_expr_nonnegative_warnv_p(X, Y) \
  _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0

#define RECURSE(X) \
  ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
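/* Illustrative aside (not part of GCC): the two macros above poison direct
   calls to tree_expr_nonnegative_warnv_p in the rest of this file (any such
   call expands to a _Pragma-generated error) and force recursion through
   RECURSE, which always bumps the depth argument.  A minimal standalone
   sketch of the same trick, with hypothetical toy_* names, guarded with
   #if 0.  */
#if 0
#include <stdbool.h>

struct toy_node { long value; const struct toy_node *inner; };

static bool toy_nonnegative_p (const struct toy_node *node, int depth);

/* Any accidental direct recursion now fails to compile; TOY_RECURSE is the
   only way to recurse and it always increments DEPTH.  */
#define toy_nonnegative_p(X, Y) \
  _Pragma ("GCC error \"Use TOY_RECURSE for recursive calls\"") 0
#define TOY_RECURSE(X) \
  ((toy_nonnegative_p) (X, depth + 1))

/* NODE is nonnegative if its own value is and every wrapped node is;
   give up (conservatively) past a fixed depth.  */
static bool
(toy_nonnegative_p) (const struct toy_node *node, int depth)
{
  if (depth > 8)
    return false;
  if (node->value < 0)
    return false;
  if (node->inner == 0)
    return true;
  return TOY_RECURSE (node->inner);
}

#undef toy_nonnegative_p

static bool
toy_nonnegative_check (void)
{
  struct toy_node inner = { 3, 0 };
  struct toy_node outer = { 7, &inner };
  return toy_nonnegative_p (&outer, 0);   /* true: 7 and 3 are both >= 0 */
}
#endif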
14562 /* Return true if CODE or TYPE is known to be non-negative. */
14565 tree_simple_nonnegative_warnv_p (enum tree_code code
, tree type
)
14567 if ((TYPE_PRECISION (type
) != 1 || TYPE_UNSIGNED (type
))
14568 && truth_value_p (code
))
14569 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14570 have a signed:1 type (where the value is -1 and 0). */
14575 /* Return true if (CODE OP0) is known to be non-negative. If the return
14576 value is based on the assumption that signed overflow is undefined,
14577 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14578 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14581 tree_unary_nonnegative_warnv_p (enum tree_code code
, tree type
, tree op0
,
14582 bool *strict_overflow_p
, int depth
)
14584 if (TYPE_UNSIGNED (type
))
14590 /* We can't return 1 if flag_wrapv is set because
14591 ABS_EXPR<INT_MIN> = INT_MIN. */
14592 if (!ANY_INTEGRAL_TYPE_P (type
))
14594 if (TYPE_OVERFLOW_UNDEFINED (type
))
14596 *strict_overflow_p
= true;
14601 case NON_LVALUE_EXPR
:
14603 case FIX_TRUNC_EXPR
:
14604 return RECURSE (op0
);
14608 tree inner_type
= TREE_TYPE (op0
);
14609 tree outer_type
= type
;
14611 if (TREE_CODE (outer_type
) == REAL_TYPE
)
14613 if (TREE_CODE (inner_type
) == REAL_TYPE
)
14614 return RECURSE (op0
);
14615 if (INTEGRAL_TYPE_P (inner_type
))
14617 if (TYPE_UNSIGNED (inner_type
))
14619 return RECURSE (op0
);
14622 else if (INTEGRAL_TYPE_P (outer_type
))
14624 if (TREE_CODE (inner_type
) == REAL_TYPE
)
14625 return RECURSE (op0
);
14626 if (INTEGRAL_TYPE_P (inner_type
))
14627 return TYPE_PRECISION (inner_type
) < TYPE_PRECISION (outer_type
)
14628 && TYPE_UNSIGNED (inner_type
);
14634 return tree_simple_nonnegative_warnv_p (code
, type
);
14637 /* We don't know sign of `t', so be conservative and return false. */
14641 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14642 value is based on the assumption that signed overflow is undefined,
14643 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14644 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14647 tree_binary_nonnegative_warnv_p (enum tree_code code
, tree type
, tree op0
,
14648 tree op1
, bool *strict_overflow_p
,
14651 if (TYPE_UNSIGNED (type
))
14656 case POINTER_PLUS_EXPR
:
14658 if (FLOAT_TYPE_P (type
))
14659 return RECURSE (op0
) && RECURSE (op1
);
14661 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14662 both unsigned and at least 2 bits shorter than the result. */
14663 if (TREE_CODE (type
) == INTEGER_TYPE
14664 && TREE_CODE (op0
) == NOP_EXPR
14665 && TREE_CODE (op1
) == NOP_EXPR
)
14667 tree inner1
= TREE_TYPE (TREE_OPERAND (op0
, 0));
14668 tree inner2
= TREE_TYPE (TREE_OPERAND (op1
, 0));
14669 if (TREE_CODE (inner1
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner1
)
14670 && TREE_CODE (inner2
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner2
))
14672 unsigned int prec
= MAX (TYPE_PRECISION (inner1
),
14673 TYPE_PRECISION (inner2
)) + 1;
14674 return prec
< TYPE_PRECISION (type
);
14680 if (FLOAT_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
14682 /* x * x is always non-negative for floating point x
14683 or without overflow. */
14684 if (operand_equal_p (op0
, op1
, 0)
14685 || (RECURSE (op0
) && RECURSE (op1
)))
14687 if (ANY_INTEGRAL_TYPE_P (type
)
14688 && TYPE_OVERFLOW_UNDEFINED (type
))
14689 *strict_overflow_p
= true;
14694 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14695 both unsigned and their total bits is shorter than the result. */
14696 if (TREE_CODE (type
) == INTEGER_TYPE
14697 && (TREE_CODE (op0
) == NOP_EXPR
|| TREE_CODE (op0
) == INTEGER_CST
)
14698 && (TREE_CODE (op1
) == NOP_EXPR
|| TREE_CODE (op1
) == INTEGER_CST
))
14700 tree inner0
= (TREE_CODE (op0
) == NOP_EXPR
)
14701 ? TREE_TYPE (TREE_OPERAND (op0
, 0))
14703 tree inner1
= (TREE_CODE (op1
) == NOP_EXPR
)
14704 ? TREE_TYPE (TREE_OPERAND (op1
, 0))
14707 bool unsigned0
= TYPE_UNSIGNED (inner0
);
14708 bool unsigned1
= TYPE_UNSIGNED (inner1
);
14710 if (TREE_CODE (op0
) == INTEGER_CST
)
14711 unsigned0
= unsigned0
|| tree_int_cst_sgn (op0
) >= 0;
14713 if (TREE_CODE (op1
) == INTEGER_CST
)
14714 unsigned1
= unsigned1
|| tree_int_cst_sgn (op1
) >= 0;
14716 if (TREE_CODE (inner0
) == INTEGER_TYPE
&& unsigned0
14717 && TREE_CODE (inner1
) == INTEGER_TYPE
&& unsigned1
)
14719 unsigned int precision0
= (TREE_CODE (op0
) == INTEGER_CST
)
14720 ? tree_int_cst_min_precision (op0
, UNSIGNED
)
14721 : TYPE_PRECISION (inner0
);
14723 unsigned int precision1
= (TREE_CODE (op1
) == INTEGER_CST
)
14724 ? tree_int_cst_min_precision (op1
, UNSIGNED
)
14725 : TYPE_PRECISION (inner1
);
14727 return precision0
+ precision1
< TYPE_PRECISION (type
);
14733 return RECURSE (op0
) || RECURSE (op1
);
/* Usually RECURSE (op0) || RECURSE (op1) but NaNs complicate
   things.  */
14738 if (tree_expr_maybe_nan_p (op0
) || tree_expr_maybe_nan_p (op1
))
14739 return RECURSE (op0
) && RECURSE (op1
);
14740 return RECURSE (op0
) || RECURSE (op1
);
14746 case TRUNC_DIV_EXPR
:
14747 case CEIL_DIV_EXPR
:
14748 case FLOOR_DIV_EXPR
:
14749 case ROUND_DIV_EXPR
:
14750 return RECURSE (op0
) && RECURSE (op1
);
14752 case TRUNC_MOD_EXPR
:
14753 return RECURSE (op0
);
14755 case FLOOR_MOD_EXPR
:
14756 return RECURSE (op1
);
14758 case CEIL_MOD_EXPR
:
14759 case ROUND_MOD_EXPR
:
14761 return tree_simple_nonnegative_warnv_p (code
, type
);
14764 /* We don't know sign of `t', so be conservative and return false. */
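/* Illustrative aside (not part of GCC): two of the PLUS_EXPR/MULT_EXPR cases
   above rely on precision arithmetic -- zero-extended operands at least two
   bits narrower than the result cannot make the sum negative, and a product
   of zero-extended operands stays nonnegative when the operands' total
   precision is below the result's.  A standalone check of those width facts
   with fixed-width C types (assumes 32-bit int); guarded with #if 0.  */
#if 0
#include <assert.h>
#include <stdint.h>

static void
toy_widening_checks (void)
{
  /* 8-bit unsigned operands widened to 32-bit int: the sum is at most
     255 + 255 = 510, far below INT_MAX, so it is always nonnegative.  */
  uint8_t a = 255, b = 255;
  int sum = (int) a + (int) b;
  assert (sum >= 0);

  /* 8-bit unsigned factors: 255 * 255 = 65025 needs only 16 bits, which is
     less than the 32-bit result, so the product is nonnegative too.  */
  int prod = (int) a * (int) b;
  assert (prod >= 0);

  /* With 16-bit factors the precondition fails: 8 + 8 < 32 holds above,
     but 16 + 16 < 32 does not, and 65535 * 65535 would overflow int.  */
  uint16_t c = 65535;
  int64_t wide_prod = (int64_t) c * c;   /* compute safely in 64 bits */
  assert (wide_prod > INT32_MAX);
}
#endif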
14768 /* Return true if T is known to be non-negative. If the return
14769 value is based on the assumption that signed overflow is undefined,
14770 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14771 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14774 tree_single_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
, int depth
)
14776 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
14779 switch (TREE_CODE (t
))
14782 return tree_int_cst_sgn (t
) >= 0;
14785 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t
));
14788 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t
));
14791 return RECURSE (TREE_OPERAND (t
, 1)) && RECURSE (TREE_OPERAND (t
, 2));
14794 /* Limit the depth of recursion to avoid quadratic behavior.
14795 This is expected to catch almost all occurrences in practice.
14796 If this code misses important cases that unbounded recursion
14797 would not, passes that need this information could be revised
14798 to provide it through dataflow propagation. */
14799 return (!name_registered_for_update_p (t
)
14800 && depth
< param_max_ssa_name_query_depth
14801 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t
),
14802 strict_overflow_p
, depth
));
14805 return tree_simple_nonnegative_warnv_p (TREE_CODE (t
), TREE_TYPE (t
));
14809 /* Return true if T is known to be non-negative. If the return
14810 value is based on the assumption that signed overflow is undefined,
14811 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14812 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14815 tree_call_nonnegative_warnv_p (tree type
, combined_fn fn
, tree arg0
, tree arg1
,
14816 bool *strict_overflow_p
, int depth
)
14847 case CFN_BUILT_IN_BSWAP16
:
14848 case CFN_BUILT_IN_BSWAP32
:
14849 case CFN_BUILT_IN_BSWAP64
:
14850 case CFN_BUILT_IN_BSWAP128
:
14856 /* sqrt(-0.0) is -0.0. */
14857 if (!HONOR_SIGNED_ZEROS (type
))
14859 return RECURSE (arg0
);
14891 CASE_CFN_LLRINT_FN
:
14893 CASE_CFN_LLROUND_FN
:
14897 CASE_CFN_LROUND_FN
:
14900 CASE_CFN_NEARBYINT
:
14901 CASE_CFN_NEARBYINT_FN
:
14906 CASE_CFN_ROUNDEVEN
:
14907 CASE_CFN_ROUNDEVEN_FN
:
14910 CASE_CFN_SCALBLN_FN
:
14912 CASE_CFN_SCALBN_FN
:
14914 CASE_CFN_SIGNIFICAND
:
14921 /* True if the 1st argument is nonnegative. */
14922 return RECURSE (arg0
);
14926 /* Usually RECURSE (arg0) || RECURSE (arg1) but NaNs complicate
14927 things. In the presence of sNaNs, we're only guaranteed to be
14928 non-negative if both operands are non-negative. In the presence
14929 of qNaNs, we're non-negative if either operand is non-negative
14930 and can't be a qNaN, or if both operands are non-negative. */
14931 if (tree_expr_maybe_signaling_nan_p (arg0
) ||
14932 tree_expr_maybe_signaling_nan_p (arg1
))
14933 return RECURSE (arg0
) && RECURSE (arg1
);
14934 return RECURSE (arg0
) ? (!tree_expr_maybe_nan_p (arg0
)
14937 && !tree_expr_maybe_nan_p (arg1
));
14941 /* True if the 1st AND 2nd arguments are nonnegative. */
14942 return RECURSE (arg0
) && RECURSE (arg1
);
14945 CASE_CFN_COPYSIGN_FN
:
14946 /* True if the 2nd argument is nonnegative. */
14947 return RECURSE (arg1
);
14950 /* True if the 1st argument is nonnegative or the second
14951 argument is an even integer. */
14952 if (TREE_CODE (arg1
) == INTEGER_CST
14953 && (TREE_INT_CST_LOW (arg1
) & 1) == 0)
14955 return RECURSE (arg0
);
14959 /* True if the 1st argument is nonnegative or the second
14960 argument is an even integer valued real. */
14961 if (TREE_CODE (arg1
) == REAL_CST
)
14966 c
= TREE_REAL_CST (arg1
);
14967 n
= real_to_integer (&c
);
14970 REAL_VALUE_TYPE cint
;
14971 real_from_integer (&cint
, VOIDmode
, n
, SIGNED
);
14972 if (real_identical (&c
, &cint
))
14976 return RECURSE (arg0
);
14981 return tree_simple_nonnegative_warnv_p (CALL_EXPR
, type
);
14984 /* Return true if T is known to be non-negative. If the return
14985 value is based on the assumption that signed overflow is undefined,
14986 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14987 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14990 tree_invalid_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
, int depth
)
14992 enum tree_code code
= TREE_CODE (t
);
14993 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
15000 tree temp
= TARGET_EXPR_SLOT (t
);
15001 t
= TARGET_EXPR_INITIAL (t
);
15003 /* If the initializer is non-void, then it's a normal expression
15004 that will be assigned to the slot. */
15005 if (!VOID_TYPE_P (t
))
15006 return RECURSE (t
);
15008 /* Otherwise, the initializer sets the slot in some way. One common
15009 way is an assignment statement at the end of the initializer. */
15012 if (TREE_CODE (t
) == BIND_EXPR
)
15013 t
= expr_last (BIND_EXPR_BODY (t
));
15014 else if (TREE_CODE (t
) == TRY_FINALLY_EXPR
15015 || TREE_CODE (t
) == TRY_CATCH_EXPR
)
15016 t
= expr_last (TREE_OPERAND (t
, 0));
15017 else if (TREE_CODE (t
) == STATEMENT_LIST
)
15022 if (TREE_CODE (t
) == MODIFY_EXPR
15023 && TREE_OPERAND (t
, 0) == temp
)
15024 return RECURSE (TREE_OPERAND (t
, 1));
15031 tree arg0
= call_expr_nargs (t
) > 0 ? CALL_EXPR_ARG (t
, 0) : NULL_TREE
;
15032 tree arg1
= call_expr_nargs (t
) > 1 ? CALL_EXPR_ARG (t
, 1) : NULL_TREE
;
15034 return tree_call_nonnegative_warnv_p (TREE_TYPE (t
),
15035 get_call_combined_fn (t
),
15038 strict_overflow_p
, depth
);
15040 case COMPOUND_EXPR
:
15042 return RECURSE (TREE_OPERAND (t
, 1));
15045 return RECURSE (expr_last (TREE_OPERAND (t
, 1)));
15048 return RECURSE (TREE_OPERAND (t
, 0));
15051 return tree_simple_nonnegative_warnv_p (TREE_CODE (t
), TREE_TYPE (t
));
15056 #undef tree_expr_nonnegative_warnv_p
15058 /* Return true if T is known to be non-negative. If the return
15059 value is based on the assumption that signed overflow is undefined,
15060 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15061 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
15064 tree_expr_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
, int depth
)
15066 enum tree_code code
;
15067 if (t
== error_mark_node
)
15070 code
= TREE_CODE (t
);
15071 switch (TREE_CODE_CLASS (code
))
15074 case tcc_comparison
:
15075 return tree_binary_nonnegative_warnv_p (TREE_CODE (t
),
15077 TREE_OPERAND (t
, 0),
15078 TREE_OPERAND (t
, 1),
15079 strict_overflow_p
, depth
);
15082 return tree_unary_nonnegative_warnv_p (TREE_CODE (t
),
15084 TREE_OPERAND (t
, 0),
15085 strict_overflow_p
, depth
);
15088 case tcc_declaration
:
15089 case tcc_reference
:
15090 return tree_single_nonnegative_warnv_p (t
, strict_overflow_p
, depth
);
15098 case TRUTH_AND_EXPR
:
15099 case TRUTH_OR_EXPR
:
15100 case TRUTH_XOR_EXPR
:
15101 return tree_binary_nonnegative_warnv_p (TREE_CODE (t
),
15103 TREE_OPERAND (t
, 0),
15104 TREE_OPERAND (t
, 1),
15105 strict_overflow_p
, depth
);
15106 case TRUTH_NOT_EXPR
:
15107 return tree_unary_nonnegative_warnv_p (TREE_CODE (t
),
15109 TREE_OPERAND (t
, 0),
15110 strict_overflow_p
, depth
);
15116 case WITH_SIZE_EXPR
:
15118 return tree_single_nonnegative_warnv_p (t
, strict_overflow_p
, depth
);
15121 return tree_invalid_nonnegative_warnv_p (t
, strict_overflow_p
, depth
);
15125 /* Return true if `t' is known to be non-negative. Handle warnings
15126 about undefined signed overflow. */
15129 tree_expr_nonnegative_p (tree t
)
15131 bool ret
, strict_overflow_p
;
15133 strict_overflow_p
= false;
15134 ret
= tree_expr_nonnegative_warnv_p (t
, &strict_overflow_p
);
15135 if (strict_overflow_p
)
fold_overflow_warning (("assuming signed overflow does not occur when "
			"determining that expression is always "
			"non-negative"),
15139 WARN_STRICT_OVERFLOW_MISC
);
15144 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15145 For floating point we further ensure that T is not denormal.
15146 Similar logic is present in nonzero_address in rtlanal.h.
15148 If the return value is based on the assumption that signed overflow
15149 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15150 change *STRICT_OVERFLOW_P. */
15153 tree_unary_nonzero_warnv_p (enum tree_code code
, tree type
, tree op0
,
15154 bool *strict_overflow_p
)
15159 return tree_expr_nonzero_warnv_p (op0
,
15160 strict_overflow_p
);
15164 tree inner_type
= TREE_TYPE (op0
);
15165 tree outer_type
= type
;
15167 return (TYPE_PRECISION (outer_type
) >= TYPE_PRECISION (inner_type
)
15168 && tree_expr_nonzero_warnv_p (op0
,
15169 strict_overflow_p
));
15173 case NON_LVALUE_EXPR
:
15174 return tree_expr_nonzero_warnv_p (op0
,
15175 strict_overflow_p
);
15184 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15185 For floating point we further ensure that T is not denormal.
15186 Similar logic is present in nonzero_address in rtlanal.h.
15188 If the return value is based on the assumption that signed overflow
15189 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15190 change *STRICT_OVERFLOW_P. */
15193 tree_binary_nonzero_warnv_p (enum tree_code code
,
15196 tree op1
, bool *strict_overflow_p
)
15198 bool sub_strict_overflow_p
;
15201 case POINTER_PLUS_EXPR
:
15203 if (ANY_INTEGRAL_TYPE_P (type
) && TYPE_OVERFLOW_UNDEFINED (type
))
/* In the presence of negative values it is hard
   to say anything definite.  */
15207 sub_strict_overflow_p
= false;
15208 if (!tree_expr_nonnegative_warnv_p (op0
,
15209 &sub_strict_overflow_p
)
15210 || !tree_expr_nonnegative_warnv_p (op1
,
15211 &sub_strict_overflow_p
))
15213 /* One of operands must be positive and the other non-negative. */
15214 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15215 overflows, on a twos-complement machine the sum of two
15216 nonnegative numbers can never be zero. */
15217 return (tree_expr_nonzero_warnv_p (op0
,
15219 || tree_expr_nonzero_warnv_p (op1
,
15220 strict_overflow_p
));
15225 if (TYPE_OVERFLOW_UNDEFINED (type
))
15227 if (tree_expr_nonzero_warnv_p (op0
,
15229 && tree_expr_nonzero_warnv_p (op1
,
15230 strict_overflow_p
))
15232 *strict_overflow_p
= true;
15239 sub_strict_overflow_p
= false;
15240 if (tree_expr_nonzero_warnv_p (op0
,
15241 &sub_strict_overflow_p
)
15242 && tree_expr_nonzero_warnv_p (op1
,
15243 &sub_strict_overflow_p
))
15245 if (sub_strict_overflow_p
)
15246 *strict_overflow_p
= true;
15251 sub_strict_overflow_p
= false;
15252 if (tree_expr_nonzero_warnv_p (op0
,
15253 &sub_strict_overflow_p
))
15255 if (sub_strict_overflow_p
)
15256 *strict_overflow_p
= true;
15258 /* When both operands are nonzero, then MAX must be too. */
15259 if (tree_expr_nonzero_warnv_p (op1
,
15260 strict_overflow_p
))
15263 /* MAX where operand 0 is positive is positive. */
15264 return tree_expr_nonnegative_warnv_p (op0
,
15265 strict_overflow_p
);
15267 /* MAX where operand 1 is positive is positive. */
15268 else if (tree_expr_nonzero_warnv_p (op1
,
15269 &sub_strict_overflow_p
)
15270 && tree_expr_nonnegative_warnv_p (op1
,
15271 &sub_strict_overflow_p
))
15273 if (sub_strict_overflow_p
)
15274 *strict_overflow_p
= true;
15280 return (tree_expr_nonzero_warnv_p (op1
,
15282 || tree_expr_nonzero_warnv_p (op0
,
15283 strict_overflow_p
));
15292 /* Return true when T is an address and is known to be nonzero.
15293 For floating point we further ensure that T is not denormal.
15294 Similar logic is present in nonzero_address in rtlanal.h.
15296 If the return value is based on the assumption that signed overflow
15297 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15298 change *STRICT_OVERFLOW_P. */
15301 tree_single_nonzero_warnv_p (tree t
, bool *strict_overflow_p
)
15303 bool sub_strict_overflow_p
;
15304 switch (TREE_CODE (t
))
15307 return !integer_zerop (t
);
15311 tree base
= TREE_OPERAND (t
, 0);
15313 if (!DECL_P (base
))
15314 base
= get_base_address (base
);
15316 if (base
&& TREE_CODE (base
) == TARGET_EXPR
)
15317 base
= TARGET_EXPR_SLOT (base
);
15322 /* For objects in symbol table check if we know they are non-zero.
15323 Don't do anything for variables and functions before symtab is built;
15324 it is quite possible that they will be declared weak later. */
15325 int nonzero_addr
= maybe_nonzero_address (base
);
15326 if (nonzero_addr
>= 0)
15327 return nonzero_addr
;
15329 /* Constants are never weak. */
15330 if (CONSTANT_CLASS_P (base
))
15337 sub_strict_overflow_p
= false;
15338 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 1),
15339 &sub_strict_overflow_p
)
15340 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 2),
15341 &sub_strict_overflow_p
))
15343 if (sub_strict_overflow_p
)
15344 *strict_overflow_p
= true;
15350 if (!INTEGRAL_TYPE_P (TREE_TYPE (t
)))
15352 return expr_not_equal_to (t
, wi::zero (TYPE_PRECISION (TREE_TYPE (t
))));
#define integer_valued_real_p(X) \
  _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0

#define RECURSE(X) \
  ((integer_valued_real_p) (X, depth + 1))
15366 /* Return true if the floating point result of (CODE OP0) has an
15367 integer value. We also allow +Inf, -Inf and NaN to be considered
15368 integer values. Return false for signaling NaN.
15370 DEPTH is the current nesting depth of the query. */
15373 integer_valued_real_unary_p (tree_code code
, tree op0
, int depth
)
15381 return RECURSE (op0
);
15385 tree type
= TREE_TYPE (op0
);
15386 if (TREE_CODE (type
) == INTEGER_TYPE
)
15388 if (TREE_CODE (type
) == REAL_TYPE
)
15389 return RECURSE (op0
);
15399 /* Return true if the floating point result of (CODE OP0 OP1) has an
15400 integer value. We also allow +Inf, -Inf and NaN to be considered
15401 integer values. Return false for signaling NaN.
15403 DEPTH is the current nesting depth of the query. */
15406 integer_valued_real_binary_p (tree_code code
, tree op0
, tree op1
, int depth
)
15415 return RECURSE (op0
) && RECURSE (op1
);
15423 /* Return true if the floating point result of calling FNDECL with arguments
15424 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
15425 considered integer values. Return false for signaling NaN. If FNDECL
15426 takes fewer than 2 arguments, the remaining ARGn are null.
15428 DEPTH is the current nesting depth of the query. */
15431 integer_valued_real_call_p (combined_fn fn
, tree arg0
, tree arg1
, int depth
)
15439 CASE_CFN_NEARBYINT
:
15440 CASE_CFN_NEARBYINT_FN
:
15445 CASE_CFN_ROUNDEVEN
:
15446 CASE_CFN_ROUNDEVEN_FN
:
15455 return RECURSE (arg0
) && RECURSE (arg1
);
15463 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
15464 has an integer value. We also allow +Inf, -Inf and NaN to be
15465 considered integer values. Return false for signaling NaN.
15467 DEPTH is the current nesting depth of the query. */
15470 integer_valued_real_single_p (tree t
, int depth
)
15472 switch (TREE_CODE (t
))
15475 return real_isinteger (TREE_REAL_CST_PTR (t
), TYPE_MODE (TREE_TYPE (t
)));
15478 return RECURSE (TREE_OPERAND (t
, 1)) && RECURSE (TREE_OPERAND (t
, 2));
15481 /* Limit the depth of recursion to avoid quadratic behavior.
15482 This is expected to catch almost all occurrences in practice.
15483 If this code misses important cases that unbounded recursion
15484 would not, passes that need this information could be revised
15485 to provide it through dataflow propagation. */
15486 return (!name_registered_for_update_p (t
)
15487 && depth
< param_max_ssa_name_query_depth
15488 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t
),
15497 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
15498 has an integer value. We also allow +Inf, -Inf and NaN to be
15499 considered integer values. Return false for signaling NaN.
15501 DEPTH is the current nesting depth of the query. */
15504 integer_valued_real_invalid_p (tree t
, int depth
)
15506 switch (TREE_CODE (t
))
15508 case COMPOUND_EXPR
:
15511 return RECURSE (TREE_OPERAND (t
, 1));
15514 return RECURSE (TREE_OPERAND (t
, 0));
15523 #undef integer_valued_real_p
15525 /* Return true if the floating point expression T has an integer value.
15526 We also allow +Inf, -Inf and NaN to be considered integer values.
15527 Return false for signaling NaN.
15529 DEPTH is the current nesting depth of the query. */
15532 integer_valued_real_p (tree t
, int depth
)
15534 if (t
== error_mark_node
)
15537 STRIP_ANY_LOCATION_WRAPPER (t
);
15539 tree_code code
= TREE_CODE (t
);
15540 switch (TREE_CODE_CLASS (code
))
15543 case tcc_comparison
:
15544 return integer_valued_real_binary_p (code
, TREE_OPERAND (t
, 0),
15545 TREE_OPERAND (t
, 1), depth
);
15548 return integer_valued_real_unary_p (code
, TREE_OPERAND (t
, 0), depth
);
15551 case tcc_declaration
:
15552 case tcc_reference
:
15553 return integer_valued_real_single_p (t
, depth
);
15563 return integer_valued_real_single_p (t
, depth
);
15567 tree arg0
= (call_expr_nargs (t
) > 0
15568 ? CALL_EXPR_ARG (t
, 0)
15570 tree arg1
= (call_expr_nargs (t
) > 1
15571 ? CALL_EXPR_ARG (t
, 1)
15573 return integer_valued_real_call_p (get_call_combined_fn (t
),
15574 arg0
, arg1
, depth
);
15578 return integer_valued_real_invalid_p (t
, depth
);
15582 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15583 attempt to fold the expression to a constant without modifying TYPE,
15586 If the expression could be simplified to a constant, then return
15587 the constant. If the expression would not be simplified to a
15588 constant, then return NULL_TREE. */
15591 fold_binary_to_constant (enum tree_code code
, tree type
, tree op0
, tree op1
)
15593 tree tem
= fold_binary (code
, type
, op0
, op1
);
15594 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
15597 /* Given the components of a unary expression CODE, TYPE and OP0,
15598 attempt to fold the expression to a constant without modifying
15601 If the expression could be simplified to a constant, then return
15602 the constant. If the expression would not be simplified to a
15603 constant, then return NULL_TREE. */
15606 fold_unary_to_constant (enum tree_code code
, tree type
, tree op0
)
15608 tree tem
= fold_unary (code
, type
, op0
);
15609 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
15612 /* If EXP represents referencing an element in a constant string
15613 (either via pointer arithmetic or array indexing), return the
15614 tree representing the value accessed, otherwise return NULL. */
15617 fold_read_from_constant_string (tree exp
)
15619 if ((TREE_CODE (exp
) == INDIRECT_REF
15620 || TREE_CODE (exp
) == ARRAY_REF
)
15621 && TREE_CODE (TREE_TYPE (exp
)) == INTEGER_TYPE
)
15623 tree exp1
= TREE_OPERAND (exp
, 0);
15626 location_t loc
= EXPR_LOCATION (exp
);
15628 if (TREE_CODE (exp
) == INDIRECT_REF
)
15629 string
= string_constant (exp1
, &index
, NULL
, NULL
);
15632 tree low_bound
= array_ref_low_bound (exp
);
15633 index
= fold_convert_loc (loc
, sizetype
, TREE_OPERAND (exp
, 1));
/* Optimize the special-case of a zero lower bound.

   We convert the low_bound to sizetype to avoid some problems
   with constant folding.  (E.g. suppose the lower bound is 1,
   and its mode is QI.  Without the conversion, (ARRAY
   + (INDEX - (unsigned char) 1)) becomes ((ARRAY + (-(unsigned char) 1))
   + INDEX), which becomes (ARRAY + 255 + INDEX).  Oops!)  */
15642 if (! integer_zerop (low_bound
))
15643 index
= size_diffop_loc (loc
, index
,
15644 fold_convert_loc (loc
, sizetype
, low_bound
));
15649 scalar_int_mode char_mode
;
15651 && TYPE_MODE (TREE_TYPE (exp
)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string
)))
15652 && TREE_CODE (string
) == STRING_CST
15653 && tree_fits_uhwi_p (index
)
15654 && compare_tree_int (index
, TREE_STRING_LENGTH (string
)) < 0
15655 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string
))),
15657 && GET_MODE_SIZE (char_mode
) == 1)
15658 return build_int_cst_type (TREE_TYPE (exp
),
15659 (TREE_STRING_POINTER (string
)
15660 [TREE_INT_CST_LOW (index
)]));
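/* Illustrative aside (not part of GCC): fold_read_from_constant_string above
   turns reads such as "hello"[1] or *("hello" + 1) into the character
   constant, provided the index is a known in-range constant and the element
   mode is a one-byte integer mode.  At the source level the equivalence is
   simply the following; the toy_* name is hypothetical and the block is
   guarded with #if 0.  */
#if 0
#include <assert.h>

static void
toy_string_read (void)
{
  /* Both forms read the same constant character; the folder can replace
     them with the character value 'e' at compile time.  */
  assert ("hello"[1] == 'e');
  assert (*("hello" + 1) == 'e');
}
#endif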
15665 /* Folds a read from vector element at IDX of vector ARG. */
15668 fold_read_from_vector (tree arg
, poly_uint64 idx
)
15670 unsigned HOST_WIDE_INT i
;
15671 if (known_lt (idx
, TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg
)))
15672 && known_ge (idx
, 0u)
15673 && idx
.is_constant (&i
))
15675 if (TREE_CODE (arg
) == VECTOR_CST
)
15676 return VECTOR_CST_ELT (arg
, i
);
15677 else if (TREE_CODE (arg
) == CONSTRUCTOR
)
15679 if (CONSTRUCTOR_NELTS (arg
)
15680 && VECTOR_TYPE_P (TREE_TYPE (CONSTRUCTOR_ELT (arg
, 0)->value
)))
15682 if (i
>= CONSTRUCTOR_NELTS (arg
))
15683 return build_zero_cst (TREE_TYPE (TREE_TYPE (arg
)));
15684 return CONSTRUCTOR_ELT (arg
, i
)->value
;
15690 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15691 an integer constant, real, or fixed-point constant.
15693 TYPE is the type of the result. */
15696 fold_negate_const (tree arg0
, tree type
)
15698 tree t
= NULL_TREE
;
15700 switch (TREE_CODE (arg0
))
15703 t
= build_real (type
, real_value_negate (&TREE_REAL_CST (arg0
)));
15708 FIXED_VALUE_TYPE f
;
15709 bool overflow_p
= fixed_arithmetic (&f
, NEGATE_EXPR
,
15710 &(TREE_FIXED_CST (arg0
)), NULL
,
15711 TYPE_SATURATING (type
));
15712 t
= build_fixed (type
, f
);
15713 /* Propagate overflow flags. */
15714 if (overflow_p
| TREE_OVERFLOW (arg0
))
15715 TREE_OVERFLOW (t
) = 1;
15720 if (poly_int_tree_p (arg0
))
15722 wi::overflow_type overflow
;
15723 poly_wide_int res
= wi::neg (wi::to_poly_wide (arg0
), &overflow
);
15724 t
= force_fit_type (type
, res
, 1,
15725 (overflow
&& ! TYPE_UNSIGNED (type
))
15726 || TREE_OVERFLOW (arg0
));
15730 gcc_unreachable ();
15736 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15737 an integer constant or real constant.
15739 TYPE is the type of the result. */
15742 fold_abs_const (tree arg0
, tree type
)
15744 tree t
= NULL_TREE
;
15746 switch (TREE_CODE (arg0
))
15750 /* If the value is unsigned or non-negative, then the absolute value
15751 is the same as the ordinary value. */
15752 wide_int val
= wi::to_wide (arg0
);
15753 wi::overflow_type overflow
= wi::OVF_NONE
;
15754 if (!wi::neg_p (val
, TYPE_SIGN (TREE_TYPE (arg0
))))
/* If the value is negative, then the absolute value is
   its negation.  */
15760 val
= wi::neg (val
, &overflow
);
15762 /* Force to the destination type, set TREE_OVERFLOW for signed
15764 t
= force_fit_type (type
, val
, 1, overflow
| TREE_OVERFLOW (arg0
));
15769 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0
)))
15770 t
= build_real (type
, real_value_negate (&TREE_REAL_CST (arg0
)));
15776 gcc_unreachable ();
15782 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15783 constant. TYPE is the type of the result. */
15786 fold_not_const (const_tree arg0
, tree type
)
15788 gcc_assert (TREE_CODE (arg0
) == INTEGER_CST
);
15790 return force_fit_type (type
, ~wi::to_wide (arg0
), 0, TREE_OVERFLOW (arg0
));
15793 /* Given CODE, a relational operator, the target type, TYPE and two
15794 constant operands OP0 and OP1, return the result of the
15795 relational operation. If the result is not a compile time
15796 constant, then return NULL_TREE. */
15799 fold_relational_const (enum tree_code code
, tree type
, tree op0
, tree op1
)
15801 int result
, invert
;
15803 /* From here on, the only cases we handle are when the result is
15804 known to be a constant. */
15806 if (TREE_CODE (op0
) == REAL_CST
&& TREE_CODE (op1
) == REAL_CST
)
15808 const REAL_VALUE_TYPE
*c0
= TREE_REAL_CST_PTR (op0
);
15809 const REAL_VALUE_TYPE
*c1
= TREE_REAL_CST_PTR (op1
);
15811 /* Handle the cases where either operand is a NaN. */
15812 if (real_isnan (c0
) || real_isnan (c1
))
15822 case UNORDERED_EXPR
:
15836 if (flag_trapping_math
)
15842 gcc_unreachable ();
15845 return constant_boolean_node (result
, type
);
15848 return constant_boolean_node (real_compare (code
, c0
, c1
), type
);
15851 if (TREE_CODE (op0
) == FIXED_CST
&& TREE_CODE (op1
) == FIXED_CST
)
15853 const FIXED_VALUE_TYPE
*c0
= TREE_FIXED_CST_PTR (op0
);
15854 const FIXED_VALUE_TYPE
*c1
= TREE_FIXED_CST_PTR (op1
);
15855 return constant_boolean_node (fixed_compare (code
, c0
, c1
), type
);
15858 /* Handle equality/inequality of complex constants. */
15859 if (TREE_CODE (op0
) == COMPLEX_CST
&& TREE_CODE (op1
) == COMPLEX_CST
)
15861 tree rcond
= fold_relational_const (code
, type
,
15862 TREE_REALPART (op0
),
15863 TREE_REALPART (op1
));
15864 tree icond
= fold_relational_const (code
, type
,
15865 TREE_IMAGPART (op0
),
15866 TREE_IMAGPART (op1
));
15867 if (code
== EQ_EXPR
)
15868 return fold_build2 (TRUTH_ANDIF_EXPR
, type
, rcond
, icond
);
15869 else if (code
== NE_EXPR
)
15870 return fold_build2 (TRUTH_ORIF_EXPR
, type
, rcond
, icond
);
15875 if (TREE_CODE (op0
) == VECTOR_CST
&& TREE_CODE (op1
) == VECTOR_CST
)
15877 if (!VECTOR_TYPE_P (type
))
15879 /* Have vector comparison with scalar boolean result. */
15880 gcc_assert ((code
== EQ_EXPR
|| code
== NE_EXPR
)
15881 && known_eq (VECTOR_CST_NELTS (op0
),
15882 VECTOR_CST_NELTS (op1
)));
15883 unsigned HOST_WIDE_INT nunits
;
15884 if (!VECTOR_CST_NELTS (op0
).is_constant (&nunits
))
15886 for (unsigned i
= 0; i
< nunits
; i
++)
15888 tree elem0
= VECTOR_CST_ELT (op0
, i
);
15889 tree elem1
= VECTOR_CST_ELT (op1
, i
);
15890 tree tmp
= fold_relational_const (EQ_EXPR
, type
, elem0
, elem1
);
15891 if (tmp
== NULL_TREE
)
15893 if (integer_zerop (tmp
))
15894 return constant_boolean_node (code
== NE_EXPR
, type
);
15896 return constant_boolean_node (code
== EQ_EXPR
, type
);
15898 tree_vector_builder elts
;
15899 if (!elts
.new_binary_operation (type
, op0
, op1
, false))
15901 unsigned int count
= elts
.encoded_nelts ();
15902 for (unsigned i
= 0; i
< count
; i
++)
15904 tree elem_type
= TREE_TYPE (type
);
15905 tree elem0
= VECTOR_CST_ELT (op0
, i
);
15906 tree elem1
= VECTOR_CST_ELT (op1
, i
);
15908 tree tem
= fold_relational_const (code
, elem_type
,
15911 if (tem
== NULL_TREE
)
15914 elts
.quick_push (build_int_cst (elem_type
,
15915 integer_zerop (tem
) ? 0 : -1));
15918 return elts
.build ();
/* From here on we only handle LT, LE, GT, GE, EQ and NE.

   To compute GT, swap the arguments and do LT.
   To compute GE, do LT and invert the result.
   To compute LE, swap the arguments, do LT and invert the result.
   To compute NE, do EQ and invert the result.

   Therefore, the code below must handle only EQ and LT.
   (A standalone sketch of this reduction follows this function.)  */
15930 if (code
== LE_EXPR
|| code
== GT_EXPR
)
15932 std::swap (op0
, op1
);
15933 code
= swap_tree_comparison (code
);
/* Note that it is safe to invert for real values here because we
   have already handled the one case where it matters.  */
15940 if (code
== NE_EXPR
|| code
== GE_EXPR
)
15943 code
= invert_tree_comparison (code
, false);
15946 /* Compute a result for LT or EQ if args permit;
15947 Otherwise return T. */
15948 if (TREE_CODE (op0
) == INTEGER_CST
&& TREE_CODE (op1
) == INTEGER_CST
)
15950 if (code
== EQ_EXPR
)
15951 result
= tree_int_cst_equal (op0
, op1
);
15953 result
= tree_int_cst_lt (op0
, op1
);
15960 return constant_boolean_node (result
, type
);
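/* Illustrative aside (not part of GCC): the canonicalization comment inside
   fold_relational_const above reduces all six orderings to LT and EQ by
   swapping operands and inverting results.  A minimal standalone sketch of
   that reduction on plain integers; the toy_* names are hypothetical and the
   block is guarded with #if 0.  */
#if 0
#include <assert.h>
#include <stdbool.h>

enum toy_cmp { TOY_LT, TOY_LE, TOY_GT, TOY_GE, TOY_EQ, TOY_NE };

static bool
toy_compare (enum toy_cmp code, long op0, long op1)
{
  bool invert = false;

  /* To compute GT swap and do LT; to compute LE swap, do LT and invert.  */
  if (code == TOY_LE || code == TOY_GT)
    {
      long tmp = op0;
      op0 = op1;
      op1 = tmp;
      code = (code == TOY_LE) ? TOY_GE : TOY_LT;
    }
  /* GE is !LT, NE is !EQ.  */
  if (code == TOY_NE || code == TOY_GE)
    {
      invert = true;
      code = (code == TOY_NE) ? TOY_EQ : TOY_LT;
    }

  bool result = (code == TOY_EQ) ? (op0 == op1) : (op0 < op1);
  return invert ? !result : result;
}

static void
toy_compare_check (void)
{
  assert (toy_compare (TOY_GT, 3, 2));
  assert (toy_compare (TOY_LE, 2, 2));
  assert (!toy_compare (TOY_LT, 3, 2));
  assert (toy_compare (TOY_GE, 2, 2));
  assert (toy_compare (TOY_NE, 1, 2));
}
#endif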
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */
15968 fold_build_cleanup_point_expr (tree type
, tree expr
)
15970 /* If the expression does not have side effects then we don't have to wrap
15971 it with a cleanup point expression. */
15972 if (!TREE_SIDE_EFFECTS (expr
))
/* If the expression is a return, check whether the expression inside the
   return, or the right-hand side of the modify expression inside the
   return, has no side effects.  If either has no side effects we don't
   need to wrap the expression in a cleanup point expression.  Note we don't
   check the left-hand side of the modify because it should always be a
   return decl.  */
15980 if (TREE_CODE (expr
) == RETURN_EXPR
)
15982 tree op
= TREE_OPERAND (expr
, 0);
15983 if (!op
|| !TREE_SIDE_EFFECTS (op
))
15985 op
= TREE_OPERAND (op
, 1);
15986 if (!TREE_SIDE_EFFECTS (op
))
15990 return build1_loc (EXPR_LOCATION (expr
), CLEANUP_POINT_EXPR
, type
, expr
);
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */
15998 fold_indirect_ref_1 (location_t loc
, tree type
, tree op0
)
16002 poly_uint64 const_op01
;
16005 subtype
= TREE_TYPE (sub
);
16006 if (!POINTER_TYPE_P (subtype
)
16007 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0
)))
16010 if (TREE_CODE (sub
) == ADDR_EXPR
)
16012 tree op
= TREE_OPERAND (sub
, 0);
16013 tree optype
= TREE_TYPE (op
);
16015 /* *&CONST_DECL -> to the value of the const decl. */
16016 if (TREE_CODE (op
) == CONST_DECL
)
16017 return DECL_INITIAL (op
);
16018 /* *&p => p; make sure to handle *&"str"[cst] here. */
16019 if (type
== optype
)
16021 tree fop
= fold_read_from_constant_string (op
);
16027 /* *(foo *)&fooarray => fooarray[0] */
16028 else if (TREE_CODE (optype
) == ARRAY_TYPE
16029 && type
== TREE_TYPE (optype
)
16030 && (!in_gimple_form
16031 || TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
))
16033 tree type_domain
= TYPE_DOMAIN (optype
);
16034 tree min_val
= size_zero_node
;
16035 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
16036 min_val
= TYPE_MIN_VALUE (type_domain
);
16038 && TREE_CODE (min_val
) != INTEGER_CST
)
16040 return build4_loc (loc
, ARRAY_REF
, type
, op
, min_val
,
16041 NULL_TREE
, NULL_TREE
);
16043 /* *(foo *)&complexfoo => __real__ complexfoo */
16044 else if (TREE_CODE (optype
) == COMPLEX_TYPE
16045 && type
== TREE_TYPE (optype
))
16046 return fold_build1_loc (loc
, REALPART_EXPR
, type
, op
);
16047 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16048 else if (VECTOR_TYPE_P (optype
)
16049 && type
== TREE_TYPE (optype
))
16051 tree part_width
= TYPE_SIZE (type
);
16052 tree index
= bitsize_int (0);
16053 return fold_build3_loc (loc
, BIT_FIELD_REF
, type
, op
, part_width
,
16058 if (TREE_CODE (sub
) == POINTER_PLUS_EXPR
16059 && poly_int_tree_p (TREE_OPERAND (sub
, 1), &const_op01
))
16061 tree op00
= TREE_OPERAND (sub
, 0);
16062 tree op01
= TREE_OPERAND (sub
, 1);
16065 if (TREE_CODE (op00
) == ADDR_EXPR
)
16068 op00
= TREE_OPERAND (op00
, 0);
16069 op00type
= TREE_TYPE (op00
);
16071 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16072 if (VECTOR_TYPE_P (op00type
)
16073 && type
== TREE_TYPE (op00type
)
16074 /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
16075 but we want to treat offsets with MSB set as negative.
16076 For the code below negative offsets are invalid and
16077 TYPE_SIZE of the element is something unsigned, so
16078 check whether op01 fits into poly_int64, which implies
16079 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
16080 then just use poly_uint64 because we want to treat the
16081 value as unsigned. */
16082 && tree_fits_poly_int64_p (op01
))
16084 tree part_width
= TYPE_SIZE (type
);
16085 poly_uint64 max_offset
16086 = (tree_to_uhwi (part_width
) / BITS_PER_UNIT
16087 * TYPE_VECTOR_SUBPARTS (op00type
));
16088 if (known_lt (const_op01
, max_offset
))
16090 tree index
= bitsize_int (const_op01
* BITS_PER_UNIT
);
16091 return fold_build3_loc (loc
,
16092 BIT_FIELD_REF
, type
, op00
,
16093 part_width
, index
);
16096 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16097 else if (TREE_CODE (op00type
) == COMPLEX_TYPE
16098 && type
== TREE_TYPE (op00type
))
16100 if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type
)),
16102 return fold_build1_loc (loc
, IMAGPART_EXPR
, type
, op00
);
16104 /* ((foo *)&fooarray)[1] => fooarray[1] */
16105 else if (TREE_CODE (op00type
) == ARRAY_TYPE
16106 && type
== TREE_TYPE (op00type
))
16108 tree type_domain
= TYPE_DOMAIN (op00type
);
16109 tree min_val
= size_zero_node
;
16110 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
16111 min_val
= TYPE_MIN_VALUE (type_domain
);
16112 poly_uint64 type_size
, index
;
16113 if (poly_int_tree_p (min_val
)
16114 && poly_int_tree_p (TYPE_SIZE_UNIT (type
), &type_size
)
16115 && multiple_p (const_op01
, type_size
, &index
))
16117 poly_offset_int off
= index
+ wi::to_poly_offset (min_val
);
16118 op01
= wide_int_to_tree (sizetype
, off
);
16119 return build4_loc (loc
, ARRAY_REF
, type
, op00
, op01
,
16120 NULL_TREE
, NULL_TREE
);
16126 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16127 if (TREE_CODE (TREE_TYPE (subtype
)) == ARRAY_TYPE
16128 && type
== TREE_TYPE (TREE_TYPE (subtype
))
16129 && (!in_gimple_form
16130 || TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
))
16133 tree min_val
= size_zero_node
;
16134 sub
= build_fold_indirect_ref_loc (loc
, sub
);
16135 type_domain
= TYPE_DOMAIN (TREE_TYPE (sub
));
16136 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
16137 min_val
= TYPE_MIN_VALUE (type_domain
);
16139 && TREE_CODE (min_val
) != INTEGER_CST
)
16141 return build4_loc (loc
, ARRAY_REF
, type
, sub
, min_val
, NULL_TREE
,
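/* Illustrative aside (not part of GCC): fold_indirect_ref_1 above rewrites
   indirections through addresses into direct references, e.g.
   *(foo *)&fooarray => fooarray[0], ((foo *)&fooarray)[1] => fooarray[1]
   and *(foo *)&complexfoo => __real__ complexfoo.  The source-level
   equivalences it relies on are shown below (using creal for the complex
   case); the toy_* name is hypothetical and the block is guarded with
   #if 0.  */
#if 0
#include <assert.h>
#include <complex.h>

static void
toy_indirect_ref_check (void)
{
  double array[4] = { 1.0, 2.0, 3.0, 4.0 };
  double _Complex z = 5.0 + 6.0 * I;

  /* *(foo *)&fooarray => fooarray[0]  */
  assert (*(double *) &array == array[0]);

  /* ((foo *)&fooarray)[1] => fooarray[1]  */
  assert (((double *) &array)[1] == array[1]);

  /* *(foo *)&complexfoo => the real part (first element of the pair).  */
  assert (*(double *) &z == creal (z));
}
#endif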
/* Builds an expression for an indirection through T, simplifying some
   cases.  */
16152 build_fold_indirect_ref_loc (location_t loc
, tree t
)
16154 tree type
= TREE_TYPE (TREE_TYPE (t
));
16155 tree sub
= fold_indirect_ref_1 (loc
, type
, t
);
16160 return build1_loc (loc
, INDIRECT_REF
, type
, t
);
16163 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16166 fold_indirect_ref_loc (location_t loc
, tree t
)
16168 tree sub
= fold_indirect_ref_1 (loc
, TREE_TYPE (t
), TREE_OPERAND (t
, 0));
16176 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16177 whose result is ignored. The type of the returned tree need not be
16178 the same as the original expression. */
16181 fold_ignored_result (tree t
)
16183 if (!TREE_SIDE_EFFECTS (t
))
16184 return integer_zero_node
;
16187 switch (TREE_CODE_CLASS (TREE_CODE (t
)))
16190 t
= TREE_OPERAND (t
, 0);
16194 case tcc_comparison
:
16195 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
16196 t
= TREE_OPERAND (t
, 0);
16197 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 0)))
16198 t
= TREE_OPERAND (t
, 1);
16203 case tcc_expression
:
16204 switch (TREE_CODE (t
))
16206 case COMPOUND_EXPR
:
16207 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
16209 t
= TREE_OPERAND (t
, 0);
16213 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1))
16214 || TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 2)))
16216 t
= TREE_OPERAND (t
, 0);
16229 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16232 round_up_loc (location_t loc
, tree value
, unsigned int divisor
)
16234 tree div
= NULL_TREE
;
/* See if VALUE is already a multiple of DIVISOR.  If so, we don't
   have to do anything.  Only do this when we are not given a const,
   because in that case, this check is more expensive than just
   doing it.  */
16243 if (TREE_CODE (value
) != INTEGER_CST
)
16245 div
= build_int_cst (TREE_TYPE (value
), divisor
);
16247 if (multiple_of_p (TREE_TYPE (value
), value
, div
))
16251 /* If divisor is a power of two, simplify this to bit manipulation. */
16252 if (pow2_or_zerop (divisor
))
16254 if (TREE_CODE (value
) == INTEGER_CST
)
16256 wide_int val
= wi::to_wide (value
);
16259 if ((val
& (divisor
- 1)) == 0)
16262 overflow_p
= TREE_OVERFLOW (value
);
16263 val
+= divisor
- 1;
16264 val
&= (int) -divisor
;
16268 return force_fit_type (TREE_TYPE (value
), val
, -1, overflow_p
);
16274 t
= build_int_cst (TREE_TYPE (value
), divisor
- 1);
16275 value
= size_binop_loc (loc
, PLUS_EXPR
, value
, t
);
16276 t
= build_int_cst (TREE_TYPE (value
), - (int) divisor
);
16277 value
= size_binop_loc (loc
, BIT_AND_EXPR
, value
, t
);
16283 div
= build_int_cst (TREE_TYPE (value
), divisor
);
16284 value
= size_binop_loc (loc
, CEIL_DIV_EXPR
, value
, div
);
16285 value
= size_binop_loc (loc
, MULT_EXPR
, value
, div
);
16291 /* Likewise, but round down. */
16294 round_down_loc (location_t loc
, tree value
, int divisor
)
16296 tree div
= NULL_TREE
;
16298 gcc_assert (divisor
> 0);
/* See if VALUE is already a multiple of DIVISOR.  If so, we don't
   have to do anything.  Only do this when we are not given a const,
   because in that case, this check is more expensive than just
   doing it.  */
16306 if (TREE_CODE (value
) != INTEGER_CST
)
16308 div
= build_int_cst (TREE_TYPE (value
), divisor
);
16310 if (multiple_of_p (TREE_TYPE (value
), value
, div
))
16314 /* If divisor is a power of two, simplify this to bit manipulation. */
16315 if (pow2_or_zerop (divisor
))
16319 t
= build_int_cst (TREE_TYPE (value
), -divisor
);
16320 value
= size_binop_loc (loc
, BIT_AND_EXPR
, value
, t
);
16325 div
= build_int_cst (TREE_TYPE (value
), divisor
);
16326 value
= size_binop_loc (loc
, FLOOR_DIV_EXPR
, value
, div
);
16327 value
= size_binop_loc (loc
, MULT_EXPR
, value
, div
);
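/* Worked example (added commentary, not part of the original file):
   round_down_loc (loc, size_int (13), 8) folds to 8, and for a
   non-constant SIZE the power-of-two path simply builds SIZE & -8.  */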
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  poly_int64_pod *pbitpos, tree *poffset)
{
  tree core;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  poly_int64 bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &reversep,
				  &volatilep);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      core = TREE_OPERAND (exp, 0);
      STRIP_NOPS (core);
      *pbitpos = 0;
      *poffset = TREE_OPERAND (exp, 1);
      if (poly_int_tree_p (*poffset))
	{
	  poly_offset_int tem
	    = wi::sext (wi::to_poly_offset (*poffset),
			TYPE_PRECISION (TREE_TYPE (*poffset)));
	  tem <<= LOG2_BITS_PER_UNIT;
	  if (tem.to_shwi (pbitpos))
	    *poffset = NULL_TREE;
	}
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
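/* Illustrative example (added commentary, not part of the original file):
   for EXP == &s.a[3] with S a structure variable, the returned core is
   &s, *PBITPOS holds the constant bit offset of the referenced element
   within S, and *POFFSET is NULL_TREE; any variable component of the
   offset would be returned in *POFFSET instead.  */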
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, poly_int64_pod *diff)
{
  tree core1, core2;
  poly_int64 bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  poly_int64 bytepos1, bytepos2;
  if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
      || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += bytepos1 - bytepos2;
  return true;
}
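/* Illustrative example (added commentary, not part of the original file):
   with "char a[10];", ptr_difference_const (&a[7], &a[2], &d) returns
   true and sets D to 5, while addresses based on different objects or
   differing by a non-constant amount make it return false.  */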
/* Return OFF converted to a pointer offset type suitable as offset for
   POINTER_PLUS_EXPR.  Use location LOC for this conversion.  */

tree
convert_to_ptrofftype_loc (location_t loc, tree off)
{
  if (ptrofftype_p (TREE_TYPE (off)))
    return off;
  return fold_convert_loc (loc, sizetype, off);
}
/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */

tree
fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
{
  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
			  ptr, convert_to_ptrofftype_loc (loc, off));
}
/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */

tree
fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
{
  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
			  ptr, size_int (off));
}
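/* Usage sketch (added commentary, not part of the original file):
   fold_build_pointer_plus_hwi_loc (loc, ptr, 4) builds
   POINTER_PLUS_EXPR <ptr, 4> with the offset expressed in sizetype, and
   fold_build2_loc may simplify the result further, e.g. by combining
   nested constant offsets.  */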
/* Return a pointer to a NUL-terminated string containing the sequence
   of bytes corresponding to the representation of the object referred to
   by SRC (or a subsequence of such bytes within it if SRC is a reference
   to an initialized constant array plus some constant offset).
   Set *STRSIZE the number of bytes in the constant sequence including
   the terminating NUL byte.  *STRSIZE is equal to sizeof(A) - OFFSET
   where A is the array that stores the constant sequence that SRC points
   to and OFFSET is the byte offset of SRC from the beginning of A.  SRC
   need not point to a string or even an array of characters but may point
   to an object of any type.  */

const char *
getbyterep (tree src, unsigned HOST_WIDE_INT *strsize)
{
  /* The offset into the array A storing the string, and A's byte size.  */
  tree offset_node;
  tree mem_size;

  if (strsize)
    *strsize = 0;

  if (strsize)
    src = byte_representation (src, &offset_node, &mem_size, NULL);
  else
    src = string_constant (src, &offset_node, &mem_size, NULL);
  if (!src)
    return NULL;

  unsigned HOST_WIDE_INT offset = 0;
  if (offset_node != NULL_TREE)
    {
      if (!tree_fits_uhwi_p (offset_node))
	return NULL;
      else
	offset = tree_to_uhwi (offset_node);
    }

  if (!tree_fits_uhwi_p (mem_size))
    return NULL;

  /* ARRAY_SIZE is the byte size of the array the constant sequence
     is stored in and equal to sizeof A.  INIT_BYTES is the number
     of bytes in the constant sequence used to initialize the array,
     including any embedded NULs as well as the terminating NUL (for
     strings), but not including any trailing zeros/NULs past
     the terminating one appended implicitly to a string literal to
     zero out the remainder of the array it's stored in.  For example,
     given:
       const char a[7] = "abc\0d";
       n = strlen (a + 1);
     ARRAY_SIZE is 7, INIT_BYTES is 6, and OFFSET is 1.  For a valid
     (i.e., nul-terminated) string with no embedded nuls, INIT_BYTES
     is equal to strlen (A) + 1.  */
  const unsigned HOST_WIDE_INT array_size = tree_to_uhwi (mem_size);
  unsigned HOST_WIDE_INT init_bytes = TREE_STRING_LENGTH (src);
  const char *string = TREE_STRING_POINTER (src);

  /* Ideally this would turn into a gcc_checking_assert over time.  */
  if (init_bytes > array_size)
    init_bytes = array_size;

  if (init_bytes == 0 || offset >= array_size)
    return NULL;

  if (strsize)
    {
      /* Compute and store the number of characters from the beginning
	 of the substring at OFFSET to the end, including the terminating
	 nul.  Offsets past the initial length refer to null strings.  */
      if (offset < init_bytes)
	*strsize = init_bytes - offset;
      else
	*strsize = 1;
    }
  else
    {
      tree eltype = TREE_TYPE (TREE_TYPE (src));
      /* Support only properly NUL-terminated single byte strings.  */
      if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype)) != 1)
	return NULL;
      if (string[init_bytes - 1] != '\0')
	return NULL;
    }

  return offset < init_bytes ? string + offset : "";
}
/* Return a pointer to a NUL-terminated string corresponding to
   the expression STR referencing a constant string, possibly
   involving a constant offset.  Return null if STR either doesn't
   reference a constant string or if it involves a nonconstant
   offset.  */

const char *
c_getstr (tree str)
{
  return getbyterep (str, NULL);
}
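/* Illustrative example (added commentary, not part of the original file):
   given "const char a[7] = "abc\0d";", c_getstr (&a[1]) returns the
   NUL-terminated string "bc", whereas getbyterep with a non-null STRSIZE
   also exposes the bytes past the embedded NUL up to the end of the
   initializer.  */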
/* Given a tree T, compute which bits in T may be nonzero.  */

wide_int
tree_nonzero_bits (const_tree t)
{
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return wi::to_wide (t);
    case SSA_NAME:
      return get_nonzero_bits (t);
    case NON_LVALUE_EXPR:
    case SAVE_EXPR:
      return tree_nonzero_bits (TREE_OPERAND (t, 0));
    case BIT_AND_EXPR:
      return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t, 0)),
			  tree_nonzero_bits (TREE_OPERAND (t, 1)));
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 0)),
			 tree_nonzero_bits (TREE_OPERAND (t, 1)));
    case COND_EXPR:
      return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 1)),
			 tree_nonzero_bits (TREE_OPERAND (t, 2)));
    CASE_CONVERT:
      return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t, 0)),
			     TYPE_PRECISION (TREE_TYPE (t)),
			     TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t, 0))));
    case PLUS_EXPR:
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
	{
	  wide_int nzbits1 = tree_nonzero_bits (TREE_OPERAND (t, 0));
	  wide_int nzbits2 = tree_nonzero_bits (TREE_OPERAND (t, 1));
	  if (wi::bit_and (nzbits1, nzbits2) == 0)
	    return wi::bit_or (nzbits1, nzbits2);
	}
      break;
    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree type = TREE_TYPE (t);
	  wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
	  wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
				       TYPE_PRECISION (type));
	  return wi::neg_p (arg1)
		 ? wi::rshift (nzbits, -arg1, TYPE_SIGN (type))
		 : wi::lshift (nzbits, arg1);
	}
      break;
    case RSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree type = TREE_TYPE (t);
	  wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
	  wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
				       TYPE_PRECISION (type));
	  return wi::neg_p (arg1)
		 ? wi::lshift (nzbits, -arg1)
		 : wi::rshift (nzbits, arg1, TYPE_SIGN (type));
	}
      break;
    default:
      break;
    }

  return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t)));
}
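/* Illustrative example (added commentary, not part of the original file):
   for T == (x & 0xf0) << 4 the result is 0xf00, the mask of bits that
   may possibly be set; any unhandled tree code conservatively yields an
   all-ones mask for the type's precision.  */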
/* Helper function for address compare simplifications in match.pd.
   OP0 and OP1 are ADDR_EXPR operands being compared by CODE.
   TYPE is the type of comparison operands.
   BASE0, BASE1, OFF0 and OFF1 are set by the function.
   GENERIC is true if GENERIC folding and false for GIMPLE folding.
   Returns 0 if OP0 is known to be unequal to OP1 regardless of OFF{0,1},
   1 if bases are known to be equal and OP0 cmp OP1 depends on OFF0 cmp OFF1,
   and 2 if unknown.  */

int
address_compare (tree_code code, tree type, tree op0, tree op1,
		 tree &base0, tree &base1, poly_int64 &off0, poly_int64 &off1,
		 bool generic)
{
  gcc_checking_assert (TREE_CODE (op0) == ADDR_EXPR);
  gcc_checking_assert (TREE_CODE (op1) == ADDR_EXPR);
  base0 = get_addr_base_and_unit_offset (TREE_OPERAND (op0, 0), &off0);
  base1 = get_addr_base_and_unit_offset (TREE_OPERAND (op1, 0), &off1);
  if (base0 && TREE_CODE (base0) == MEM_REF)
    {
      off0 += mem_ref_offset (base0).force_shwi ();
      base0 = TREE_OPERAND (base0, 0);
    }
  if (base1 && TREE_CODE (base1) == MEM_REF)
    {
      off1 += mem_ref_offset (base1).force_shwi ();
      base1 = TREE_OPERAND (base1, 0);
    }
  if (base0 == NULL_TREE || base1 == NULL_TREE)
    return 2;

  int equal = 2;
  /* Punt in GENERIC on variables with value expressions;
     the value expressions might point to fields/elements
     of other vars etc.  */
  if (generic
      && ((VAR_P (base0) && DECL_HAS_VALUE_EXPR_P (base0))
	  || (VAR_P (base1) && DECL_HAS_VALUE_EXPR_P (base1))))
    return 2;
  else if (decl_in_symtab_p (base0) && decl_in_symtab_p (base1))
    {
      symtab_node *node0 = symtab_node::get_create (base0);
      symtab_node *node1 = symtab_node::get_create (base1);
      equal = node0->equal_address_to (node1);
    }
  else if ((DECL_P (base0)
	    || TREE_CODE (base0) == SSA_NAME
	    || TREE_CODE (base0) == STRING_CST)
	   && (DECL_P (base1)
	       || TREE_CODE (base1) == SSA_NAME
	       || TREE_CODE (base1) == STRING_CST))
    equal = (base0 == base1);
  /* Assume different STRING_CSTs with the same content will be
     merged.  */
  if (equal == 0
      && TREE_CODE (base0) == STRING_CST
      && TREE_CODE (base1) == STRING_CST
      && TREE_STRING_LENGTH (base0) == TREE_STRING_LENGTH (base1)
      && memcmp (TREE_STRING_POINTER (base0), TREE_STRING_POINTER (base1),
		 TREE_STRING_LENGTH (base0)) == 0)
    equal = 1;
  if (equal == 1)
    {
      if (code == EQ_EXPR
	  || code == NE_EXPR
	  /* If the offsets are equal we can ignore overflow.  */
	  || known_eq (off0, off1)
	  || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
	  /* Or if we compare using pointers to decls or strings.  */
	  || (POINTER_TYPE_P (type)
	      && (DECL_P (base0) || TREE_CODE (base0) == STRING_CST)))
	return 1;
      return 2;
    }
  if (equal != 0)
    return equal;
  if (code != EQ_EXPR && code != NE_EXPR)
    return 0;
  /* At this point we know (or assume) the two pointers point at
     different objects.  */
  HOST_WIDE_INT ioff0 = -1, ioff1 = -1;
  off0.is_constant (&ioff0);
  off1.is_constant (&ioff1);
  /* Punt on non-zero offsets from functions.  */
  if ((TREE_CODE (base0) == FUNCTION_DECL && ioff0)
      || (TREE_CODE (base1) == FUNCTION_DECL && ioff1))
    return 2;
  /* Or if the bases are neither decls nor string literals.  */
  if (!DECL_P (base0) && TREE_CODE (base0) != STRING_CST)
    return 2;
  if (!DECL_P (base1) && TREE_CODE (base1) != STRING_CST)
    return 2;
  /* For initializers, assume addresses of different functions are
     different.  */
  if (folding_initializer
      && TREE_CODE (base0) == FUNCTION_DECL
      && TREE_CODE (base1) == FUNCTION_DECL)
    return 0;

  /* Compute whether one address points to the start of one
     object and another one to the end of another one.  */
  poly_int64 size0 = 0, size1 = 0;
  if (TREE_CODE (base0) == STRING_CST)
    {
      if (ioff0 < 0 || ioff0 > TREE_STRING_LENGTH (base0))
	equal = 2;
      else
	size0 = TREE_STRING_LENGTH (base0);
    }
  else if (TREE_CODE (base0) == FUNCTION_DECL)
    size0 = 1;
  else
    {
      tree sz0 = DECL_SIZE_UNIT (base0);
      if (!tree_fits_poly_int64_p (sz0))
	equal = 2;
      else
	size0 = tree_to_poly_int64 (sz0);
    }
  if (TREE_CODE (base1) == STRING_CST)
    {
      if (ioff1 < 0 || ioff1 > TREE_STRING_LENGTH (base1))
	equal = 2;
      else
	size1 = TREE_STRING_LENGTH (base1);
    }
  else if (TREE_CODE (base1) == FUNCTION_DECL)
    size1 = 1;
  else
    {
      tree sz1 = DECL_SIZE_UNIT (base1);
      if (!tree_fits_poly_int64_p (sz1))
	equal = 2;
      else
	size1 = tree_to_poly_int64 (sz1);
    }
  if (equal == 0)
    {
      /* If one offset is pointing (or could be) to the beginning of one
	 object and the other is pointing to one past the last byte of the
	 other object, punt.  */
      if (maybe_eq (off0, 0) && maybe_eq (off1, size1))
	equal = 2;
      else if (maybe_eq (off1, 0) && maybe_eq (off0, size0))
	equal = 2;
      /* If both offsets are the same, there are some cases we know that are
	 ok.  Either if we know they aren't zero, or if we know both sizes
	 are no zero.  */
      if (equal == 2
	  && known_eq (off0, off1)
	  && (known_ne (off0, 0)
	      || (known_ne (size0, 0) && known_ne (size1, 0))))
	equal = 0;
    }

  /* At this point, equal is 2 if either one or both pointers are out of
     bounds of their object, or one points to start of its object and the
     other points to end of its object.  This is unspecified behavior
     e.g. in C++.  Otherwise equal is 0.  */
  if (folding_cxx_constexpr && equal)
    return equal;

  /* When both pointers point to string literals, even when equal is 0,
     due to tail merging of string literals the pointers might be the same.  */
  if (TREE_CODE (base0) == STRING_CST && TREE_CODE (base1) == STRING_CST)
    {
      if (ioff0 < 0
	  || ioff1 < 0
	  || ioff0 > TREE_STRING_LENGTH (base0)
	  || ioff1 > TREE_STRING_LENGTH (base1))
	return 2;

      /* If the bytes in the string literals starting at the pointers
	 differ, the pointers need to be different.  */
      if (memcmp (TREE_STRING_POINTER (base0) + ioff0,
		  TREE_STRING_POINTER (base1) + ioff1,
		  MIN (TREE_STRING_LENGTH (base0) - ioff0,
		       TREE_STRING_LENGTH (base1) - ioff1)) == 0)
	{
	  HOST_WIDE_INT ioffmin = MIN (ioff0, ioff1);
	  if (memcmp (TREE_STRING_POINTER (base0) + ioff0 - ioffmin,
		      TREE_STRING_POINTER (base1) + ioff1 - ioffmin,
		      ioffmin) == 0)
	    /* If even the bytes in the string literal before the
	       pointers are the same, the string literals could be
	       tail merged.  */
	    return 2;
	}
      return 0;
    }

  if (folding_cxx_constexpr)
    return 0;

  /* If this is a pointer comparison, ignore for now even
     valid equalities where one pointer is the offset zero
     of one object and the other to one past end of another one.  */
  if (!INTEGRAL_TYPE_P (type))
    return 0;

  /* Assume that string literals can't be adjacent to variables
     (automatic or global).  */
  if (TREE_CODE (base0) == STRING_CST || TREE_CODE (base1) == STRING_CST)
    return 0;

  /* Assume that automatic variables can't be adjacent to global
     variables.  */
  if (is_global_var (base0) != is_global_var (base1))
    return 0;

  return equal;
}
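/* Illustrative example (added commentary, not part of the original file):
   comparing &a with &b for two distinct declarations typically yields 0
   (the addresses fold to unequal), while comparing &a[1] with &a[3]
   yields 1 with BASE0 == BASE1 == A and the two byte offsets in
   OFF0/OFF1, so the caller can fold the address comparison into a
   comparison of the offsets.  */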
/* Return the single non-zero element of a CONSTRUCTOR or NULL_TREE.  */

tree
ctor_single_nonzero_element (const_tree t)
{
  unsigned HOST_WIDE_INT idx;
  constructor_elt *ce;
  tree elt = NULL_TREE;

  if (TREE_CODE (t) != CONSTRUCTOR)
    return NULL_TREE;
  for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t), idx, &ce); idx++)
    if (!integer_zerop (ce->value) && !real_zerop (ce->value))
      {
	if (elt)
	  return NULL_TREE;
	elt = ce->value;
      }
  return elt;
}
#if CHECKING_P

namespace selftest {

/* Helper functions for writing tests of folding trees.  */
/* Verify that the binary op (LHS CODE RHS) folds to CONSTANT.  */

static void
assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
			     tree constant)
{
  ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
}
/* Verify that the binary op (LHS CODE RHS) folds to an NON_LVALUE_EXPR
   wrapping WRAPPED_EXPR.  */

static void
assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
				 tree wrapped_expr)
{
  tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
  ASSERT_NE (wrapped_expr, result);
  ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
  ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
}
/* Verify that various arithmetic binary operations are folded
   correctly.  */

static void
test_arithmetic_folding ()
{
  tree type = integer_type_node;
  tree x = create_tmp_var_raw (type, "x");
  tree zero = build_zero_cst (type);
  tree one = build_int_cst (type, 1);

  /* Addition.  */
  /* 1 <-- (0 + 1) */
  assert_binop_folds_to_const (zero, PLUS_EXPR, one,
			       one);
  assert_binop_folds_to_const (one, PLUS_EXPR, zero,
			       one);

  /* (nonlvalue)x <-- (x + 0) */
  assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
				   x);

  /* Subtraction.  */
  /* 0 <-- (x - x) */
  assert_binop_folds_to_const (x, MINUS_EXPR, x,
			       zero);
  assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
				   x);

  /* Multiplication.  */
  /* 0 <-- (x * 0) */
  assert_binop_folds_to_const (x, MULT_EXPR, zero,
			       zero);

  /* (nonlvalue)x <-- (x * 1) */
  assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
				   x);
}
/* Verify that various binary operations on vectors are folded
   correctly.  */

static void
test_vector_folding ()
{
  tree inner_type = integer_type_node;
  tree type = build_vector_type (inner_type, 4);
  tree zero = build_zero_cst (type);
  tree one = build_one_cst (type);
  tree index = build_index_vector (type, 0, 1);

  /* Verify equality tests that return a scalar boolean result.  */
  tree res_type = boolean_type_node;
  ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, index, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
					       index, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type,
					       index, index)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
					      index, index)));
}
/* Verify folding of VEC_DUPLICATE_EXPRs.  */

static void
test_vec_duplicate_folding ()
{
  scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
  machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
  /* This will be 1 if VEC_MODE isn't a vector mode.  */
  poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);

  tree type = build_vector_type (ssizetype, nunits);
  tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
  tree dup5_cst = build_vector_from_val (type, ssize_int (5));
  ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
}
/* Run all of the selftests within this file.  */

void
fold_const_cc_tests ()
{
  test_arithmetic_folding ();
  test_vector_folding ();
  test_vec_duplicate_folding ();
}

} // namespace selftest

#endif /* CHECKING_P */