/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
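/* Hedged usage sketch of the entry points above (ELT_TYPE and NELTS are
   example-local names, not part of this file): a caller building a size
   expression might write

     tree bytes = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (elt_type),
			      size_int (nelts));

   Both operands of size_binop must be of equivalent sizetype-like types,
   as checked by int_binop_types_match_p later in this file.  */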
#include "coretypes.h"
#include "tree-ssa-operands.h"
#include "optabs-query.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "langhooks.h"
#include "generic-match.h"
#include "gimple-fold.h"
#include "tree-into-ssa.h"
#include "case-cfn-macros.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "stringpool.h"
#include "tree-vector-builder.h"
#include "vec-perm-indices.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
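/* Hedged illustration of the idea (an assumed encoding, not necessarily the
   exact enumerator values): if LT, EQ and GT each occupy their own bit, then
   LE can be formed as LT | EQ, NE as LT | GT, and "always true" as the OR of
   all bits, which is what makes AND/OR of comparisons cheap to compute on the
   encoded form.  */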
enum comparison_code {
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static bool twoval_comparison_p (tree, tree, tree *, tree *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static bool simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static tree fold_negate_expr (location_t, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
/* Similar to protected_set_expr_location, but never modify x in place,
   if location can and needs to be set, unshare it.  */

protected_set_expr_location_unshare (tree x, location_t loc)
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
      SET_EXPR_LOCATION (x, loc);
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

div_if_zero_remainder (const_tree arg1, const_tree arg2)
  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
    return wide_int_to_tree (TREE_TYPE (arg1), quo);
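/* Hedged worked example of the behavior documented above: with ARG1 == 12
   and ARG2 == 4 the exact quotient 3 is returned as a constant of ARG1's
   type; with ARG1 == 13 and ARG2 == 4 the remainder is nonzero, so the
   function gives up and returns NULL_TREE.  */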
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
      if (fold_deferred_overflow_warning != NULL
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)

  if (gimple_no_warning_p (stmt))

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))

    locus = input_location;
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
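/* Hedged usage sketch of the deferral machinery above (example-local names,
   not code from this file): a pass that folds speculatively might write

     fold_defer_overflow_warnings ();
     tree folded = fold_binary (code, type, op0, op1);
     fold_undefer_overflow_warnings (folded != NULL_TREE, stmt,
				     WARN_STRICT_OVERFLOW_MISC);

   so that -Wstrict-overflow only fires when the folded result is kept.  */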
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
  if (fold_deferring_overflow_warnings > 0)
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
/* Return true if the built-in mathematical function specified by FN
   is odd, i.e. -f(x) == f(-x).  */

negate_mathfn_p (combined_fn fn)
    CASE_CFN_ROUNDEVEN_FN:
      return !flag_rounding_math;
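/* Hedged illustration of the property negate_mathfn_p tests for: sin is odd,
   so -sin (x) may be rewritten as sin (-x), while cos is even and gets no
   such rewrite.  For round-to-even style functions the rewrite is further
   guarded by !flag_rounding_math, as in the case shown above.  */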
/* Check whether we may negate an integer constant T without causing
   overflow.  */

may_negate_without_overflow_p (const_tree t)
  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))

  return !wi::only_sign_bit_p (wi::to_wide (t));
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

negate_expr_p (tree t)
  type = TREE_TYPE (t);

  switch (TREE_CODE (t))
      if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

      return !TYPE_OVERFLOW_SANITIZED (type);

      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	/* Steps don't prevent negation.  */
	unsigned int count = vector_cst_encoded_nelts (t);
	for (unsigned int i = 0; i < count; ++i)
	  if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))

      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

      return negate_expr_p (TREE_OPERAND (t, 0));

      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  || HONOR_SIGNED_ZEROS (element_mode (type))
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && ! TYPE_OVERFLOW_WRAPS (type)))
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1)))
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	     && !HONOR_SIGNED_ZEROS (element_mode (type))
	     && (! ANY_INTEGRAL_TYPE_P (type)
		 || TYPE_OVERFLOW_WRAPS (type));

      if (TYPE_UNSIGNED (type))
      /* INT_MIN/n * n doesn't overflow while negating one operand it does
	 if n is a (negative) power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		 (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
		    (wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))

      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));

      if (TYPE_UNSIGNED (type))
      /* In general we can't negate A in A / B, because if A is INT_MIN and
	 B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	      && ! integer_onep (TREE_OPERAND (t, 1))))
	return negate_expr_p (TREE_OPERAND (t, 1));

      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	  tree tem = strip_float_extensions (t);
	    return negate_expr_p (tem);

      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));

      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

fold_negate_expr_1 (location_t loc, tree t)
  tree type = TREE_TYPE (t);

  switch (TREE_CODE (t))
      /* Convert - (~A) to A + 1.  */
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));

      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)

      tem = fold_negate_const (t, type);

	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	  return build_complex (type, rpart, ipart);

	tree_vector_builder elts;
	elts.new_unary_operation (type, t, true);
	unsigned int count = elts.encoded_nelts ();
	for (unsigned int i = 0; i < count; ++i)
	    tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elt == NULL_TREE)
	    elts.quick_push (elt);
	return elts.build ();

      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));

      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));

      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);

      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1)))
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));

      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));

      if (TYPE_UNSIGNED (type))

      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));

      if (TYPE_UNSIGNED (type))
      /* In general we can't negate A in A / B, because if A is INT_MIN and
	 B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (TREE_OPERAND (t, 0)),
				TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	       && ! integer_onep (TREE_OPERAND (t, 1))))
	  && negate_expr_p (TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (TREE_OPERAND (t, 1)));

      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));

      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);

      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
/* A wrapper for fold_negate_expr_1.  */

fold_negate_expr (location_t loc, tree t)
  tree type = TREE_TYPE (t);
  tree tem = fold_negate_expr_1 (loc, t);
  if (tem == NULL_TREE)
  return fold_convert_loc (loc, type, tem);
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);

  tem = fold_negate_expr (loc, t);
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating illegal MINUS pointer expression.  LOC is the location of
   the converted variable part.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */
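/* Hedged worked example of the decomposition described above (names are
   illustrative only): with CODE == PLUS_EXPR and IN == a + 5, the variable
   part a is returned, *LITP is set to 5 and *CONP stays null; with
   IN == a - 5 the literal ends up in *MINUS_LITP instead, because that
   operand was subtracted.  */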
split_tree (tree in, tree type, enum tree_code code,
	    tree *minus_varp, tree *conp, tree *minus_conp,
	    tree *litp, tree *minus_litp, int negate_p)
  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
		   || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR
		       && (TREE_CODE (in) == PLUS_EXPR
			   || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p && *conp)
	*minus_conp = *conp, *conp = 0;
      if (neg_var_p && var)
	*minus_varp = var, var = 0;
  else if (TREE_CONSTANT (in))
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
      /* -1 - X is folded to ~X, undo that here.  Do _not_ do this
	 when IN is constant.  */
      *litp = build_minus_one_cst (type);
      *minus_varp = TREE_OPERAND (in, 0);

	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
	*minus_conp = *conp, *conp = 0;
      else if (*minus_conp)
	*conp = *minus_conp, *minus_conp = 0;
	*minus_varp = var, var = 0;
      else if (*minus_varp)
	var = *minus_varp, *minus_varp = 0;

      && TREE_OVERFLOW_P (*litp))
    *litp = drop_tree_overflow (*litp);
      && TREE_OVERFLOW_P (*minus_litp))
    *minus_litp = drop_tree_overflow (*minus_litp);
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
  gcc_assert (t2 == 0 || code != MINUS_EXPR);

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
      if (code == PLUS_EXPR)
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
      else if (code == MINUS_EXPR)
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
/* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
   a new constant in RES.  Return FALSE if we don't know how to
   evaluate CODE at compile-time.  */

wide_int_binop (wide_int &res,
		enum tree_code code, const wide_int &arg1, const wide_int &arg2,
		signop sign, wi::overflow_type *overflow)
  *overflow = wi::OVF_NONE;
      res = wi::bit_or (arg1, arg2);
      res = wi::bit_xor (arg1, arg2);
      res = wi::bit_and (arg1, arg2);

      if (wi::neg_p (arg2))
	  if (code == RSHIFT_EXPR)
      if (code == RSHIFT_EXPR)
	/* It's unclear from the C standard whether shifts can overflow.
	   The following code ignores overflow; perhaps a C standard
	   interpretation ruling is needed.  */
	res = wi::rshift (arg1, tmp, sign);
	res = wi::lshift (arg1, tmp);

      if (wi::neg_p (arg2))
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	    code = RROTATE_EXPR;
      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, tmp);
	res = wi::lrotate (arg1, tmp);

      res = wi::add (arg1, arg2, sign, overflow);
      res = wi::sub (arg1, arg2, sign, overflow);
      res = wi::mul (arg1, arg2, sign, overflow);

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      res = wi::div_trunc (arg1, arg2, sign, overflow);

    case FLOOR_DIV_EXPR:
      res = wi::div_floor (arg1, arg2, sign, overflow);
      res = wi::div_ceil (arg1, arg2, sign, overflow);

    case ROUND_DIV_EXPR:
      res = wi::div_round (arg1, arg2, sign, overflow);

    case TRUNC_MOD_EXPR:
      res = wi::mod_trunc (arg1, arg2, sign, overflow);

    case FLOOR_MOD_EXPR:
      res = wi::mod_floor (arg1, arg2, sign, overflow);
      res = wi::mod_ceil (arg1, arg2, sign, overflow);

    case ROUND_MOD_EXPR:
      res = wi::mod_round (arg1, arg2, sign, overflow);

      res = wi::min (arg1, arg2, sign);
      res = wi::max (arg1, arg2, sign);
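/* Hedged usage sketch of wide_int_binop (example-local variables, not code
   from this file):

     wide_int r;
     wi::overflow_type ovf;
     if (wide_int_binop (r, PLUS_EXPR, wi::to_wide (a), wi::to_wide (b),
			 TYPE_SIGN (TREE_TYPE (a)), &ovf))
       ... use R, consulting OVF for signed wrap-around ...

   A false result means the tree code is not handled at compile time.  */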
/* Combine two poly int's ARG1 and ARG2 under operation CODE to
   produce a new constant in RES.  Return FALSE if we don't know how
   to evaluate CODE at compile-time.  */

poly_int_binop (poly_wide_int &res, enum tree_code code,
		const_tree arg1, const_tree arg2,
		signop sign, wi::overflow_type *overflow)
  gcc_assert (NUM_POLY_INT_COEFFS != 1);
  gcc_assert (poly_int_tree_p (arg1) && poly_int_tree_p (arg2));
      res = wi::add (wi::to_poly_wide (arg1),
		     wi::to_poly_wide (arg2), sign, overflow);
      res = wi::sub (wi::to_poly_wide (arg1),
		     wi::to_poly_wide (arg2), sign, overflow);
      if (TREE_CODE (arg2) == INTEGER_CST)
	res = wi::mul (wi::to_poly_wide (arg1),
		       wi::to_wide (arg2), sign, overflow);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	res = wi::mul (wi::to_poly_wide (arg2),
		       wi::to_wide (arg1), sign, overflow);
      if (TREE_CODE (arg2) == INTEGER_CST)
	res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
      if (TREE_CODE (arg2) != INTEGER_CST
	  || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
/* Combine two integer constants ARG1 and ARG2 under operation CODE to
   produce a new constant.  Return NULL_TREE if we don't know how to
   evaluate CODE at compile-time.  */

int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
		 int overflowable)
  poly_wide_int poly_res;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  wi::overflow_type overflow = wi::OVF_NONE;

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
      wide_int warg1 = wi::to_wide (arg1), res;
      wide_int warg2 = wi::to_wide (arg2, TYPE_PRECISION (type));
      if (!wide_int_binop (res, code, warg1, warg2, sign, &overflow))
  else if (!poly_int_tree_p (arg1)
	   || !poly_int_tree_p (arg2)
	   || !poly_int_binop (poly_res, code, arg1, arg2, sign, &overflow))
  return force_fit_type (type, poly_res, overflowable,
			 (((sign == SIGNED || overflowable == -1)
			  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
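/* Hedged illustration (example-local names): folding 2 + 3 into 5 at compile
   time could be requested as

     tree two = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree five = int_const_binop (PLUS_EXPR, two, three);

   A NULL_TREE result means the combination was not handled.  */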
/* Return true if binary operation OP distributes over addition in operand
   OPNO, with the other operand being held constant.  OPNO counts from 1.  */

distributes_over_addition_p (tree_code op, int opno)
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

const_binop (enum tree_code code, tree arg1, tree arg2)
  /* Sanity check for the recursive cases.  */

  if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
      if (code == POINTER_PLUS_EXPR)
	return int_const_binop (PLUS_EXPR,
				arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;

	/* The following codes are handled by real_arithmetic.  */

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a signaling NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISSIGNALING_NAN (d1)
	      || REAL_VALUE_ISSIGNALING_NAN (d2)))

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && real_equal (&d2, &dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	  /* Make resulting NaN value to be qNaN when flag_signaling_nans
	  t = build_real (type, d1);
      else if (REAL_VALUE_ISNAN (d2))
	  /* Make resulting NaN value to be qNaN when flag_signaling_nans
	  t = build_real (type, d2);

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
  if (TREE_CODE (arg1) == FIXED_CST)
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;

	/* The following codes are handled by fixed_arithmetic.  */

	case TRUNC_DIV_EXPR:
	  if (TREE_CODE (arg2) != FIXED_CST)
	  f2 = TREE_FIXED_CST (arg2);

	  if (TREE_CODE (arg2) != INTEGER_CST)
	    wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.ulow ();

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);

	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);

	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));

	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,

	case TRUNC_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);

	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to minimize
		 overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		  /* In the TRUE branch, we compute
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;

		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);

		  /* In the FALSE branch, we compute
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);

		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);

	return build_complex (type, real, imag);
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST
      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
		   TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
      tree type = TREE_TYPE (arg1);

      if (VECTOR_CST_STEPPED_P (arg1)
	  && VECTOR_CST_STEPPED_P (arg2))
	/* We can operate directly on the encoding if:

	      a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1

	      (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)

	   Addition and subtraction are the supported operators
	   for which this is true.  */
	step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
      else if (VECTOR_CST_STEPPED_P (arg1))
	/* We can operate directly on stepped encodings if:

	      (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)

	   which is true if (x -> x op c) distributes over addition.  */
	step_ok_p = distributes_over_addition_p (code, 1);
	/* Similarly in reverse.  */
	step_ok_p = distributes_over_addition_p (code, 2);

      tree_vector_builder elts;
      if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  tree elt = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and return NULL_TREE */
	  if (elt == NULL_TREE)
	  elts.quick_push (elt);

      return elts.build ();

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
      tree type = TREE_TYPE (arg1);
      bool step_ok_p = distributes_over_addition_p (code, 1);
      tree_vector_builder elts;
      if (!elts.new_unary_operation (type, arg1, step_ok_p))
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	  tree elem1 = VECTOR_CST_ELT (arg1, i);

	  tree elt = const_binop (code, elem1, arg2);

	  /* It is possible that const_binop cannot handle the given
	     code and return NULL_TREE.  */
	  if (elt == NULL_TREE)
	  elts.quick_push (elt);

      return elts.build ();
/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument put those cases that need it here.  */
    case VEC_SERIES_EXPR:
      if (CONSTANT_CLASS_P (arg1)
	  && CONSTANT_CLASS_P (arg2))
	return build_vec_series (type, arg1, arg2);

      if ((TREE_CODE (arg1) == REAL_CST
	   && TREE_CODE (arg2) == REAL_CST)
	  || (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg2) == INTEGER_CST))
	return build_complex (type, arg1, arg2);

    case POINTER_DIFF_EXPR:
      if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
	  poly_offset_int res = (wi::to_poly_offset (arg1)
				 - wi::to_poly_offset (arg2));
	  return force_fit_type (type, res, 1,
				 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
	unsigned int HOST_WIDE_INT out_nelts, in_nelts, i;

	if (TREE_CODE (arg1) != VECTOR_CST
	    || TREE_CODE (arg2) != VECTOR_CST)

	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))

	out_nelts = in_nelts * 2;
	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	tree_vector_builder elts (type, out_nelts, 1);
	for (i = 0; i < out_nelts; i++)
	    tree elt = (i < in_nelts
			? VECTOR_CST_ELT (arg1, i)
			: VECTOR_CST_ELT (arg2, i - in_nelts));
	    elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
				      : code == VEC_PACK_FLOAT_EXPR
				      ? FLOAT_EXPR : FIX_TRUNC_EXPR,
				      TREE_TYPE (type), elt);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	    elts.quick_push (elt);

	return elts.build ();

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
	unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;

	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)

	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))

	out_nelts = in_nelts / 2;
	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */

	tree_vector_builder elts (type, out_nelts, 1);
	for (out = 0; out < out_nelts; out++)
	    unsigned int in = (out << scale) + ofs;
	    tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg1, in));
	    tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg2, in));

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	    tree elt = const_binop (MULT_EXPR, t1, t2);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	    elts.quick_push (elt);

	return elts.build ();

  if (TREE_CODE_CLASS (code) != tcc_binary)

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
		       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
/* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
   Return zero if computing the constants is not possible.  */

const_unop (enum tree_code code, tree type, tree arg0)
  /* Don't perform the operation, other than NEGATE and ABS, if
     flag_signaling_nans is on and the operand is a signaling NaN.  */
  if (TREE_CODE (arg0) == REAL_CST
      && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
      && code != NEGATE_EXPR
      && code != ABSU_EXPR)

    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      /* If the source address is 0, and the source address space
	 cannot have a valid object at 0, fold to dest type null.  */
      if (integer_zerop (arg0)
	  && !(targetm.addr_space.zero_address_valid
	       (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
	return fold_convert_const (code, type, arg0);

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

	/* Can't call fold_negate_const directly here as that doesn't
	   handle all cases and we might not be able to negate some
	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
	if (tem && CONSTANT_CLASS_P (tem))

      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);

      if (TREE_CODE (arg0) == COMPLEX_CST)
	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
	  return build_complex (type, TREE_REALPART (arg0), ipart);

      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      else if (POLY_INT_CST_P (arg0))
	return wide_int_to_tree (type, -poly_int_cst_value (arg0));
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)

	  /* This can cope with stepped encodings because ~x == -1 - x.  */
	  tree_vector_builder elements;
	  elements.new_unary_operation (type, arg0, true);
	  unsigned int i, count = elements.encoded_nelts ();
	  for (i = 0; i < count; ++i)
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
	      elements.quick_push (elem);
	  return elements.build ();

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return constant_boolean_node (integer_zerop (arg0), type);

      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_REALPART (arg0));

      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_IMAGPART (arg0));

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
	unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
	enum tree_code subcode;

	if (TREE_CODE (arg0) != VECTOR_CST)

	if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))

	out_nelts = in_nelts / 2;
	gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	unsigned int offset = 0;
	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR
				   || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR))

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	else if (code == VEC_UNPACK_FLOAT_LO_EXPR
		 || code == VEC_UNPACK_FLOAT_HI_EXPR)
	  subcode = FLOAT_EXPR;
	  subcode = FIX_TRUNC_EXPR;

	tree_vector_builder elts (type, out_nelts, 1);
	for (i = 0; i < out_nelts; i++)
	    tree elt = fold_convert_const (subcode, TREE_TYPE (type),
					   VECTOR_CST_ELT (arg0, i + offset));
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	    elts.quick_push (elt);

	return elts.build ();

    case VEC_DUPLICATE_EXPR:
      if (CONSTANT_CLASS_P (arg0))
	return build_vector_from_val (type, arg0);
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (poly_int64 number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),

  /* Handle the special case of two poly_int constants faster.  */
  if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	  if (integer_zerop (arg0)
	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
	  if (integer_zerop (arg1)
	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
      else if (code == MINUS_EXPR)
	  if (integer_zerop (arg1)
	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
      else if (code == MULT_EXPR)
	  if (integer_onep (arg0)
	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      tree res = int_const_binop (code, arg0, arg1, -1);
      if (res != NULL_TREE)

  return fold_build2_loc (loc, code, type, arg0, arg1);
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

size_diffop_loc (location_t loc, tree arg0, tree arg1)
  tree type = TREE_TYPE (arg0);

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
  else if (type == bitsizetype)
    ctype = sbitsizetype;
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

fold_convert_const_int_from_int (tree type, const_tree arg1)
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
			 !POINTER_TYPE_P (TREE_TYPE (arg1)),
			 TREE_OVERFLOW (arg1));
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
  bool overflow = false;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */
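  /* Hedged numeric illustration of the rules above: converting the REAL_CST
     1e30 to a 32-bit int saturates to INT_MAX (2147483647), -1e30 saturates
     to INT_MIN (-2147483648), and a NaN converts to 0, with TREE_OVERFLOW
     set on the result in each of these cases.  */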
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
      val = wi::zero (TYPE_PRECISION (type));

  /* See if R is less than the lower bound or greater than the
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (real_less (&r, &l))
	  val = wi::to_wide (lt);

      tree ut = TYPE_MAX_VALUE (type);
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (real_less (&u, &r))
	      val = wi::to_wide (ut);

    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

fold_convert_const_int_from_fixed (tree type, const_tree arg1)
  double_int temp, temp_trunc;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
      temp = temp.rshift (GET_MODE_FBIT (mode),
			  HOST_BITS_PER_DOUBLE_INT,
			  SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
				HOST_BITS_PER_DOUBLE_INT,
				SIGNED_FIXED_POINT_MODE_P (mode));

      temp = double_int_zero;
      temp_trunc = double_int_zero;

  /* If FIXED_CST is negative, we need to round the value toward 0.
     We do so by adding 1 to temp if the fractional bits are not zero.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type (type, temp, -1,
		      (temp.is_negative ()
		       && (TYPE_UNSIGNED (type)
			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
		      | TREE_OVERFLOW (arg1));
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

fold_convert_const_real_from_real (tree type, const_tree arg1)
  REAL_VALUE_TYPE value;

  /* Don't perform the operation if flag_signaling_nans is on
     and the operand is a signaling NaN.  */
  if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	   && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
	   && REAL_VALUE_ISINF (value)
	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

fold_convert_const_real_from_fixed (tree type, const_tree arg1)
  REAL_VALUE_TYPE value;

  real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
			   &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
  FIXED_VALUE_TYPE value;

  overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
			      &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

fold_convert_const_fixed_from_int (tree type, const_tree arg1)
  FIXED_VALUE_TYPE value;

  gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);

  di.low = TREE_INT_CST_ELT (arg1, 0);
  if (TREE_INT_CST_NUNITS (arg1) == 1)
    di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
    di.high = TREE_INT_CST_ELT (arg1, 1);

  overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
				       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

fold_convert_const_fixed_from_real (tree type, const_tree arg1)
  FIXED_VALUE_TYPE value;

  overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
					&TREE_REAL_CST (arg1),
					TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  tree arg_type = TREE_TYPE (arg1);
  if (arg_type == type)
    return arg1;

  /* We can't widen types, since the runtime value could overflow the
     original type before being extended to the new type.  */
  if (POLY_INT_CST_P (arg1)
      && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
      && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
    return build_poly_int_cst (type,
                               poly_wide_int::from (poly_int_cst_value (arg1),
                                                    TYPE_PRECISION (type),
                                                    TYPE_SIGN (arg_type)));

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  else if (TREE_CODE (type) == VECTOR_TYPE)
    {
      if (TREE_CODE (arg1) == VECTOR_CST
          && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
        {
          tree elttype = TREE_TYPE (type);
          tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
          /* We can't handle steps directly when extending, since the
             values need to wrap at the original precision first.  */
          bool step_ok_p
            = (INTEGRAL_TYPE_P (elttype)
               && INTEGRAL_TYPE_P (arg1_elttype)
               && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
          tree_vector_builder v;
          if (!v.new_unary_operation (type, arg1, step_ok_p))
            return NULL_TREE;
          unsigned int len = v.encoded_nelts ();
          for (unsigned int i = 0; i < len; ++i)
            {
              tree elt = VECTOR_CST_ELT (arg1, i);
              tree cvt = fold_convert_const (code, elttype, elt);
              if (cvt == NULL_TREE)
                return NULL_TREE;
              v.quick_push (cvt);
            }
          return v.build ();
        }
    }
  return NULL_TREE;
}
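
/* As an illustration of the dispatch above: fold_convert_const (FLOAT_EXPR,
   double_type_node, build_int_cst (integer_type_node, 3)) lands in the
   REAL_TYPE arm and returns the REAL_CST 3.0 via build_real_from_int_cst,
   while any combination not listed simply returns NULL_TREE and leaves the
   conversion to be represented as an explicit tree.  */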
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}
/* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      return (INTEGRAL_TYPE_P (orig)
              || (POINTER_TYPE_P (orig)
                  && TYPE_PRECISION (type) <= TYPE_PRECISION (orig))
              || TREE_CODE (orig) == OFFSET_TYPE);

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    case VECTOR_TYPE:
      return (VECTOR_TYPE_P (orig)
              && known_eq (TYPE_VECTOR_SUBPARTS (type),
                           TYPE_VECTOR_SUBPARTS (orig))
              && fold_convertible_p (TREE_TYPE (type), TREE_TYPE (orig)));

    default:
      return false;
    }
}
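
/* For instance, a pointer counts as convertible to an integer type only if
   the integer is no wider than the pointer, any integral-to-integral
   conversion qualifies, and vector types recurse element-wise while also
   requiring matching subpart counts.  */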
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */
2396 fold_convert_loc (location_t loc
, tree type
, tree arg
)
2398 tree orig
= TREE_TYPE (arg
);
2404 if (TREE_CODE (arg
) == ERROR_MARK
2405 || TREE_CODE (type
) == ERROR_MARK
2406 || TREE_CODE (orig
) == ERROR_MARK
)
2407 return error_mark_node
;
2409 switch (TREE_CODE (type
))
2412 case REFERENCE_TYPE
:
2413 /* Handle conversions between pointers to different address spaces. */
2414 if (POINTER_TYPE_P (orig
)
2415 && (TYPE_ADDR_SPACE (TREE_TYPE (type
))
2416 != TYPE_ADDR_SPACE (TREE_TYPE (orig
))))
2417 return fold_build1_loc (loc
, ADDR_SPACE_CONVERT_EXPR
, type
, arg
);
2420 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
2422 if (TREE_CODE (arg
) == INTEGER_CST
)
2424 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
2425 if (tem
!= NULL_TREE
)
2428 if (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2429 || TREE_CODE (orig
) == OFFSET_TYPE
)
2430 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2431 if (TREE_CODE (orig
) == COMPLEX_TYPE
)
2432 return fold_convert_loc (loc
, type
,
2433 fold_build1_loc (loc
, REALPART_EXPR
,
2434 TREE_TYPE (orig
), arg
));
2435 gcc_assert (TREE_CODE (orig
) == VECTOR_TYPE
2436 && tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
2437 return fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, type
, arg
);
2440 if (TREE_CODE (arg
) == INTEGER_CST
)
2442 tem
= fold_convert_const (FLOAT_EXPR
, type
, arg
);
2443 if (tem
!= NULL_TREE
)
2446 else if (TREE_CODE (arg
) == REAL_CST
)
2448 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
2449 if (tem
!= NULL_TREE
)
2452 else if (TREE_CODE (arg
) == FIXED_CST
)
2454 tem
= fold_convert_const (FIXED_CONVERT_EXPR
, type
, arg
);
2455 if (tem
!= NULL_TREE
)
2459 switch (TREE_CODE (orig
))
2462 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
2463 case POINTER_TYPE
: case REFERENCE_TYPE
:
2464 return fold_build1_loc (loc
, FLOAT_EXPR
, type
, arg
);
2467 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2469 case FIXED_POINT_TYPE
:
2470 return fold_build1_loc (loc
, FIXED_CONVERT_EXPR
, type
, arg
);
2473 tem
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2474 return fold_convert_loc (loc
, type
, tem
);
2480 case FIXED_POINT_TYPE
:
2481 if (TREE_CODE (arg
) == FIXED_CST
|| TREE_CODE (arg
) == INTEGER_CST
2482 || TREE_CODE (arg
) == REAL_CST
)
2484 tem
= fold_convert_const (FIXED_CONVERT_EXPR
, type
, arg
);
2485 if (tem
!= NULL_TREE
)
2486 goto fold_convert_exit
;
2489 switch (TREE_CODE (orig
))
2491 case FIXED_POINT_TYPE
:
2496 return fold_build1_loc (loc
, FIXED_CONVERT_EXPR
, type
, arg
);
2499 tem
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2500 return fold_convert_loc (loc
, type
, tem
);
2507 switch (TREE_CODE (orig
))
2510 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
2511 case POINTER_TYPE
: case REFERENCE_TYPE
:
2513 case FIXED_POINT_TYPE
:
2514 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
2515 fold_convert_loc (loc
, TREE_TYPE (type
), arg
),
2516 fold_convert_loc (loc
, TREE_TYPE (type
),
2517 integer_zero_node
));
2522 if (TREE_CODE (arg
) == COMPLEX_EXPR
)
2524 rpart
= fold_convert_loc (loc
, TREE_TYPE (type
),
2525 TREE_OPERAND (arg
, 0));
2526 ipart
= fold_convert_loc (loc
, TREE_TYPE (type
),
2527 TREE_OPERAND (arg
, 1));
2528 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
, ipart
);
2531 arg
= save_expr (arg
);
2532 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2533 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, TREE_TYPE (orig
), arg
);
2534 rpart
= fold_convert_loc (loc
, TREE_TYPE (type
), rpart
);
2535 ipart
= fold_convert_loc (loc
, TREE_TYPE (type
), ipart
);
2536 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
, ipart
);
2544 if (integer_zerop (arg
))
2545 return build_zero_vector (type
);
2546 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
2547 gcc_assert (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2548 || TREE_CODE (orig
) == VECTOR_TYPE
);
2549 return fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, type
, arg
);
2552 tem
= fold_ignored_result (arg
);
2553 return fold_build1_loc (loc
, NOP_EXPR
, type
, tem
);
2556 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig
))
2557 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2561 protected_set_expr_location_unshare (tem
, loc
);
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (const_tree x)
{
2571 /* We only need to wrap lvalue tree codes. */
2572 switch (TREE_CODE (x
))
2585 case ARRAY_RANGE_REF
:
2591 case PREINCREMENT_EXPR
:
2592 case PREDECREMENT_EXPR
:
2594 case TRY_CATCH_EXPR
:
2595 case WITH_CLEANUP_EXPR
:
2601 case VIEW_CONVERT_EXPR
:
2605 /* Assume the worst for front-end tree codes. */
2606 if ((int)TREE_CODE (x
) >= NUM_TREE_CODES
)
2614 /* Return an expr equal to X but certainly not valid as an lvalue. */
2617 non_lvalue_loc (location_t loc
, tree x
)
2619 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2624 if (! maybe_lvalue_p (x
))
2626 return build1_loc (loc
, NON_LVALUE_EXPR
, TREE_TYPE (x
), x
);
2629 /* When pedantic, return an expr equal to X but certainly not valid as a
2630 pedantic lvalue. Otherwise, return X. */
2633 pedantic_non_lvalue_loc (location_t loc
, tree x
)
2635 return protected_set_expr_location_unshare (x
, loc
);
/* Given a tree comparison code, return the code that is the logical inverse.
   It is generally not safe to do this for floating-point comparisons, except
   for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
   ERROR_MARK in this case.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
2646 if (honor_nans
&& flag_trapping_math
&& code
!= EQ_EXPR
&& code
!= NE_EXPR
2647 && code
!= ORDERED_EXPR
&& code
!= UNORDERED_EXPR
)
2657 return honor_nans
? UNLE_EXPR
: LE_EXPR
;
2659 return honor_nans
? UNLT_EXPR
: LT_EXPR
;
2661 return honor_nans
? UNGE_EXPR
: GE_EXPR
;
2663 return honor_nans
? UNGT_EXPR
: GT_EXPR
;
2677 return UNORDERED_EXPR
;
2678 case UNORDERED_EXPR
:
2679 return ORDERED_EXPR
;
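
/* The UN* results above matter because, once NaNs are possible, !(x < y)
   is not x >= y: both comparisons are false when either operand is a NaN.
   The correct inverse of LT_EXPR is therefore UNGE_EXPR, and only when
   HONOR_NANS is false do we hand back the plain GE_EXPR.  */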
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
2696 case UNORDERED_EXPR
:
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
2744 return COMPCODE_ORD
;
2745 case UNORDERED_EXPR
:
2746 return COMPCODE_UNORD
;
2748 return COMPCODE_UNLT
;
2750 return COMPCODE_UNEQ
;
2752 return COMPCODE_UNLE
;
2754 return COMPCODE_UNGT
;
2756 return COMPCODE_LTGT
;
2758 return COMPCODE_UNGE
;
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
2786 return ORDERED_EXPR
;
2787 case COMPCODE_UNORD
:
2788 return UNORDERED_EXPR
;
/* Return true if COND1 tests the opposite condition of COND2.  */

bool
inverse_conditions_p (const_tree cond1, const_tree cond2)
{
  return (COMPARISON_CLASS_P (cond1)
          && COMPARISON_CLASS_P (cond2)
          && (invert_tree_comparison
              (TREE_CODE (cond1),
               HONOR_NANS (TREE_OPERAND (cond1, 0))) == TREE_CODE (cond2))
          && operand_equal_p (TREE_OPERAND (cond1, 0),
                              TREE_OPERAND (cond2, 0), 0)
          && operand_equal_p (TREE_OPERAND (cond1, 1),
                              TREE_OPERAND (cond2, 1), 0));
}
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

static tree
combine_comparisons (location_t loc,
                     enum tree_code code, enum tree_code lcode,
                     enum tree_code rcode, tree truth_type,
                     tree ll_arg, tree lr_arg)
{
2834 bool honor_nans
= HONOR_NANS (ll_arg
);
2835 enum comparison_code lcompcode
= comparison_to_compcode (lcode
);
2836 enum comparison_code rcompcode
= comparison_to_compcode (rcode
);
2841 case TRUTH_AND_EXPR
: case TRUTH_ANDIF_EXPR
:
2842 compcode
= lcompcode
& rcompcode
;
2845 case TRUTH_OR_EXPR
: case TRUTH_ORIF_EXPR
:
2846 compcode
= lcompcode
| rcompcode
;
2855 /* Eliminate unordered comparisons, as well as LTGT and ORD
2856 which are not used unless the mode has NaNs. */
2857 compcode
&= ~COMPCODE_UNORD
;
2858 if (compcode
== COMPCODE_LTGT
)
2859 compcode
= COMPCODE_NE
;
2860 else if (compcode
== COMPCODE_ORD
)
2861 compcode
= COMPCODE_TRUE
;
2863 else if (flag_trapping_math
)
2865 /* Check that the original operation and the optimized ones will trap
2866 under the same condition. */
2867 bool ltrap
= (lcompcode
& COMPCODE_UNORD
) == 0
2868 && (lcompcode
!= COMPCODE_EQ
)
2869 && (lcompcode
!= COMPCODE_ORD
);
2870 bool rtrap
= (rcompcode
& COMPCODE_UNORD
) == 0
2871 && (rcompcode
!= COMPCODE_EQ
)
2872 && (rcompcode
!= COMPCODE_ORD
);
2873 bool trap
= (compcode
& COMPCODE_UNORD
) == 0
2874 && (compcode
!= COMPCODE_EQ
)
2875 && (compcode
!= COMPCODE_ORD
);
2877 /* In a short-circuited boolean expression the LHS might be
2878 such that the RHS, if evaluated, will never trap. For
2879 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2880 if neither x nor y is NaN. (This is a mixed blessing: for
2881 example, the expression above will never trap, hence
2882 optimizing it to x < y would be invalid). */
2883 if ((code
== TRUTH_ORIF_EXPR
&& (lcompcode
& COMPCODE_UNORD
))
2884 || (code
== TRUTH_ANDIF_EXPR
&& !(lcompcode
& COMPCODE_UNORD
)))
2887 /* If the comparison was short-circuited, and only the RHS
2888 trapped, we may now generate a spurious trap. */
2890 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
2893 /* If we changed the conditions that cause a trap, we lose. */
2894 if ((ltrap
|| rtrap
) != trap
)
2898 if (compcode
== COMPCODE_TRUE
)
2899 return constant_boolean_node (true, truth_type
);
2900 else if (compcode
== COMPCODE_FALSE
)
2901 return constant_boolean_node (false, truth_type
);
2904 enum tree_code tcode
;
2906 tcode
= compcode_to_comparison ((enum comparison_code
) compcode
);
2907 return fold_build2_loc (loc
, tcode
, truth_type
, ll_arg
, lr_arg
);
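
/* Worked example: for (x <= y) && (x >= y), LCOMPCODE is LT|EQ and
   RCOMPCODE is GT|EQ, so ANDing them leaves only the EQ bit and, when the
   mode has no NaNs, compcode_to_comparison turns the combination into the
   single test x == y.  ORing them instead yields LT|EQ|GT, i.e. ORDERED,
   which without NaNs folds further to constant true.  */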
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc. must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.

   If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
   not values of expressions.

   If OEP_LEXICOGRAPHIC is set, then also handle expressions with side effects
   such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.

   If OEP_BITWISE is set, then require the values to be bitwise identical
   rather than simply numerically equal.  Do not take advantage of things
   like math-related flags or undefined behavior; only return true for
   values that are provably bitwise identical in all circumstances.

   Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
   any operand with side effects.  This is unnecessarily conservative in the
   case we know that arg0 and arg1 are in disjoint code paths (such as in
   the ?: operator).  In addition, OEP_MATCH_SIDE_EFFECTS is used when
   comparing addresses with the TREE_CONSTANT flag set, so we know that
   &var == &var even if var is volatile.  */
2955 operand_compare::operand_equal_p (const_tree arg0
, const_tree arg1
,
2959 if (verify_hash_value (arg0
, arg1
, flags
, &r
))
2962 STRIP_ANY_LOCATION_WRAPPER (arg0
);
2963 STRIP_ANY_LOCATION_WRAPPER (arg1
);
2965 /* If either is ERROR_MARK, they aren't equal. */
2966 if (TREE_CODE (arg0
) == ERROR_MARK
|| TREE_CODE (arg1
) == ERROR_MARK
2967 || TREE_TYPE (arg0
) == error_mark_node
2968 || TREE_TYPE (arg1
) == error_mark_node
)
2971 /* Similar, if either does not have a type (like a template id),
2972 they aren't equal. */
2973 if (!TREE_TYPE (arg0
) || !TREE_TYPE (arg1
))
2976 /* Bitwise identity makes no sense if the values have different layouts. */
2977 if ((flags
& OEP_BITWISE
)
2978 && !tree_nop_conversion_p (TREE_TYPE (arg0
), TREE_TYPE (arg1
)))
2981 /* We cannot consider pointers to different address space equal. */
2982 if (POINTER_TYPE_P (TREE_TYPE (arg0
))
2983 && POINTER_TYPE_P (TREE_TYPE (arg1
))
2984 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0
)))
2985 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1
)))))
2988 /* Check equality of integer constants before bailing out due to
2989 precision differences. */
2990 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
2992 /* Address of INTEGER_CST is not defined; check that we did not forget
2993 to drop the OEP_ADDRESS_OF flags. */
2994 gcc_checking_assert (!(flags
& OEP_ADDRESS_OF
));
2995 return tree_int_cst_equal (arg0
, arg1
);
2998 if (!(flags
& OEP_ADDRESS_OF
))
3000 /* If both types don't have the same signedness, then we can't consider
3001 them equal. We must check this before the STRIP_NOPS calls
3002 because they may change the signedness of the arguments. As pointers
3003 strictly don't have a signedness, require either two pointers or
3004 two non-pointers as well. */
3005 if (TYPE_UNSIGNED (TREE_TYPE (arg0
)) != TYPE_UNSIGNED (TREE_TYPE (arg1
))
3006 || POINTER_TYPE_P (TREE_TYPE (arg0
))
3007 != POINTER_TYPE_P (TREE_TYPE (arg1
)))
3010 /* If both types don't have the same precision, then it is not safe
3012 if (element_precision (TREE_TYPE (arg0
))
3013 != element_precision (TREE_TYPE (arg1
)))
3020 /* FIXME: Fortran FE currently produce ADDR_EXPR of NOP_EXPR. Enable the
3021 sanity check once the issue is solved. */
3023 /* Addresses of conversions and SSA_NAMEs (and many other things)
3024 are not defined. Check that we did not forget to drop the
3025 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
3026 gcc_checking_assert (!CONVERT_EXPR_P (arg0
) && !CONVERT_EXPR_P (arg1
)
3027 && TREE_CODE (arg0
) != SSA_NAME
);
3030 /* In case both args are comparisons but with different comparison
3031 code, try to swap the comparison operands of one arg to produce
3032 a match and compare that variant. */
3033 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
3034 && COMPARISON_CLASS_P (arg0
)
3035 && COMPARISON_CLASS_P (arg1
))
3037 enum tree_code swap_code
= swap_tree_comparison (TREE_CODE (arg1
));
3039 if (TREE_CODE (arg0
) == swap_code
)
3040 return operand_equal_p (TREE_OPERAND (arg0
, 0),
3041 TREE_OPERAND (arg1
, 1), flags
)
3042 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3043 TREE_OPERAND (arg1
, 0), flags
);
3046 if (TREE_CODE (arg0
) != TREE_CODE (arg1
))
3048 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
3049 if (CONVERT_EXPR_P (arg0
) && CONVERT_EXPR_P (arg1
))
3051 else if (flags
& OEP_ADDRESS_OF
)
3053 /* If we are interested in comparing addresses ignore
3054 MEM_REF wrappings of the base that can appear just for
3056 if (TREE_CODE (arg0
) == MEM_REF
3058 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == ADDR_EXPR
3059 && TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0) == arg1
3060 && integer_zerop (TREE_OPERAND (arg0
, 1)))
3062 else if (TREE_CODE (arg1
) == MEM_REF
3064 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == ADDR_EXPR
3065 && TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0) == arg0
3066 && integer_zerop (TREE_OPERAND (arg1
, 1)))
  /* When not checking addresses, this is needed for conversions and for
     COMPONENT_REF.  Might as well play it safe and always test this.  */
3076 if (TREE_CODE (TREE_TYPE (arg0
)) == ERROR_MARK
3077 || TREE_CODE (TREE_TYPE (arg1
)) == ERROR_MARK
3078 || (TYPE_MODE (TREE_TYPE (arg0
)) != TYPE_MODE (TREE_TYPE (arg1
))
3079 && !(flags
& OEP_ADDRESS_OF
)))
3082 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3083 We don't care about side effects in that case because the SAVE_EXPR
3084 takes care of that for us. In all other cases, two expressions are
3085 equal if they have no side effects. If we have two identical
3086 expressions with side effects that should be treated the same due
3087 to the only side effects being identical SAVE_EXPR's, that will
3088 be detected in the recursive calls below.
3089 If we are taking an invariant address of two identical objects
3090 they are necessarily equal as well. */
3091 if (arg0
== arg1
&& ! (flags
& OEP_ONLY_CONST
)
3092 && (TREE_CODE (arg0
) == SAVE_EXPR
3093 || (flags
& OEP_MATCH_SIDE_EFFECTS
)
3094 || (! TREE_SIDE_EFFECTS (arg0
) && ! TREE_SIDE_EFFECTS (arg1
))))
3097 /* Next handle constant cases, those for which we can return 1 even
3098 if ONLY_CONST is set. */
3099 if (TREE_CONSTANT (arg0
) && TREE_CONSTANT (arg1
))
3100 switch (TREE_CODE (arg0
))
3103 return tree_int_cst_equal (arg0
, arg1
);
3106 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0
),
3107 TREE_FIXED_CST (arg1
));
3110 if (real_identical (&TREE_REAL_CST (arg0
), &TREE_REAL_CST (arg1
)))
3113 if (!(flags
& OEP_BITWISE
) && !HONOR_SIGNED_ZEROS (arg0
))
3115 /* If we do not distinguish between signed and unsigned zero,
3116 consider them equal. */
3117 if (real_zerop (arg0
) && real_zerop (arg1
))
3124 if (VECTOR_CST_LOG2_NPATTERNS (arg0
)
3125 != VECTOR_CST_LOG2_NPATTERNS (arg1
))
3128 if (VECTOR_CST_NELTS_PER_PATTERN (arg0
)
3129 != VECTOR_CST_NELTS_PER_PATTERN (arg1
))
3132 unsigned int count
= vector_cst_encoded_nelts (arg0
);
3133 for (unsigned int i
= 0; i
< count
; ++i
)
3134 if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0
, i
),
3135 VECTOR_CST_ENCODED_ELT (arg1
, i
), flags
))
3141 return (operand_equal_p (TREE_REALPART (arg0
), TREE_REALPART (arg1
),
3143 && operand_equal_p (TREE_IMAGPART (arg0
), TREE_IMAGPART (arg1
),
3147 return (TREE_STRING_LENGTH (arg0
) == TREE_STRING_LENGTH (arg1
)
3148 && ! memcmp (TREE_STRING_POINTER (arg0
),
3149 TREE_STRING_POINTER (arg1
),
3150 TREE_STRING_LENGTH (arg0
)));
3153 gcc_checking_assert (!(flags
& OEP_ADDRESS_OF
));
3154 return operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0),
3155 flags
| OEP_ADDRESS_OF
3156 | OEP_MATCH_SIDE_EFFECTS
);
3158 /* In GIMPLE empty constructors are allowed in initializers of
3160 return !CONSTRUCTOR_NELTS (arg0
) && !CONSTRUCTOR_NELTS (arg1
);
3165 /* Don't handle more cases for OEP_BITWISE, since we can't guarantee that
3166 two instances of undefined behavior will give identical results. */
3167 if (flags
& (OEP_ONLY_CONST
| OEP_BITWISE
))
3170 /* Define macros to test an operand from arg0 and arg1 for equality and a
3171 variant that allows null and views null as being different from any
3172 non-null value. In the latter case, if either is null, the both
3173 must be; otherwise, do the normal comparison. */
3174 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3175 TREE_OPERAND (arg1, N), flags)
3177 #define OP_SAME_WITH_NULL(N) \
3178 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3179 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3181 switch (TREE_CODE_CLASS (TREE_CODE (arg0
)))
3184 /* Two conversions are equal only if signedness and modes match. */
3185 switch (TREE_CODE (arg0
))
3188 case FIX_TRUNC_EXPR
:
3189 if (TYPE_UNSIGNED (TREE_TYPE (arg0
))
3190 != TYPE_UNSIGNED (TREE_TYPE (arg1
)))
3200 case tcc_comparison
:
3202 if (OP_SAME (0) && OP_SAME (1))
3205 /* For commutative ops, allow the other order. */
3206 return (commutative_tree_code (TREE_CODE (arg0
))
3207 && operand_equal_p (TREE_OPERAND (arg0
, 0),
3208 TREE_OPERAND (arg1
, 1), flags
)
3209 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3210 TREE_OPERAND (arg1
, 0), flags
));
3213 /* If either of the pointer (or reference) expressions we are
3214 dereferencing contain a side effect, these cannot be equal,
3215 but their addresses can be. */
3216 if ((flags
& OEP_MATCH_SIDE_EFFECTS
) == 0
3217 && (TREE_SIDE_EFFECTS (arg0
)
3218 || TREE_SIDE_EFFECTS (arg1
)))
3221 switch (TREE_CODE (arg0
))
3224 if (!(flags
& OEP_ADDRESS_OF
))
3226 if (TYPE_ALIGN (TREE_TYPE (arg0
))
3227 != TYPE_ALIGN (TREE_TYPE (arg1
)))
3229 /* Verify that the access types are compatible. */
3230 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg0
))
3231 != TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)))
3234 flags
&= ~OEP_ADDRESS_OF
;
3238 /* Require the same offset. */
3239 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0
)),
3240 TYPE_SIZE (TREE_TYPE (arg1
)),
3241 flags
& ~OEP_ADDRESS_OF
))
3246 case VIEW_CONVERT_EXPR
:
3249 case TARGET_MEM_REF
:
3251 if (!(flags
& OEP_ADDRESS_OF
))
3253 /* Require equal access sizes */
3254 if (TYPE_SIZE (TREE_TYPE (arg0
)) != TYPE_SIZE (TREE_TYPE (arg1
))
3255 && (!TYPE_SIZE (TREE_TYPE (arg0
))
3256 || !TYPE_SIZE (TREE_TYPE (arg1
))
3257 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0
)),
3258 TYPE_SIZE (TREE_TYPE (arg1
)),
3261 /* Verify that access happens in similar types. */
3262 if (!types_compatible_p (TREE_TYPE (arg0
), TREE_TYPE (arg1
)))
3264 /* Verify that accesses are TBAA compatible. */
3265 if (!alias_ptr_types_compatible_p
3266 (TREE_TYPE (TREE_OPERAND (arg0
, 1)),
3267 TREE_TYPE (TREE_OPERAND (arg1
, 1)))
3268 || (MR_DEPENDENCE_CLIQUE (arg0
)
3269 != MR_DEPENDENCE_CLIQUE (arg1
))
3270 || (MR_DEPENDENCE_BASE (arg0
)
3271 != MR_DEPENDENCE_BASE (arg1
)))
3273 /* Verify that alignment is compatible. */
3274 if (TYPE_ALIGN (TREE_TYPE (arg0
))
3275 != TYPE_ALIGN (TREE_TYPE (arg1
)))
3278 flags
&= ~OEP_ADDRESS_OF
;
3279 return (OP_SAME (0) && OP_SAME (1)
3280 /* TARGET_MEM_REF require equal extra operands. */
3281 && (TREE_CODE (arg0
) != TARGET_MEM_REF
3282 || (OP_SAME_WITH_NULL (2)
3283 && OP_SAME_WITH_NULL (3)
3284 && OP_SAME_WITH_NULL (4))));
3287 case ARRAY_RANGE_REF
:
3290 flags
&= ~OEP_ADDRESS_OF
;
3291 /* Compare the array index by value if it is constant first as we
3292 may have different types but same value here. */
3293 return ((tree_int_cst_equal (TREE_OPERAND (arg0
, 1),
3294 TREE_OPERAND (arg1
, 1))
3296 && OP_SAME_WITH_NULL (2)
3297 && OP_SAME_WITH_NULL (3)
3298 /* Compare low bound and element size as with OEP_ADDRESS_OF
3299 we have to account for the offset of the ref. */
3300 && (TREE_TYPE (TREE_OPERAND (arg0
, 0))
3301 == TREE_TYPE (TREE_OPERAND (arg1
, 0))
3302 || (operand_equal_p (array_ref_low_bound
3303 (CONST_CAST_TREE (arg0
)),
3305 (CONST_CAST_TREE (arg1
)), flags
)
3306 && operand_equal_p (array_ref_element_size
3307 (CONST_CAST_TREE (arg0
)),
3308 array_ref_element_size
3309 (CONST_CAST_TREE (arg1
)),
3313 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3314 may be NULL when we're called to compare MEM_EXPRs. */
3315 if (!OP_SAME_WITH_NULL (0)
3318 flags
&= ~OEP_ADDRESS_OF
;
3319 return OP_SAME_WITH_NULL (2);
3324 flags
&= ~OEP_ADDRESS_OF
;
3325 return OP_SAME (1) && OP_SAME (2);
3327 /* Virtual table call. */
3330 if (!operand_equal_p (OBJ_TYPE_REF_EXPR (arg0
),
3331 OBJ_TYPE_REF_EXPR (arg1
), flags
))
3333 if (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg0
))
3334 != tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg1
)))
3336 if (!operand_equal_p (OBJ_TYPE_REF_OBJECT (arg0
),
3337 OBJ_TYPE_REF_OBJECT (arg1
), flags
))
3339 if (!types_same_for_odr (obj_type_ref_class (arg0
),
3340 obj_type_ref_class (arg1
)))
3349 case tcc_expression
:
3350 switch (TREE_CODE (arg0
))
3353 /* Be sure we pass right ADDRESS_OF flag. */
3354 gcc_checking_assert (!(flags
& OEP_ADDRESS_OF
));
3355 return operand_equal_p (TREE_OPERAND (arg0
, 0),
3356 TREE_OPERAND (arg1
, 0),
3357 flags
| OEP_ADDRESS_OF
);
3359 case TRUTH_NOT_EXPR
:
3362 case TRUTH_ANDIF_EXPR
:
3363 case TRUTH_ORIF_EXPR
:
3364 return OP_SAME (0) && OP_SAME (1);
3366 case WIDEN_MULT_PLUS_EXPR
:
3367 case WIDEN_MULT_MINUS_EXPR
:
      /* The multiplication operands are commutative.  */
3373 case TRUTH_AND_EXPR
:
3375 case TRUTH_XOR_EXPR
:
3376 if (OP_SAME (0) && OP_SAME (1))
3379 /* Otherwise take into account this is a commutative operation. */
3380 return (operand_equal_p (TREE_OPERAND (arg0
, 0),
3381 TREE_OPERAND (arg1
, 1), flags
)
3382 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3383 TREE_OPERAND (arg1
, 0), flags
));
3386 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3388 flags
&= ~OEP_ADDRESS_OF
;
3391 case BIT_INSERT_EXPR
:
      /* BIT_INSERT_EXPR has an implicit operand as the type precision
         of op1.  Need to check to make sure they are the same.  */
3394 if (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
3395 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
3396 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0
, 1)))
3397 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1
, 1))))
3403 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3408 case PREDECREMENT_EXPR
:
3409 case PREINCREMENT_EXPR
:
3410 case POSTDECREMENT_EXPR
:
3411 case POSTINCREMENT_EXPR
:
3412 if (flags
& OEP_LEXICOGRAPHIC
)
3413 return OP_SAME (0) && OP_SAME (1);
3416 case CLEANUP_POINT_EXPR
:
3419 if (flags
& OEP_LEXICOGRAPHIC
)
3428 switch (TREE_CODE (arg0
))
3431 if ((CALL_EXPR_FN (arg0
) == NULL_TREE
)
3432 != (CALL_EXPR_FN (arg1
) == NULL_TREE
))
3433 /* If not both CALL_EXPRs are either internal or normal function
3434 functions, then they are not equal. */
3436 else if (CALL_EXPR_FN (arg0
) == NULL_TREE
)
3438 /* If the CALL_EXPRs call different internal functions, then they
3440 if (CALL_EXPR_IFN (arg0
) != CALL_EXPR_IFN (arg1
))
3445 /* If the CALL_EXPRs call different functions, then they are not
3447 if (! operand_equal_p (CALL_EXPR_FN (arg0
), CALL_EXPR_FN (arg1
),
3452 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3454 unsigned int cef
= call_expr_flags (arg0
);
3455 if (flags
& OEP_PURE_SAME
)
3456 cef
&= ECF_CONST
| ECF_PURE
;
3459 if (!cef
&& !(flags
& OEP_LEXICOGRAPHIC
))
3463 /* Now see if all the arguments are the same. */
3465 const_call_expr_arg_iterator iter0
, iter1
;
3467 for (a0
= first_const_call_expr_arg (arg0
, &iter0
),
3468 a1
= first_const_call_expr_arg (arg1
, &iter1
);
3470 a0
= next_const_call_expr_arg (&iter0
),
3471 a1
= next_const_call_expr_arg (&iter1
))
3472 if (! operand_equal_p (a0
, a1
, flags
))
3475 /* If we get here and both argument lists are exhausted
3476 then the CALL_EXPRs are equal. */
3477 return ! (a0
|| a1
);
3483 case tcc_declaration
:
3484 /* Consider __builtin_sqrt equal to sqrt. */
3485 return (TREE_CODE (arg0
) == FUNCTION_DECL
3486 && fndecl_built_in_p (arg0
) && fndecl_built_in_p (arg1
)
3487 && DECL_BUILT_IN_CLASS (arg0
) == DECL_BUILT_IN_CLASS (arg1
)
3488 && (DECL_UNCHECKED_FUNCTION_CODE (arg0
)
3489 == DECL_UNCHECKED_FUNCTION_CODE (arg1
)));
3491 case tcc_exceptional
:
3492 if (TREE_CODE (arg0
) == CONSTRUCTOR
)
3494 if (CONSTRUCTOR_NO_CLEARING (arg0
) != CONSTRUCTOR_NO_CLEARING (arg1
))
3497 /* In GIMPLE constructors are used only to build vectors from
3498 elements. Individual elements in the constructor must be
3499 indexed in increasing order and form an initial sequence.
3501 We make no effort to compare constructors in generic.
3502 (see sem_variable::equals in ipa-icf which can do so for
3504 if (!VECTOR_TYPE_P (TREE_TYPE (arg0
))
3505 || !VECTOR_TYPE_P (TREE_TYPE (arg1
)))
3508 /* Be sure that vectors constructed have the same representation.
3509 We only tested element precision and modes to match.
3510 Vectors may be BLKmode and thus also check that the number of
3512 if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)),
3513 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
))))
3516 vec
<constructor_elt
, va_gc
> *v0
= CONSTRUCTOR_ELTS (arg0
);
3517 vec
<constructor_elt
, va_gc
> *v1
= CONSTRUCTOR_ELTS (arg1
);
3518 unsigned int len
= vec_safe_length (v0
);
3520 if (len
!= vec_safe_length (v1
))
3523 for (unsigned int i
= 0; i
< len
; i
++)
3525 constructor_elt
*c0
= &(*v0
)[i
];
3526 constructor_elt
*c1
= &(*v1
)[i
];
3528 if (!operand_equal_p (c0
->value
, c1
->value
, flags
)
3529 /* In GIMPLE the indexes can be either NULL or matching i.
3530 Double check this so we won't get false
3531 positives for GENERIC. */
3533 && (TREE_CODE (c0
->index
) != INTEGER_CST
3534 || compare_tree_int (c0
->index
, i
)))
3536 && (TREE_CODE (c1
->index
) != INTEGER_CST
3537 || compare_tree_int (c1
->index
, i
))))
3542 else if (TREE_CODE (arg0
) == STATEMENT_LIST
3543 && (flags
& OEP_LEXICOGRAPHIC
))
3545 /* Compare the STATEMENT_LISTs. */
3546 tree_stmt_iterator tsi1
, tsi2
;
3547 tree body1
= CONST_CAST_TREE (arg0
);
3548 tree body2
= CONST_CAST_TREE (arg1
);
3549 for (tsi1
= tsi_start (body1
), tsi2
= tsi_start (body2
); ;
3550 tsi_next (&tsi1
), tsi_next (&tsi2
))
3552 /* The lists don't have the same number of statements. */
3553 if (tsi_end_p (tsi1
) ^ tsi_end_p (tsi2
))
3555 if (tsi_end_p (tsi1
) && tsi_end_p (tsi2
))
3557 if (!operand_equal_p (tsi_stmt (tsi1
), tsi_stmt (tsi2
),
3558 flags
& (OEP_LEXICOGRAPHIC
3559 | OEP_NO_HASH_CHECK
)))
3566 switch (TREE_CODE (arg0
))
3569 if (flags
& OEP_LEXICOGRAPHIC
)
3570 return OP_SAME_WITH_NULL (0);
3572 case DEBUG_BEGIN_STMT
:
3573 if (flags
& OEP_LEXICOGRAPHIC
)
3585 #undef OP_SAME_WITH_NULL
/* Generate a hash value for an expression.  This can be used iteratively
   by passing a previous result as the HSTATE argument.  */
3592 operand_compare::hash_operand (const_tree t
, inchash::hash
&hstate
,
3596 enum tree_code code
;
3597 enum tree_code_class tclass
;
3599 if (t
== NULL_TREE
|| t
== error_mark_node
)
3601 hstate
.merge_hash (0);
3605 STRIP_ANY_LOCATION_WRAPPER (t
);
3607 if (!(flags
& OEP_ADDRESS_OF
))
3610 code
= TREE_CODE (t
);
3614 /* Alas, constants aren't shared, so we can't rely on pointer
3617 hstate
.merge_hash (0);
3620 gcc_checking_assert (!(flags
& OEP_ADDRESS_OF
));
3621 for (i
= 0; i
< TREE_INT_CST_EXT_NUNITS (t
); i
++)
3622 hstate
.add_hwi (TREE_INT_CST_ELT (t
, i
));
3627 if (!HONOR_SIGNED_ZEROS (t
) && real_zerop (t
))
3630 val2
= real_hash (TREE_REAL_CST_PTR (t
));
3631 hstate
.merge_hash (val2
);
3636 unsigned int val2
= fixed_hash (TREE_FIXED_CST_PTR (t
));
3637 hstate
.merge_hash (val2
);
3641 hstate
.add ((const void *) TREE_STRING_POINTER (t
),
3642 TREE_STRING_LENGTH (t
));
3645 hash_operand (TREE_REALPART (t
), hstate
, flags
);
3646 hash_operand (TREE_IMAGPART (t
), hstate
, flags
);
3650 hstate
.add_int (VECTOR_CST_NPATTERNS (t
));
3651 hstate
.add_int (VECTOR_CST_NELTS_PER_PATTERN (t
));
3652 unsigned int count
= vector_cst_encoded_nelts (t
);
3653 for (unsigned int i
= 0; i
< count
; ++i
)
3654 hash_operand (VECTOR_CST_ENCODED_ELT (t
, i
), hstate
, flags
);
3658 /* We can just compare by pointer. */
3659 hstate
.add_hwi (SSA_NAME_VERSION (t
));
3661 case PLACEHOLDER_EXPR
:
3662 /* The node itself doesn't matter. */
3669 /* A list of expressions, for a CALL_EXPR or as the elements of a
3671 for (; t
; t
= TREE_CHAIN (t
))
3672 hash_operand (TREE_VALUE (t
), hstate
, flags
);
3676 unsigned HOST_WIDE_INT idx
;
3678 flags
&= ~OEP_ADDRESS_OF
;
3679 hstate
.add_int (CONSTRUCTOR_NO_CLEARING (t
));
3680 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t
), idx
, field
, value
)
3682 /* In GIMPLE the indexes can be either NULL or matching i. */
3683 if (field
== NULL_TREE
)
3684 field
= bitsize_int (idx
);
3685 hash_operand (field
, hstate
, flags
);
3686 hash_operand (value
, hstate
, flags
);
3690 case STATEMENT_LIST
:
3692 tree_stmt_iterator i
;
3693 for (i
= tsi_start (CONST_CAST_TREE (t
));
3694 !tsi_end_p (i
); tsi_next (&i
))
3695 hash_operand (tsi_stmt (i
), hstate
, flags
);
3699 for (i
= 0; i
< TREE_VEC_LENGTH (t
); ++i
)
3700 hash_operand (TREE_VEC_ELT (t
, i
), hstate
, flags
);
3702 case IDENTIFIER_NODE
:
3703 hstate
.add_object (IDENTIFIER_HASH_VALUE (t
));
3706 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
3707 Otherwise nodes that compare equal according to operand_equal_p might
3708 get different hash codes. However, don't do this for machine specific
3709 or front end builtins, since the function code is overloaded in those
3711 if (DECL_BUILT_IN_CLASS (t
) == BUILT_IN_NORMAL
3712 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t
)))
3714 t
= builtin_decl_explicit (DECL_FUNCTION_CODE (t
));
3715 code
= TREE_CODE (t
);
3719 if (POLY_INT_CST_P (t
))
3721 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
3722 hstate
.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t
, i
)));
3725 tclass
= TREE_CODE_CLASS (code
);
3727 if (tclass
== tcc_declaration
)
3729 /* DECL's have a unique ID */
3730 hstate
.add_hwi (DECL_UID (t
));
3732 else if (tclass
== tcc_comparison
&& !commutative_tree_code (code
))
3734 /* For comparisons that can be swapped, use the lower
3736 enum tree_code ccode
= swap_tree_comparison (code
);
3739 hstate
.add_object (ccode
);
3740 hash_operand (TREE_OPERAND (t
, ccode
!= code
), hstate
, flags
);
3741 hash_operand (TREE_OPERAND (t
, ccode
== code
), hstate
, flags
);
3743 else if (CONVERT_EXPR_CODE_P (code
))
3745 /* NOP_EXPR and CONVERT_EXPR are considered equal by
3747 enum tree_code ccode
= NOP_EXPR
;
3748 hstate
.add_object (ccode
);
3750 /* Don't hash the type, that can lead to having nodes which
3751 compare equal according to operand_equal_p, but which
3752 have different hash codes. Make sure to include signedness
3753 in the hash computation. */
3754 hstate
.add_int (TYPE_UNSIGNED (TREE_TYPE (t
)));
3755 hash_operand (TREE_OPERAND (t
, 0), hstate
, flags
);
3757 /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
3758 else if (code
== MEM_REF
3759 && (flags
& OEP_ADDRESS_OF
) != 0
3760 && TREE_CODE (TREE_OPERAND (t
, 0)) == ADDR_EXPR
3761 && DECL_P (TREE_OPERAND (TREE_OPERAND (t
, 0), 0))
3762 && integer_zerop (TREE_OPERAND (t
, 1)))
3763 hash_operand (TREE_OPERAND (TREE_OPERAND (t
, 0), 0),
3765 /* Don't ICE on FE specific trees, or their arguments etc.
3766 during operand_equal_p hash verification. */
3767 else if (!IS_EXPR_CODE_CLASS (tclass
))
3768 gcc_assert (flags
& OEP_HASH_CHECK
);
3771 unsigned int sflags
= flags
;
3773 hstate
.add_object (code
);
3778 gcc_checking_assert (!(flags
& OEP_ADDRESS_OF
));
3779 flags
|= OEP_ADDRESS_OF
;
3785 case TARGET_MEM_REF
:
3786 flags
&= ~OEP_ADDRESS_OF
;
3791 case ARRAY_RANGE_REF
:
3794 sflags
&= ~OEP_ADDRESS_OF
;
3798 flags
&= ~OEP_ADDRESS_OF
;
3801 case WIDEN_MULT_PLUS_EXPR
:
3802 case WIDEN_MULT_MINUS_EXPR
:
3804 /* The multiplication operands are commutative. */
3805 inchash::hash one
, two
;
3806 hash_operand (TREE_OPERAND (t
, 0), one
, flags
);
3807 hash_operand (TREE_OPERAND (t
, 1), two
, flags
);
3808 hstate
.add_commutative (one
, two
);
3809 hash_operand (TREE_OPERAND (t
, 2), two
, flags
);
3814 if (CALL_EXPR_FN (t
) == NULL_TREE
)
3815 hstate
.add_int (CALL_EXPR_IFN (t
));
3819 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
3820 Usually different TARGET_EXPRs just should use
3821 different temporaries in their slots. */
3822 hash_operand (TARGET_EXPR_SLOT (t
), hstate
, flags
);
3825 /* Virtual table call. */
3827 inchash::add_expr (OBJ_TYPE_REF_EXPR (t
), hstate
, flags
);
3828 inchash::add_expr (OBJ_TYPE_REF_TOKEN (t
), hstate
, flags
);
3829 inchash::add_expr (OBJ_TYPE_REF_OBJECT (t
), hstate
, flags
);
3835 /* Don't hash the type, that can lead to having nodes which
3836 compare equal according to operand_equal_p, but which
3837 have different hash codes. */
3838 if (code
== NON_LVALUE_EXPR
)
          /* Make sure to include signedness in the hash computation.  */
3841 hstate
.add_int (TYPE_UNSIGNED (TREE_TYPE (t
)));
3842 hash_operand (TREE_OPERAND (t
, 0), hstate
, flags
);
3845 else if (commutative_tree_code (code
))
3847 /* It's a commutative expression. We want to hash it the same
3848 however it appears. We do this by first hashing both operands
3849 and then rehashing based on the order of their independent
3851 inchash::hash one
, two
;
3852 hash_operand (TREE_OPERAND (t
, 0), one
, flags
);
3853 hash_operand (TREE_OPERAND (t
, 1), two
, flags
);
3854 hstate
.add_commutative (one
, two
);
3857 for (i
= TREE_OPERAND_LENGTH (t
) - 1; i
>= 0; --i
)
3858 hash_operand (TREE_OPERAND (t
, i
), hstate
,
3859 i
== 0 ? flags
: sflags
);
3866 operand_compare::verify_hash_value (const_tree arg0
, const_tree arg1
,
3867 unsigned int flags
, bool *ret
)
3869 /* When checking, verify at the outermost operand_equal_p call that
3870 if operand_equal_p returns non-zero then ARG0 and ARG1 has the same
3872 if (flag_checking
&& !(flags
& OEP_NO_HASH_CHECK
))
3874 if (operand_equal_p (arg0
, arg1
, flags
| OEP_NO_HASH_CHECK
))
3878 inchash::hash
hstate0 (0), hstate1 (0);
3879 hash_operand (arg0
, hstate0
, flags
| OEP_HASH_CHECK
);
3880 hash_operand (arg1
, hstate1
, flags
| OEP_HASH_CHECK
);
3881 hashval_t h0
= hstate0
.end ();
3882 hashval_t h1
= hstate1
.end ();
3883 gcc_assert (h0
== h1
);
3897 static operand_compare default_compare_instance
;
/* Convenience wrapper around the operand_compare class because usually we
   do not need to play with the valueizer.  */
3903 operand_equal_p (const_tree arg0
, const_tree arg1
, unsigned int flags
)
3905 return default_compare_instance
.operand_equal_p (arg0
, arg1
, flags
);
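
/* Usage sketch: for side-effect-free A and B, operand_equal_p (A + B, B + A, 0)
   is true because commutative codes are tried in both operand orders, whereas
   the same call with OEP_ONLY_CONST is false, since that flag stops any
   structural recursion once the constant cases are exhausted.  */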
/* Generate a hash value for an expression.  This can be used iteratively
   by passing a previous result as the HSTATE argument.

   This function is intended to produce the same hash for expressions which
   would compare equal using operand_equal_p.  */
3917 add_expr (const_tree t
, inchash::hash
&hstate
, unsigned int flags
)
3919 default_compare_instance
.hash_operand (t
, hstate
, flags
);
/* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
   with a different signedness or a narrower precision.  */
3928 operand_equal_for_comparison_p (tree arg0
, tree arg1
)
3930 if (operand_equal_p (arg0
, arg1
, 0))
3933 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
3934 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1
)))
3937 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3938 and see if the inner values are the same. This removes any
3939 signedness comparison, which doesn't matter here. */
3944 if (operand_equal_p (op0
, op1
, 0))
3947 /* Discard a single widening conversion from ARG1 and see if the inner
3948 value is the same as ARG0. */
3949 if (CONVERT_EXPR_P (arg1
)
3950 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1
, 0)))
3951 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1
, 0)))
3952 < TYPE_PRECISION (TREE_TYPE (arg1
))
3953 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.

   If this is true, return 1.  Otherwise, return zero.  */
3969 twoval_comparison_p (tree arg
, tree
*cval1
, tree
*cval2
)
3971 enum tree_code code
= TREE_CODE (arg
);
3972 enum tree_code_class tclass
= TREE_CODE_CLASS (code
);
3974 /* We can handle some of the tcc_expression cases here. */
3975 if (tclass
== tcc_expression
&& code
== TRUTH_NOT_EXPR
)
3977 else if (tclass
== tcc_expression
3978 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
3979 || code
== COMPOUND_EXPR
))
3980 tclass
= tcc_binary
;
3985 return twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
);
3988 return (twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
)
3989 && twoval_comparison_p (TREE_OPERAND (arg
, 1), cval1
, cval2
));
3994 case tcc_expression
:
3995 if (code
== COND_EXPR
)
3996 return (twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
)
3997 && twoval_comparison_p (TREE_OPERAND (arg
, 1), cval1
, cval2
)
3998 && twoval_comparison_p (TREE_OPERAND (arg
, 2), cval1
, cval2
));
4001 case tcc_comparison
:
4002 /* First see if we can handle the first operand, then the second. For
4003 the second operand, we know *CVAL1 can't be zero. It must be that
4004 one side of the comparison is each of the values; test for the
4005 case where this isn't true by failing if the two operands
4008 if (operand_equal_p (TREE_OPERAND (arg
, 0),
4009 TREE_OPERAND (arg
, 1), 0))
4013 *cval1
= TREE_OPERAND (arg
, 0);
4014 else if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 0), 0))
4016 else if (*cval2
== 0)
4017 *cval2
= TREE_OPERAND (arg
, 0);
4018 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 0), 0))
4023 if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 1), 0))
4025 else if (*cval2
== 0)
4026 *cval2
= TREE_OPERAND (arg
, 1);
4027 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 1), 0))
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */
4045 eval_subst (location_t loc
, tree arg
, tree old0
, tree new0
,
4046 tree old1
, tree new1
)
4048 tree type
= TREE_TYPE (arg
);
4049 enum tree_code code
= TREE_CODE (arg
);
4050 enum tree_code_class tclass
= TREE_CODE_CLASS (code
);
4052 /* We can handle some of the tcc_expression cases here. */
4053 if (tclass
== tcc_expression
&& code
== TRUTH_NOT_EXPR
)
4055 else if (tclass
== tcc_expression
4056 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
4057 tclass
= tcc_binary
;
4062 return fold_build1_loc (loc
, code
, type
,
4063 eval_subst (loc
, TREE_OPERAND (arg
, 0),
4064 old0
, new0
, old1
, new1
));
4067 return fold_build2_loc (loc
, code
, type
,
4068 eval_subst (loc
, TREE_OPERAND (arg
, 0),
4069 old0
, new0
, old1
, new1
),
4070 eval_subst (loc
, TREE_OPERAND (arg
, 1),
4071 old0
, new0
, old1
, new1
));
4073 case tcc_expression
:
4077 return eval_subst (loc
, TREE_OPERAND (arg
, 0), old0
, new0
,
4081 return eval_subst (loc
, TREE_OPERAND (arg
, 1), old0
, new0
,
4085 return fold_build3_loc (loc
, code
, type
,
4086 eval_subst (loc
, TREE_OPERAND (arg
, 0),
4087 old0
, new0
, old1
, new1
),
4088 eval_subst (loc
, TREE_OPERAND (arg
, 1),
4089 old0
, new0
, old1
, new1
),
4090 eval_subst (loc
, TREE_OPERAND (arg
, 2),
4091 old0
, new0
, old1
, new1
));
4095 /* Fall through - ??? */
4097 case tcc_comparison
:
4099 tree arg0
= TREE_OPERAND (arg
, 0);
4100 tree arg1
= TREE_OPERAND (arg
, 1);
4102 /* We need to check both for exact equality and tree equality. The
4103 former will be true if the operand has a side-effect. In that
4104 case, we know the operand occurred exactly once. */
4106 if (arg0
== old0
|| operand_equal_p (arg0
, old0
, 0))
4108 else if (arg0
== old1
|| operand_equal_p (arg0
, old1
, 0))
4111 if (arg1
== old0
|| operand_equal_p (arg1
, old0
, 0))
4113 else if (arg1
== old1
|| operand_equal_p (arg1
, old1
, 0))
4116 return fold_build2_loc (loc
, code
, type
, arg0
, arg1
);
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement cast to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1_loc (loc, NOP_EXPR, void_type_node,
                       fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2_loc (loc, COMPOUND_EXPR, type,
                       fold_ignored_result (omitted), t);

  return non_lvalue_loc (loc, t);
}
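
/* For instance, when fold turns f () * 0 into 0 and f () has side effects,
   omit_one_operand_loc (loc, type, integer_zero_node, <call to f>) returns
   COMPOUND_EXPR <f (), 0>, so the call is still evaluated even though its
   value no longer participates in the result.  */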
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands_loc (location_t loc, tree type, tree result,
                       tree omitted1, tree omitted2)
{
  tree t = fold_convert_loc (loc, type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
  if (TREE_SIDE_EFFECTS (omitted1))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
}
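
/* Because OMITTED2 is wrapped first and OMITTED1 last, the result nests as
   COMPOUND_EXPR <omitted1, COMPOUND_EXPR <omitted2, t>> when both have side
   effects, giving exactly the OMITTED1-before-OMITTED2 evaluation order
   documented above.  */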
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */
4181 fold_truth_not_expr (location_t loc
, tree arg
)
4183 tree type
= TREE_TYPE (arg
);
4184 enum tree_code code
= TREE_CODE (arg
);
4185 location_t loc1
, loc2
;
4187 /* If this is a comparison, we can simply invert it, except for
4188 floating-point non-equality comparisons, in which case we just
4189 enclose a TRUTH_NOT_EXPR around what we have. */
4191 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
4193 tree op_type
= TREE_TYPE (TREE_OPERAND (arg
, 0));
4194 if (FLOAT_TYPE_P (op_type
)
4195 && flag_trapping_math
4196 && code
!= ORDERED_EXPR
&& code
!= UNORDERED_EXPR
4197 && code
!= NE_EXPR
&& code
!= EQ_EXPR
)
4200 code
= invert_tree_comparison (code
, HONOR_NANS (op_type
));
4201 if (code
== ERROR_MARK
)
4204 tree ret
= build2_loc (loc
, code
, type
, TREE_OPERAND (arg
, 0),
4205 TREE_OPERAND (arg
, 1));
4206 if (TREE_NO_WARNING (arg
))
4207 TREE_NO_WARNING (ret
) = 1;
4214 return constant_boolean_node (integer_zerop (arg
), type
);
4216 case TRUTH_AND_EXPR
:
4217 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
4218 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
4219 return build2_loc (loc
, TRUTH_OR_EXPR
, type
,
4220 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
4221 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
4224 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
4225 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
4226 return build2_loc (loc
, TRUTH_AND_EXPR
, type
,
4227 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
4228 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
4230 case TRUTH_XOR_EXPR
:
4231 /* Here we can invert either operand. We invert the first operand
4232 unless the second operand is a TRUTH_NOT_EXPR in which case our
4233 result is the XOR of the first operand with the inside of the
4234 negation of the second operand. */
4236 if (TREE_CODE (TREE_OPERAND (arg
, 1)) == TRUTH_NOT_EXPR
)
4237 return build2_loc (loc
, TRUTH_XOR_EXPR
, type
, TREE_OPERAND (arg
, 0),
4238 TREE_OPERAND (TREE_OPERAND (arg
, 1), 0));
4240 return build2_loc (loc
, TRUTH_XOR_EXPR
, type
,
4241 invert_truthvalue_loc (loc
, TREE_OPERAND (arg
, 0)),
4242 TREE_OPERAND (arg
, 1));
4244 case TRUTH_ANDIF_EXPR
:
4245 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
4246 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
4247 return build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
4248 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
4249 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
4251 case TRUTH_ORIF_EXPR
:
4252 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
4253 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
4254 return build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
4255 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
4256 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
4258 case TRUTH_NOT_EXPR
:
4259 return TREE_OPERAND (arg
, 0);
4263 tree arg1
= TREE_OPERAND (arg
, 1);
4264 tree arg2
= TREE_OPERAND (arg
, 2);
4266 loc1
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
4267 loc2
= expr_location_or (TREE_OPERAND (arg
, 2), loc
);
4269 /* A COND_EXPR may have a throw as one operand, which
4270 then has void type. Just leave void operands
4272 return build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg
, 0),
4273 VOID_TYPE_P (TREE_TYPE (arg1
))
4274 ? arg1
: invert_truthvalue_loc (loc1
, arg1
),
4275 VOID_TYPE_P (TREE_TYPE (arg2
))
4276 ? arg2
: invert_truthvalue_loc (loc2
, arg2
));
4280 loc1
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
4281 return build2_loc (loc
, COMPOUND_EXPR
, type
,
4282 TREE_OPERAND (arg
, 0),
4283 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 1)));
4285 case NON_LVALUE_EXPR
:
4286 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
4287 return invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0));
4290 if (TREE_CODE (TREE_TYPE (arg
)) == BOOLEAN_TYPE
)
4291 return build1_loc (loc
, TRUTH_NOT_EXPR
, type
, arg
);
4296 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
4297 return build1_loc (loc
, TREE_CODE (arg
), type
,
4298 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)));
4301 if (!integer_onep (TREE_OPERAND (arg
, 1)))
4303 return build2_loc (loc
, EQ_EXPR
, type
, arg
, build_int_cst (type
, 0));
4306 return build1_loc (loc
, TRUTH_NOT_EXPR
, type
, arg
);
4308 case CLEANUP_POINT_EXPR
:
4309 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
4310 return build1_loc (loc
, CLEANUP_POINT_EXPR
, type
,
4311 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)));
/* Fold the truth-negation of ARG.  This never alters ARG itself.  We
   assume that ARG is an operation that returns a truth value (0 or 1
   for scalars, 0 or -1 for vectors).  Return the folded expression if
   folding is successful.  Otherwise, return NULL_TREE.  */
4324 fold_invert_truthvalue (location_t loc
, tree arg
)
4326 tree type
= TREE_TYPE (arg
);
4327 return fold_unary_loc (loc
, VECTOR_TYPE_P (type
)
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1 for scalars, 0 or -1 for vectors).  */
4338 invert_truthvalue_loc (location_t loc
, tree arg
)
4340 if (TREE_CODE (arg
) == ERROR_MARK
)
4343 tree type
= TREE_TYPE (arg
);
4344 return fold_build1_loc (loc
, VECTOR_TYPE_P (type
)
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero
   and uses reverse storage order if REVERSEP is nonzero.  ORIG_INNER
   is the original memory reference used to preserve the alias set of
   the access.  */
4357 make_bit_field_ref (location_t loc
, tree inner
, tree orig_inner
, tree type
,
4358 HOST_WIDE_INT bitsize
, poly_int64 bitpos
,
4359 int unsignedp
, int reversep
)
4361 tree result
, bftype
;
4363 /* Attempt not to lose the access path if possible. */
4364 if (TREE_CODE (orig_inner
) == COMPONENT_REF
)
4366 tree ninner
= TREE_OPERAND (orig_inner
, 0);
4368 poly_int64 nbitsize
, nbitpos
;
4370 int nunsignedp
, nreversep
, nvolatilep
= 0;
4371 tree base
= get_inner_reference (ninner
, &nbitsize
, &nbitpos
,
4372 &noffset
, &nmode
, &nunsignedp
,
4373 &nreversep
, &nvolatilep
);
4375 && noffset
== NULL_TREE
4376 && known_subrange_p (bitpos
, bitsize
, nbitpos
, nbitsize
)
4386 alias_set_type iset
= get_alias_set (orig_inner
);
4387 if (iset
== 0 && get_alias_set (inner
) != iset
)
4388 inner
= fold_build2 (MEM_REF
, TREE_TYPE (inner
),
4389 build_fold_addr_expr (inner
),
4390 build_int_cst (ptr_type_node
, 0));
4392 if (known_eq (bitpos
, 0) && !reversep
)
4394 tree size
= TYPE_SIZE (TREE_TYPE (inner
));
4395 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner
))
4396 || POINTER_TYPE_P (TREE_TYPE (inner
)))
4397 && tree_fits_shwi_p (size
)
4398 && tree_to_shwi (size
) == bitsize
)
4399 return fold_convert_loc (loc
, type
, inner
);
4403 if (TYPE_PRECISION (bftype
) != bitsize
4404 || TYPE_UNSIGNED (bftype
) == !unsignedp
)
4405 bftype
= build_nonstandard_integer_type (bitsize
, 0);
4407 result
= build3_loc (loc
, BIT_FIELD_REF
, bftype
, inner
,
4408 bitsize_int (bitsize
), bitsize_int (bitpos
));
4409 REF_REVERSE_STORAGE_ORDER (result
) = reversep
;
4412 result
= fold_convert_loc (loc
, type
, result
);
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */
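/* Illustrative example (not part of the original source): given a
   layout-dependent bit-field such as

     struct s { unsigned int a : 3; unsigned int b : 5; } *p;

   the constant case of this optimization can turn

     p->b == 7

   into roughly

     (*(unsigned char *) p & 0xf8) == (7 << 3)

   i.e. one load, one mask and one compare, with no extract or shift.
   The exact mask and shift depend on the target's bit-field layout and
   endianness.  */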
4438 optimize_bit_field_compare (location_t loc
, enum tree_code code
,
4439 tree compare_type
, tree lhs
, tree rhs
)
4441 poly_int64 plbitpos
, plbitsize
, rbitpos
, rbitsize
;
4442 HOST_WIDE_INT lbitpos
, lbitsize
, nbitpos
, nbitsize
;
4443 tree type
= TREE_TYPE (lhs
);
4445 int const_p
= TREE_CODE (rhs
) == INTEGER_CST
;
4446 machine_mode lmode
, rmode
;
4447 scalar_int_mode nmode
;
4448 int lunsignedp
, runsignedp
;
4449 int lreversep
, rreversep
;
4450 int lvolatilep
= 0, rvolatilep
= 0;
4451 tree linner
, rinner
= NULL_TREE
;
/* Get all the information about the extractions being done.  If the bit size
   is the same as the size of the underlying object, we aren't doing an
   extraction at all and so can do nothing.  We also don't want to
   do anything if the inner expression is a PLACEHOLDER_EXPR since we
   then will no longer be able to replace it.  */
4460 linner
= get_inner_reference (lhs
, &plbitsize
, &plbitpos
, &offset
, &lmode
,
4461 &lunsignedp
, &lreversep
, &lvolatilep
);
4463 || !known_size_p (plbitsize
)
4464 || !plbitsize
.is_constant (&lbitsize
)
4465 || !plbitpos
.is_constant (&lbitpos
)
4466 || known_eq (lbitsize
, GET_MODE_BITSIZE (lmode
))
4468 || TREE_CODE (linner
) == PLACEHOLDER_EXPR
4473 rreversep
= lreversep
;
/* If this is not a constant, we can only do something if bit positions,
   sizes, signedness and storage order are the same.  */
4479 = get_inner_reference (rhs
, &rbitsize
, &rbitpos
, &offset
, &rmode
,
4480 &runsignedp
, &rreversep
, &rvolatilep
);
4483 || maybe_ne (lbitpos
, rbitpos
)
4484 || maybe_ne (lbitsize
, rbitsize
)
4485 || lunsignedp
!= runsignedp
4486 || lreversep
!= rreversep
4488 || TREE_CODE (rinner
) == PLACEHOLDER_EXPR
4493 /* Honor the C++ memory model and mimic what RTL expansion does. */
4494 poly_uint64 bitstart
= 0;
4495 poly_uint64 bitend
= 0;
4496 if (TREE_CODE (lhs
) == COMPONENT_REF
)
4498 get_bit_range (&bitstart
, &bitend
, lhs
, &plbitpos
, &offset
);
4499 if (!plbitpos
.is_constant (&lbitpos
) || offset
!= NULL_TREE
)
/* See if we can find a mode to refer to this field.  We should be able to,
   but fail if we can't.  */
4505 if (!get_best_mode (lbitsize
, lbitpos
, bitstart
, bitend
,
4506 const_p
? TYPE_ALIGN (TREE_TYPE (linner
))
4507 : MIN (TYPE_ALIGN (TREE_TYPE (linner
)),
4508 TYPE_ALIGN (TREE_TYPE (rinner
))),
4509 BITS_PER_WORD
, false, &nmode
))
/* Set signed and unsigned types of the precision of this mode for the
   shifts below.  */
4514 unsigned_type
= lang_hooks
.types
.type_for_mode (nmode
, 1);
/* Compute the bit position and size for the new reference and our offset
   within it.  If the new reference is the same size as the original, we
   won't optimize anything, so return zero.  */
4519 nbitsize
= GET_MODE_BITSIZE (nmode
);
4520 nbitpos
= lbitpos
& ~ (nbitsize
- 1);
4522 if (nbitsize
== lbitsize
)
4525 if (lreversep
? !BYTES_BIG_ENDIAN
: BYTES_BIG_ENDIAN
)
4526 lbitpos
= nbitsize
- lbitsize
- lbitpos
;
4528 /* Make the mask to be used against the extracted field. */
4529 mask
= build_int_cst_type (unsigned_type
, -1);
4530 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (nbitsize
- lbitsize
));
4531 mask
= const_binop (RSHIFT_EXPR
, mask
,
4532 size_int (nbitsize
- lbitsize
- lbitpos
));
/* If not comparing with constant, just rework the comparison
   and return.  */
4541 tree t1
= make_bit_field_ref (loc
, linner
, lhs
, unsigned_type
,
4542 nbitsize
, nbitpos
, 1, lreversep
);
4543 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
, t1
, mask
);
4544 tree t2
= make_bit_field_ref (loc
, rinner
, rhs
, unsigned_type
,
4545 nbitsize
, nbitpos
, 1, rreversep
);
4546 t2
= fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
, t2
, mask
);
4547 return fold_build2_loc (loc
, code
, compare_type
, t1
, t2
);
/* Otherwise, we are handling the constant case.  See if the constant is too
   big for the field.  Warn and return a tree for 0 (false) if so.  We do
   this not only for its own sake, but to avoid having to test for this
   error case below.  If we didn't, we might generate wrong code.

   For unsigned fields, the constant shifted right by the field length should
   be all zero.  For signed fields, the high-order bits should agree with
   the sign bit.  */
4561 if (wi::lrshift (wi::to_wide (rhs
), lbitsize
) != 0)
4563 warning (0, "comparison is always %d due to width of bit-field",
4565 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
4570 wide_int tem
= wi::arshift (wi::to_wide (rhs
), lbitsize
- 1);
4571 if (tem
!= 0 && tem
!= -1)
4573 warning (0, "comparison is always %d due to width of bit-field",
4575 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
4582 /* Single-bit compares should always be against zero. */
4583 if (lbitsize
== 1 && ! integer_zerop (rhs
))
4585 code
= code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
;
4586 rhs
= build_int_cst (type
, 0);
/* Make a new bitfield reference, shift the constant over the
   appropriate number of bits and mask it with the computed mask
   (in case this was a signed field).  If we changed it, make a new one.  */
4592 lhs
= make_bit_field_ref (loc
, linner
, lhs
, unsigned_type
,
4593 nbitsize
, nbitpos
, 1, lreversep
);
4595 rhs
= const_binop (BIT_AND_EXPR
,
4596 const_binop (LSHIFT_EXPR
,
4597 fold_convert_loc (loc
, unsigned_type
, rhs
),
4598 size_int (lbitpos
)),
4601 lhs
= build2_loc (loc
, code
, compare_type
,
4602 build2 (BIT_AND_EXPR
, unsigned_type
, lhs
, mask
), rhs
);
/* Subroutine for fold_truth_andor_1: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PREVERSEP is set to the storage order of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */
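/* Illustrative example (not from the original source): given an operand
   of a comparison such as

     struct s { unsigned int f : 3; unsigned int g : 5; } x;
     ... (x.g & 4) != 0 ...

   the reference X.G would be decoded into the innermost object X, a bit
   size of 5, the (layout-dependent) bit position of G, unsigned
   signedness, and the explicit BIT_AND_EXPR mask 4, which is merged into
   the mask covering the field.  Values here are purely illustrative.  */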
4632 decode_field_reference (location_t loc
, tree
*exp_
, HOST_WIDE_INT
*pbitsize
,
4633 HOST_WIDE_INT
*pbitpos
, machine_mode
*pmode
,
4634 int *punsignedp
, int *preversep
, int *pvolatilep
,
4635 tree
*pmask
, tree
*pand_mask
)
4638 tree outer_type
= 0;
4640 tree mask
, inner
, offset
;
4642 unsigned int precision
;
/* All the optimizations using this function assume integer fields.
   There are problems with FP fields since the type_for_size call
   below can fail for, e.g., XFmode.  */
4647 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp
)))
/* We are interested in the bare arrangement of bits, so strip everything
   that doesn't affect the machine mode.  However, record the type of the
   outermost expression if it may matter below.  */
4653 if (CONVERT_EXPR_P (exp
)
4654 || TREE_CODE (exp
) == NON_LVALUE_EXPR
)
4655 outer_type
= TREE_TYPE (exp
);
4658 if (TREE_CODE (exp
) == BIT_AND_EXPR
)
4660 and_mask
= TREE_OPERAND (exp
, 1);
4661 exp
= TREE_OPERAND (exp
, 0);
4662 STRIP_NOPS (exp
); STRIP_NOPS (and_mask
);
4663 if (TREE_CODE (and_mask
) != INTEGER_CST
)
4667 poly_int64 poly_bitsize
, poly_bitpos
;
4668 inner
= get_inner_reference (exp
, &poly_bitsize
, &poly_bitpos
, &offset
,
4669 pmode
, punsignedp
, preversep
, pvolatilep
);
4670 if ((inner
== exp
&& and_mask
== 0)
4671 || !poly_bitsize
.is_constant (pbitsize
)
4672 || !poly_bitpos
.is_constant (pbitpos
)
4675 || TREE_CODE (inner
) == PLACEHOLDER_EXPR
4676 /* Reject out-of-bound accesses (PR79731). */
4677 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner
))
4678 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner
)),
4679 *pbitpos
+ *pbitsize
) < 0))
4682 unsigned_type
= lang_hooks
.types
.type_for_size (*pbitsize
, 1);
4683 if (unsigned_type
== NULL_TREE
)
/* If the number of bits in the reference is the same as the bitsize of
   the outer type, then the outer type gives the signedness.  Otherwise
   (in case of a small bitfield) the signedness is unchanged.  */
4691 if (outer_type
&& *pbitsize
== TYPE_PRECISION (outer_type
))
4692 *punsignedp
= TYPE_UNSIGNED (outer_type
);
4694 /* Compute the mask to access the bitfield. */
4695 precision
= TYPE_PRECISION (unsigned_type
);
4697 mask
= build_int_cst_type (unsigned_type
, -1);
4699 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
));
4700 mask
= const_binop (RSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
));
4702 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4704 mask
= fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
4705 fold_convert_loc (loc
, unsigned_type
, and_mask
), mask
);
4708 *pand_mask
= and_mask
;
4712 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4713 bit positions and MASK is SIGNED. */
4716 all_ones_mask_p (const_tree mask
, unsigned int size
)
4718 tree type
= TREE_TYPE (mask
);
4719 unsigned int precision
= TYPE_PRECISION (type
);
/* If this function returns true when the type of the mask is
   UNSIGNED, then there will be errors.  In particular see
   gcc.c-torture/execute/990326-1.c.  There does not appear to be
   any documentation paper trail as to why this is so.  But the pre
   wide-int worked with that restriction and it has been preserved
   here.  */
4727 if (size
> precision
|| TYPE_SIGN (type
) == UNSIGNED
)
4730 return wi::mask (size
, false, precision
) == wi::to_wide (mask
);
/* Subroutine for fold: determine if VAL is the INTEGER_CST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */
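/* Illustrative example (not from the original source): for a 32-bit
   signed EXP, VAL must be the constant 0x80000000 (only the sign bit
   set) for EXP itself to be returned.  If EXP is (int) c where C is a
   signed char, VAL is also tested against the 8-bit sign bit 0x80, and
   the narrower operand C is returned when that matches.  */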
4740 sign_bit_p (tree exp
, const_tree val
)
4745 /* Tree EXP must have an integral type. */
4746 t
= TREE_TYPE (exp
);
4747 if (! INTEGRAL_TYPE_P (t
))
4750 /* Tree VAL must be an integer constant. */
4751 if (TREE_CODE (val
) != INTEGER_CST
4752 || TREE_OVERFLOW (val
))
4755 width
= TYPE_PRECISION (t
);
4756 if (wi::only_sign_bit_p (wi::to_wide (val
), width
))
4759 /* Handle extension from a narrower type. */
4760 if (TREE_CODE (exp
) == NOP_EXPR
4761 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))) < width
)
4762 return sign_bit_p (TREE_OPERAND (exp
, 0), val
);
4767 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4768 to be evaluated unconditionally. */
4771 simple_operand_p (const_tree exp
)
4773 /* Strip any conversions that don't change the machine mode. */
4776 return (CONSTANT_CLASS_P (exp
)
4777 || TREE_CODE (exp
) == SSA_NAME
4779 && ! TREE_ADDRESSABLE (exp
)
4780 && ! TREE_THIS_VOLATILE (exp
)
4781 && ! DECL_NONLOCAL (exp
)
4782 /* Don't regard global variables as simple. They may be
4783 allocated in ways unknown to the compiler (shared memory,
4784 #pragma weak, etc). */
4785 && ! TREE_PUBLIC (exp
)
4786 && ! DECL_EXTERNAL (exp
)
4787 /* Weakrefs are not safe to be read, since they can be NULL.
4788 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4789 have DECL_WEAK flag set. */
4790 && (! VAR_OR_FUNCTION_DECL_P (exp
) || ! DECL_WEAK (exp
))
4791 /* Loading a static variable is unduly expensive, but global
4792 registers aren't expensive. */
4793 && (! TREE_STATIC (exp
) || DECL_REGISTER (exp
))));
/* Subroutine for fold_truth_andor: determine if an operand is simple enough
   to be evaluated unconditionally.
   In addition to simple_operand_p, we assume that comparisons, conversions,
   and logic-not operations are simple, if their operands are simple, too.  */
4802 simple_operand_p_2 (tree exp
)
4804 enum tree_code code
;
4806 if (TREE_SIDE_EFFECTS (exp
) || generic_expr_could_trap_p (exp
))
4809 while (CONVERT_EXPR_P (exp
))
4810 exp
= TREE_OPERAND (exp
, 0);
4812 code
= TREE_CODE (exp
);
4814 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
4815 return (simple_operand_p (TREE_OPERAND (exp
, 0))
4816 && simple_operand_p (TREE_OPERAND (exp
, 1)));
4818 if (code
== TRUTH_NOT_EXPR
)
4819 return simple_operand_p_2 (TREE_OPERAND (exp
, 0));
4821 return simple_operand_p (exp
);
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
	X == 2 || X == 3 || X == 4 || X == 5
   and
	X >= 2 && X <= 5
   are converted to
	(unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
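/* Worked illustration (not from the original source): the test
   X >= 2 && X <= 5 is described as + [2, 5]; its inverse
   X < 2 || X > 5 is - [2, 5].  A one-sided test such as X > 10 is
   - [-, 10], and the whole-range descriptions + [-, -] and - [-, -]
   stand for the constants true and false, so missing bounds need no
   special casing.  */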
/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */
4861 range_binop (enum tree_code code
, tree type
, tree arg0
, int upper0_p
,
4862 tree arg1
, int upper1_p
)
/* If neither arg represents infinity, do the normal operation.
   Else, if not a comparison, return infinity.  Else handle the special
   comparison rules.  Note that most of the cases below won't occur, but
   are handled for consistency.  */
4873 if (arg0
!= 0 && arg1
!= 0)
4875 tem
= fold_build2 (code
, type
!= 0 ? type
: TREE_TYPE (arg0
),
4876 arg0
, fold_convert (TREE_TYPE (arg0
), arg1
));
4878 return TREE_CODE (tem
) == INTEGER_CST
? tem
: 0;
4881 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
/* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
   for neither.  In real maths, we cannot assume open ended ranges are
   the same.  But, this is computer arithmetic, where numbers are finite.
   We can therefore make the transformation of any unbounded range with
   the value Z, Z being greater than any representable number.  This permits
   us to treat unbounded ranges as equal.  */
4890 sgn0
= arg0
!= 0 ? 0 : (upper0_p
? 1 : -1);
4891 sgn1
= arg1
!= 0 ? 0 : (upper1_p
? 1 : -1);
4895 result
= sgn0
== sgn1
;
4898 result
= sgn0
!= sgn1
;
4901 result
= sgn0
< sgn1
;
4904 result
= sgn0
<= sgn1
;
4907 result
= sgn0
> sgn1
;
4910 result
= sgn0
>= sgn1
;
4916 return constant_boolean_node (result
, type
);
/* Helper routine for make_range.  Perform one step for it, return
   new expression if the loop should continue or NULL_TREE if it should
   stop.  */
4924 make_range_step (location_t loc
, enum tree_code code
, tree arg0
, tree arg1
,
4925 tree exp_type
, tree
*p_low
, tree
*p_high
, int *p_in_p
,
4926 bool *strict_overflow_p
)
4928 tree arg0_type
= TREE_TYPE (arg0
);
4929 tree n_low
, n_high
, low
= *p_low
, high
= *p_high
;
4930 int in_p
= *p_in_p
, n_in_p
;
4934 case TRUTH_NOT_EXPR
:
4935 /* We can only do something if the range is testing for zero. */
4936 if (low
== NULL_TREE
|| high
== NULL_TREE
4937 || ! integer_zerop (low
) || ! integer_zerop (high
))
4942 case EQ_EXPR
: case NE_EXPR
:
4943 case LT_EXPR
: case LE_EXPR
: case GE_EXPR
: case GT_EXPR
:
/* We can only do something if the range is testing for zero
   and if the second operand is an integer constant.  Note that
   saying something is "in" the range we make is done by
   complementing IN_P since it will set in the initial case of
   being not equal to zero; "out" is leaving it alone.  */
4949 if (low
== NULL_TREE
|| high
== NULL_TREE
4950 || ! integer_zerop (low
) || ! integer_zerop (high
)
4951 || TREE_CODE (arg1
) != INTEGER_CST
)
4956 case NE_EXPR
: /* - [c, c] */
4959 case EQ_EXPR
: /* + [c, c] */
4960 in_p
= ! in_p
, low
= high
= arg1
;
4962 case GT_EXPR
: /* - [-, c] */
4963 low
= 0, high
= arg1
;
4965 case GE_EXPR
: /* + [c, -] */
4966 in_p
= ! in_p
, low
= arg1
, high
= 0;
4968 case LT_EXPR
: /* - [c, -] */
4969 low
= arg1
, high
= 0;
4971 case LE_EXPR
: /* + [-, c] */
4972 in_p
= ! in_p
, low
= 0, high
= arg1
;
/* If this is an unsigned comparison, we also know that EXP is
   greater than or equal to zero.  We base the range tests we make
   on that fact, so we record it here so we can parse existing
   range tests.  We test arg0_type since often the return type
   of, e.g. EQ_EXPR, is boolean.  */
4983 if (TYPE_UNSIGNED (arg0_type
) && (low
== 0 || high
== 0))
4985 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
,
4987 build_int_cst (arg0_type
, 0),
4991 in_p
= n_in_p
, low
= n_low
, high
= n_high
;
/* If the high bound is missing, but we have a nonzero low
   bound, reverse the range so it goes from zero to the low bound
   minus one.  */
4996 if (high
== 0 && low
&& ! integer_zerop (low
))
4999 high
= range_binop (MINUS_EXPR
, NULL_TREE
, low
, 0,
5000 build_int_cst (TREE_TYPE (low
), 1), 0);
5001 low
= build_int_cst (arg0_type
, 0);
5011 /* If flag_wrapv and ARG0_TYPE is signed, make sure
5012 low and high are non-NULL, then normalize will DTRT. */
5013 if (!TYPE_UNSIGNED (arg0_type
)
5014 && !TYPE_OVERFLOW_UNDEFINED (arg0_type
))
5016 if (low
== NULL_TREE
)
5017 low
= TYPE_MIN_VALUE (arg0_type
);
5018 if (high
== NULL_TREE
)
5019 high
= TYPE_MAX_VALUE (arg0_type
);
5022 /* (-x) IN [a,b] -> x in [-b, -a] */
5023 n_low
= range_binop (MINUS_EXPR
, exp_type
,
5024 build_int_cst (exp_type
, 0),
5026 n_high
= range_binop (MINUS_EXPR
, exp_type
,
5027 build_int_cst (exp_type
, 0),
5029 if (n_high
!= 0 && TREE_OVERFLOW (n_high
))
5035 return build2_loc (loc
, MINUS_EXPR
, exp_type
, negate_expr (arg0
),
5036 build_int_cst (exp_type
, 1));
5040 if (TREE_CODE (arg1
) != INTEGER_CST
)
5043 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
5044 move a constant to the other side. */
5045 if (!TYPE_UNSIGNED (arg0_type
)
5046 && !TYPE_OVERFLOW_UNDEFINED (arg0_type
))
/* If EXP is signed, any overflow in the computation is undefined,
   so we don't worry about it so long as our computations on
   the bounds don't overflow.  For unsigned, overflow is defined
   and this is exactly the right thing.  */
5053 n_low
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
5054 arg0_type
, low
, 0, arg1
, 0);
5055 n_high
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
5056 arg0_type
, high
, 1, arg1
, 0);
5057 if ((n_low
!= 0 && TREE_OVERFLOW (n_low
))
5058 || (n_high
!= 0 && TREE_OVERFLOW (n_high
)))
5061 if (TYPE_OVERFLOW_UNDEFINED (arg0_type
))
5062 *strict_overflow_p
= true;
5065 /* Check for an unsigned range which has wrapped around the maximum
5066 value thus making n_high < n_low, and normalize it. */
5067 if (n_low
&& n_high
&& tree_int_cst_lt (n_high
, n_low
))
5069 low
= range_binop (PLUS_EXPR
, arg0_type
, n_high
, 0,
5070 build_int_cst (TREE_TYPE (n_high
), 1), 0);
5071 high
= range_binop (MINUS_EXPR
, arg0_type
, n_low
, 0,
5072 build_int_cst (TREE_TYPE (n_low
), 1), 0);
/* If the range is of the form +/- [ x+1, x ], we won't
   be able to normalize it.  But then, it represents the
   whole range or the empty set, so make it
   +/- [ -, - ].  */
5078 if (tree_int_cst_equal (n_low
, low
)
5079 && tree_int_cst_equal (n_high
, high
))
5085 low
= n_low
, high
= n_high
;
5093 case NON_LVALUE_EXPR
:
5094 if (TYPE_PRECISION (arg0_type
) > TYPE_PRECISION (exp_type
))
5097 if (! INTEGRAL_TYPE_P (arg0_type
)
5098 || (low
!= 0 && ! int_fits_type_p (low
, arg0_type
))
5099 || (high
!= 0 && ! int_fits_type_p (high
, arg0_type
)))
5102 n_low
= low
, n_high
= high
;
5105 n_low
= fold_convert_loc (loc
, arg0_type
, n_low
);
5108 n_high
= fold_convert_loc (loc
, arg0_type
, n_high
);
/* If we're converting arg0 from an unsigned type, to exp,
   a signed type, we will be doing the comparison as unsigned.
   The tests above have already verified that LOW and HIGH
   are both positive.

   So we have to ensure that we will handle large unsigned
   values the same way that the current signed bounds treat
   negative values.  */
5119 if (!TYPE_UNSIGNED (exp_type
) && TYPE_UNSIGNED (arg0_type
))
5123 /* For fixed-point modes, we need to pass the saturating flag
5124 as the 2nd parameter. */
5125 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type
)))
5127 = lang_hooks
.types
.type_for_mode (TYPE_MODE (arg0_type
),
5128 TYPE_SATURATING (arg0_type
));
5131 = lang_hooks
.types
.type_for_mode (TYPE_MODE (arg0_type
), 1);
/* A range without an upper bound is, naturally, unbounded.
   Since convert would have cropped a very large value, use
   the max value for the destination type.  */
5137 = TYPE_MAX_VALUE (equiv_type
) ? TYPE_MAX_VALUE (equiv_type
)
5138 : TYPE_MAX_VALUE (arg0_type
);
5140 if (TYPE_PRECISION (exp_type
) == TYPE_PRECISION (arg0_type
))
5141 high_positive
= fold_build2_loc (loc
, RSHIFT_EXPR
, arg0_type
,
5142 fold_convert_loc (loc
, arg0_type
,
5144 build_int_cst (arg0_type
, 1));
/* If the low bound is specified, "and" the range with the
   range for which the original unsigned value will be
   positive.  */
5151 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
, 1, n_low
, n_high
,
5152 1, fold_convert_loc (loc
, arg0_type
,
5157 in_p
= (n_in_p
== in_p
);
5161 /* Otherwise, "or" the range with the range of the input
5162 that will be interpreted as negative. */
5163 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
, 0, n_low
, n_high
,
5164 1, fold_convert_loc (loc
, arg0_type
,
5169 in_p
= (in_p
!= n_in_p
);
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same
   type as the returned expression.  If EXP is not a comparison, we
   will most likely not be returning a useful value and range.  Set
   *STRICT_OVERFLOW_P to true if the return value is only valid
   because signed overflow is undefined; otherwise, do not change
   *STRICT_OVERFLOW_P.  */
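/* Illustrative example (not from the original source): for an unsigned
   variable X and EXP = (X - 2 <= 3), make_range peels the comparison and
   the subtraction and hands back the expression X with *PIN_P = 1,
   *PLOW = 2 and *PHIGH = 5, i.e. the range + [2, 5].  For the plain test
   X != 0 it returns X with the range - [0, 0].  */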
5193 make_range (tree exp
, int *pin_p
, tree
*plow
, tree
*phigh
,
5194 bool *strict_overflow_p
)
5196 enum tree_code code
;
5197 tree arg0
, arg1
= NULL_TREE
;
5198 tree exp_type
, nexp
;
5201 location_t loc
= EXPR_LOCATION (exp
);
/* Start with simply saying "EXP != 0" and then look at the code of EXP
   and see if we can refine the range.  Some of the cases below may not
   happen, but it doesn't seem worth worrying about this.  We "continue"
   the outer loop when we've changed something; otherwise we "break"
   the switch, which will "break" the while.  */
5210 low
= high
= build_int_cst (TREE_TYPE (exp
), 0);
5214 code
= TREE_CODE (exp
);
5215 exp_type
= TREE_TYPE (exp
);
5218 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
)))
5220 if (TREE_OPERAND_LENGTH (exp
) > 0)
5221 arg0
= TREE_OPERAND (exp
, 0);
5222 if (TREE_CODE_CLASS (code
) == tcc_binary
5223 || TREE_CODE_CLASS (code
) == tcc_comparison
5224 || (TREE_CODE_CLASS (code
) == tcc_expression
5225 && TREE_OPERAND_LENGTH (exp
) > 1))
5226 arg1
= TREE_OPERAND (exp
, 1);
5228 if (arg0
== NULL_TREE
)
5231 nexp
= make_range_step (loc
, code
, arg0
, arg1
, exp_type
, &low
,
5232 &high
, &in_p
, strict_overflow_p
);
5233 if (nexp
== NULL_TREE
)
5238 /* If EXP is a constant, we can evaluate whether this is true or false. */
5239 if (TREE_CODE (exp
) == INTEGER_CST
)
5241 in_p
= in_p
== (integer_onep (range_binop (GE_EXPR
, integer_type_node
,
5243 && integer_onep (range_binop (LE_EXPR
, integer_type_node
,
5249 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
/* Returns TRUE if [LOW, HIGH] range check can be optimized to
   a bitwise check i.e. when
     LOW  == 0xXX...X00...0
     HIGH == 0xXX...X11...1
   Return corresponding mask in MASK and stem in VALUE.  */
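/* Illustrative example (not from the original source): for the range
   [0x20, 0x2f] the differing low bits form END_MASK = 0x0f, the stem
   mask is ~0x0f and the stem is 0x20, so the range test
   0x20 <= X && X <= 0x2f can be rewritten as (X & ~0x0f) == 0x20.  */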
5260 maskable_range_p (const_tree low
, const_tree high
, tree type
, tree
*mask
,
5263 if (TREE_CODE (low
) != INTEGER_CST
5264 || TREE_CODE (high
) != INTEGER_CST
)
5267 unsigned prec
= TYPE_PRECISION (type
);
5268 wide_int lo
= wi::to_wide (low
, prec
);
5269 wide_int hi
= wi::to_wide (high
, prec
);
5271 wide_int end_mask
= lo
^ hi
;
5272 if ((end_mask
& (end_mask
+ 1)) != 0
5273 || (lo
& end_mask
) != 0)
5276 wide_int stem_mask
= ~end_mask
;
5277 wide_int stem
= lo
& stem_mask
;
5278 if (stem
!= (hi
& stem_mask
))
5281 *mask
= wide_int_to_tree (type
, stem_mask
);
5282 *value
= wide_int_to_tree (type
, stem
);
5287 /* Helper routine for build_range_check and match.pd. Return the type to
5288 perform the check or NULL if it shouldn't be optimized. */
5291 range_check_type (tree etype
)
5293 /* First make sure that arithmetics in this type is valid, then make sure
5294 that it wraps around. */
5295 if (TREE_CODE (etype
) == ENUMERAL_TYPE
|| TREE_CODE (etype
) == BOOLEAN_TYPE
)
5296 etype
= lang_hooks
.types
.type_for_size (TYPE_PRECISION (etype
), 1);
5298 if (TREE_CODE (etype
) == INTEGER_TYPE
&& !TYPE_UNSIGNED (etype
))
5300 tree utype
, minv
, maxv
;
5302 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5303 for the type in question, as we rely on this here. */
5304 utype
= unsigned_type_for (etype
);
5305 maxv
= fold_convert (utype
, TYPE_MAX_VALUE (etype
));
5306 maxv
= range_binop (PLUS_EXPR
, NULL_TREE
, maxv
, 1,
5307 build_int_cst (TREE_TYPE (maxv
), 1), 1);
5308 minv
= fold_convert (utype
, TYPE_MIN_VALUE (etype
));
5310 if (integer_zerop (range_binop (NE_EXPR
, integer_type_node
,
5316 else if (POINTER_TYPE_P (etype
))
5317 etype
= unsigned_type_for (etype
);
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.  */
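/* Illustrative example (not from the original source): with IN_P = 1,
   LOW = '0' and HIGH = '9', the check built for a char-valued EXP is
   essentially

     (unsigned char) (EXP - '0') <= 9

   i.e. the range test is reduced to a single unsigned comparison after
   shifting the range down to start at zero; the exact type used for the
   subtraction comes from range_check_type.  */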
5326 build_range_check (location_t loc
, tree type
, tree exp
, int in_p
,
5327 tree low
, tree high
)
5329 tree etype
= TREE_TYPE (exp
), mask
, value
;
5331 /* Disable this optimization for function pointer expressions
5332 on targets that require function pointer canonicalization. */
5333 if (targetm
.have_canonicalize_funcptr_for_compare ()
5334 && POINTER_TYPE_P (etype
)
5335 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype
)))
5340 value
= build_range_check (loc
, type
, exp
, 1, low
, high
);
5342 return invert_truthvalue_loc (loc
, value
);
5347 if (low
== 0 && high
== 0)
5348 return omit_one_operand_loc (loc
, type
, build_int_cst (type
, 1), exp
);
5351 return fold_build2_loc (loc
, LE_EXPR
, type
, exp
,
5352 fold_convert_loc (loc
, etype
, high
));
5355 return fold_build2_loc (loc
, GE_EXPR
, type
, exp
,
5356 fold_convert_loc (loc
, etype
, low
));
5358 if (operand_equal_p (low
, high
, 0))
5359 return fold_build2_loc (loc
, EQ_EXPR
, type
, exp
,
5360 fold_convert_loc (loc
, etype
, low
));
5362 if (TREE_CODE (exp
) == BIT_AND_EXPR
5363 && maskable_range_p (low
, high
, etype
, &mask
, &value
))
5364 return fold_build2_loc (loc
, EQ_EXPR
, type
,
5365 fold_build2_loc (loc
, BIT_AND_EXPR
, etype
,
5369 if (integer_zerop (low
))
5371 if (! TYPE_UNSIGNED (etype
))
5373 etype
= unsigned_type_for (etype
);
5374 high
= fold_convert_loc (loc
, etype
, high
);
5375 exp
= fold_convert_loc (loc
, etype
, exp
);
5377 return build_range_check (loc
, type
, exp
, 1, 0, high
);
5380 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
5381 if (integer_onep (low
) && TREE_CODE (high
) == INTEGER_CST
)
5383 int prec
= TYPE_PRECISION (etype
);
5385 if (wi::mask
<widest_int
> (prec
- 1, false) == wi::to_widest (high
))
5387 if (TYPE_UNSIGNED (etype
))
5389 tree signed_etype
= signed_type_for (etype
);
5390 if (TYPE_PRECISION (signed_etype
) != TYPE_PRECISION (etype
))
5392 = build_nonstandard_integer_type (TYPE_PRECISION (etype
), 0);
5394 etype
= signed_etype
;
5395 exp
= fold_convert_loc (loc
, etype
, exp
);
5397 return fold_build2_loc (loc
, GT_EXPR
, type
, exp
,
5398 build_int_cst (etype
, 0));
5402 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5403 This requires wrap-around arithmetics for the type of the expression. */
5404 etype
= range_check_type (etype
);
5405 if (etype
== NULL_TREE
)
5408 high
= fold_convert_loc (loc
, etype
, high
);
5409 low
= fold_convert_loc (loc
, etype
, low
);
5410 exp
= fold_convert_loc (loc
, etype
, exp
);
5412 value
= const_binop (MINUS_EXPR
, high
, low
);
5414 if (value
!= 0 && !TREE_OVERFLOW (value
))
5415 return build_range_check (loc
, type
,
5416 fold_build2_loc (loc
, MINUS_EXPR
, etype
, exp
, low
),
5417 1, build_int_cst (etype
, 0), value
);
5422 /* Return the predecessor of VAL in its type, handling the infinite case. */
5425 range_predecessor (tree val
)
5427 tree type
= TREE_TYPE (val
);
5429 if (INTEGRAL_TYPE_P (type
)
5430 && operand_equal_p (val
, TYPE_MIN_VALUE (type
), 0))
5433 return range_binop (MINUS_EXPR
, NULL_TREE
, val
, 0,
5434 build_int_cst (TREE_TYPE (val
), 1), 0);
5437 /* Return the successor of VAL in its type, handling the infinite case. */
5440 range_successor (tree val
)
5442 tree type
= TREE_TYPE (val
);
5444 if (INTEGRAL_TYPE_P (type
)
5445 && operand_equal_p (val
, TYPE_MAX_VALUE (type
), 0))
5448 return range_binop (PLUS_EXPR
, NULL_TREE
, val
, 0,
5449 build_int_cst (TREE_TYPE (val
), 1), 0);
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */
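/* Illustrative example (not from the original source): for
   X > 1 && X < 5 the two "outside" ranges - [-, 1] and - [5, -]
   merge into the single range + [2, 4]; likewise the overall test
   X < 2 || X > 4 is equivalent to being outside [2, 4].  When the
   inputs cannot be expressed as one range, the function returns 0 and
   the caller keeps the original comparisons.  */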
5456 merge_ranges (int *pin_p
, tree
*plow
, tree
*phigh
, int in0_p
, tree low0
,
5457 tree high0
, int in1_p
, tree low1
, tree high1
)
5465 int lowequal
= ((low0
== 0 && low1
== 0)
5466 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
5467 low0
, 0, low1
, 0)));
5468 int highequal
= ((high0
== 0 && high1
== 0)
5469 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
5470 high0
, 1, high1
, 1)));
5472 /* Make range 0 be the range that starts first, or ends last if they
5473 start at the same value. Swap them if it isn't. */
5474 if (integer_onep (range_binop (GT_EXPR
, integer_type_node
,
5477 && integer_onep (range_binop (GT_EXPR
, integer_type_node
,
5478 high1
, 1, high0
, 1))))
5480 temp
= in0_p
, in0_p
= in1_p
, in1_p
= temp
;
5481 tem
= low0
, low0
= low1
, low1
= tem
;
5482 tem
= high0
, high0
= high1
, high1
= tem
;
5485 /* If the second range is != high1 where high1 is the type maximum of
5486 the type, try first merging with < high1 range. */
5489 && TREE_CODE (low1
) == INTEGER_CST
5490 && (TREE_CODE (TREE_TYPE (low1
)) == INTEGER_TYPE
5491 || (TREE_CODE (TREE_TYPE (low1
)) == ENUMERAL_TYPE
5492 && known_eq (TYPE_PRECISION (TREE_TYPE (low1
)),
5493 GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1
))))))
5494 && operand_equal_p (low1
, high1
, 0))
5496 if (tree_int_cst_equal (low1
, TYPE_MAX_VALUE (TREE_TYPE (low1
)))
5497 && merge_ranges (pin_p
, plow
, phigh
, in0_p
, low0
, high0
,
5498 !in1_p
, NULL_TREE
, range_predecessor (low1
)))
5500 /* Similarly for the second range != low1 where low1 is the type minimum
5501 of the type, try first merging with > low1 range. */
5502 if (tree_int_cst_equal (low1
, TYPE_MIN_VALUE (TREE_TYPE (low1
)))
5503 && merge_ranges (pin_p
, plow
, phigh
, in0_p
, low0
, high0
,
5504 !in1_p
, range_successor (low1
), NULL_TREE
))
/* Now flag two cases, whether the ranges are disjoint or whether the
   second range is totally subsumed in the first.  Note that the tests
   below are simplified by the ones above.  */
5511 no_overlap
= integer_onep (range_binop (LT_EXPR
, integer_type_node
,
5512 high0
, 1, low1
, 0));
5513 subset
= integer_onep (range_binop (LE_EXPR
, integer_type_node
,
5514 high1
, 1, high0
, 1));
5516 /* We now have four cases, depending on whether we are including or
5517 excluding the two ranges. */
5520 /* If they don't overlap, the result is false. If the second range
5521 is a subset it is the result. Otherwise, the range is from the start
5522 of the second to the end of the first. */
5524 in_p
= 0, low
= high
= 0;
5526 in_p
= 1, low
= low1
, high
= high1
;
5528 in_p
= 1, low
= low1
, high
= high0
;
5531 else if (in0_p
&& ! in1_p
)
/* If they don't overlap, the result is the first range.  If they are
   equal, the result is false.  If the second range is a subset of the
   first, and the ranges begin at the same place, we go from just after
   the end of the second range to the end of the first.  If the second
   range is not a subset of the first, or if it is a subset and both
   ranges end at the same place, the range starts at the start of the
   first range and ends just before the second range.
   Otherwise, we can't describe this as a single range.  */
5542 in_p
= 1, low
= low0
, high
= high0
;
5543 else if (lowequal
&& highequal
)
5544 in_p
= 0, low
= high
= 0;
5545 else if (subset
&& lowequal
)
5547 low
= range_successor (high1
);
5552 /* We are in the weird situation where high0 > high1 but
5553 high1 has no successor. Punt. */
5557 else if (! subset
|| highequal
)
5560 high
= range_predecessor (low1
);
5564 /* low0 < low1 but low1 has no predecessor. Punt. */
5572 else if (! in0_p
&& in1_p
)
/* If they don't overlap, the result is the second range.  If the second
   is a subset of the first, the result is false.  Otherwise,
   the range starts just after the first range and ends at the
   end of the second.  */
5579 in_p
= 1, low
= low1
, high
= high1
;
5580 else if (subset
|| highequal
)
5581 in_p
= 0, low
= high
= 0;
5584 low
= range_successor (high0
);
5589 /* high1 > high0 but high0 has no successor. Punt. */
/* The case where we are excluding both ranges.  Here the complex case
   is if they don't overlap.  In that case, the only time we have a
   range is if they are adjacent.  If the second is a subset of the
   first, the result is the first.  Otherwise, the range to exclude
   starts at the beginning of the first range and ends at the end of
   the second.  */
5605 if (integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
5606 range_successor (high0
),
5608 in_p
= 0, low
= low0
, high
= high1
;
5611 /* Canonicalize - [min, x] into - [-, x]. */
5612 if (low0
&& TREE_CODE (low0
) == INTEGER_CST
)
5613 switch (TREE_CODE (TREE_TYPE (low0
)))
5616 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0
)),
5618 (TYPE_MODE (TREE_TYPE (low0
)))))
5622 if (tree_int_cst_equal (low0
,
5623 TYPE_MIN_VALUE (TREE_TYPE (low0
))))
5627 if (TYPE_UNSIGNED (TREE_TYPE (low0
))
5628 && integer_zerop (low0
))
5635 /* Canonicalize - [x, max] into - [x, -]. */
5636 if (high1
&& TREE_CODE (high1
) == INTEGER_CST
)
5637 switch (TREE_CODE (TREE_TYPE (high1
)))
5640 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1
)),
5642 (TYPE_MODE (TREE_TYPE (high1
)))))
5646 if (tree_int_cst_equal (high1
,
5647 TYPE_MAX_VALUE (TREE_TYPE (high1
))))
5651 if (TYPE_UNSIGNED (TREE_TYPE (high1
))
5652 && integer_zerop (range_binop (PLUS_EXPR
, NULL_TREE
,
5654 build_int_cst (TREE_TYPE (high1
), 1),
/* The ranges might be also adjacent between the maximum and
   minimum values of the given type.  For
   - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
   return + [x + 1, y - 1].  */
5666 if (low0
== 0 && high1
== 0)
5668 low
= range_successor (high0
);
5669 high
= range_predecessor (low1
);
5670 if (low
== 0 || high
== 0)
5680 in_p
= 0, low
= low0
, high
= high0
;
5682 in_p
= 0, low
= low0
, high
= high1
;
5685 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is being used also to optimize
   A op B ? C : A, by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */
5699 fold_cond_expr_with_comparison (location_t loc
, tree type
,
5700 tree arg0
, tree arg1
, tree arg2
)
5702 enum tree_code comp_code
= TREE_CODE (arg0
);
5703 tree arg00
= TREE_OPERAND (arg0
, 0);
5704 tree arg01
= TREE_OPERAND (arg0
, 1);
5705 tree arg1_type
= TREE_TYPE (arg1
);
/* If we have A op 0 ? A : -A, consider applying the following
   transformations:

   A == 0? A : -A    same as -A
   A != 0? A : -A    same as A
   A >= 0? A : -A    same as abs (A)
   A > 0?  A : -A    same as abs (A)
   A <= 0? A : -A    same as -abs (A)
   A < 0?  A : -A    same as -abs (A)

   None of these transformations work for modes with signed
   zeros.  If A is +/-0, the first two transformations will
   change the sign of the result (from +0 to -0, or vice
   versa).  The last four will fix the sign of the result,
   even though the original expressions could be positive or
   negative, depending on the sign of A.

   Note that all these transformations are correct if A is
   NaN, since the two alternatives (A and -A) are also NaNs.  */
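/* Illustrative example (not from the original source): for a double D,

     D >= 0.0 ? D : -D

   folds to ABS_EXPR <D> (i.e. fabs (D)).  That is still correct when D
   is a NaN, but is only done when signed zeros need not be honored,
   since fabs (-0.0) is +0.0 while the original expression yields
   -0.0.  */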
5730 if (!HONOR_SIGNED_ZEROS (element_mode (type
))
5731 && (FLOAT_TYPE_P (TREE_TYPE (arg01
))
5732 ? real_zerop (arg01
)
5733 : integer_zerop (arg01
))
5734 && ((TREE_CODE (arg2
) == NEGATE_EXPR
5735 && operand_equal_p (TREE_OPERAND (arg2
, 0), arg1
, 0))
5736 /* In the case that A is of the form X-Y, '-A' (arg2) may
5737 have already been folded to Y-X, check for that. */
5738 || (TREE_CODE (arg1
) == MINUS_EXPR
5739 && TREE_CODE (arg2
) == MINUS_EXPR
5740 && operand_equal_p (TREE_OPERAND (arg1
, 0),
5741 TREE_OPERAND (arg2
, 1), 0)
5742 && operand_equal_p (TREE_OPERAND (arg1
, 1),
5743 TREE_OPERAND (arg2
, 0), 0))))
5748 tem
= fold_convert_loc (loc
, arg1_type
, arg1
);
5749 return fold_convert_loc (loc
, type
, negate_expr (tem
));
5752 return fold_convert_loc (loc
, type
, arg1
);
5755 if (flag_trapping_math
)
5760 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
5762 tem
= fold_build1_loc (loc
, ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
5763 return fold_convert_loc (loc
, type
, tem
);
5766 if (flag_trapping_math
)
5771 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
5773 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
5774 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1
)))
/* A <= 0 ? A : -A for A INT_MIN is valid, but -abs(INT_MIN)
   is not: it invokes UB both in abs and in the negation of it.
   So, use ABSU_EXPR instead.  */
5779 tree utype
= unsigned_type_for (TREE_TYPE (arg1
));
5780 tem
= fold_build1_loc (loc
, ABSU_EXPR
, utype
, arg1
);
5781 tem
= negate_expr (tem
);
5782 return fold_convert_loc (loc
, type
, tem
);
5786 tem
= fold_build1_loc (loc
, ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
5787 return negate_expr (fold_convert_loc (loc
, type
, tem
));
5790 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
/* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
   A == 0 ? A : 0 is always 0 unless A is -0.  Note that
   both transformations are correct when A is NaN: A != 0
   is then true, and A == 0 is false.  */
5799 if (!HONOR_SIGNED_ZEROS (element_mode (type
))
5800 && integer_zerop (arg01
) && integer_zerop (arg2
))
5802 if (comp_code
== NE_EXPR
)
5803 return fold_convert_loc (loc
, type
, arg1
);
5804 else if (comp_code
== EQ_EXPR
)
5805 return build_zero_cst (type
);
/* Try some transformations of A op B ? A : B.

   A == B? A : B    same as B
   A != B? A : B    same as A
   A >= B? A : B    same as max (A, B)
   A > B?  A : B    same as max (B, A)
   A <= B? A : B    same as min (A, B)
   A < B?  A : B    same as min (B, A)

   As above, these transformations don't work in the presence
   of signed zeros.  For example, if A and B are zeros of
   opposite sign, the first two transformations will change
   the sign of the result.  In the last four, the original
   expressions give different results for (A=+0, B=-0) and
   (A=-0, B=+0), but the transformed expressions do not.

   The first two transformations are correct if either A or B
   is a NaN.  In the first transformation, the condition will
   be false, and B will indeed be chosen.  In the case of the
   second transformation, the condition A != B will be true,
   and A will be chosen.

   The conversions to max() and min() are not correct if B is
   a number and A is not.  The conditions in the original
   expressions will be false, so all four give B.  The min()
   and max() versions would give a NaN instead.  */
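/* Illustrative example (not from the original source): for doubles X
   and Y,

     X <= Y ? X : Y

   folds to MIN_EXPR <X, Y> only when neither NaNs nor signed zeros have
   to be honored (e.g. under -ffast-math); with default FP semantics the
   COND_EXPR is kept, because min (X, Y) and the conditional disagree
   when X is a NaN or when X and Y are zeros of opposite sign.  */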
5834 if (!HONOR_SIGNED_ZEROS (element_mode (type
))
5835 && operand_equal_for_comparison_p (arg01
, arg2
)
5836 /* Avoid these transformations if the COND_EXPR may be used
5837 as an lvalue in the C++ front-end. PR c++/19199. */
5839 || VECTOR_TYPE_P (type
)
5840 || (! lang_GNU_CXX ()
5841 && strcmp (lang_hooks
.name
, "GNU Objective-C++") != 0)
5842 || ! maybe_lvalue_p (arg1
)
5843 || ! maybe_lvalue_p (arg2
)))
5845 tree comp_op0
= arg00
;
5846 tree comp_op1
= arg01
;
5847 tree comp_type
= TREE_TYPE (comp_op0
);
5852 return fold_convert_loc (loc
, type
, arg2
);
5854 return fold_convert_loc (loc
, type
, arg1
);
/* In C++ a ?: expression can be an lvalue, so put the
   operand which will be used if they are equal first
   so that we can convert this back to the
   corresponding COND_EXPR.  */
5863 if (!HONOR_NANS (arg1
))
5865 comp_op0
= fold_convert_loc (loc
, comp_type
, comp_op0
);
5866 comp_op1
= fold_convert_loc (loc
, comp_type
, comp_op1
);
5867 tem
= (comp_code
== LE_EXPR
|| comp_code
== UNLE_EXPR
)
5868 ? fold_build2_loc (loc
, MIN_EXPR
, comp_type
, comp_op0
, comp_op1
)
5869 : fold_build2_loc (loc
, MIN_EXPR
, comp_type
,
5870 comp_op1
, comp_op0
);
5871 return fold_convert_loc (loc
, type
, tem
);
5878 if (!HONOR_NANS (arg1
))
5880 comp_op0
= fold_convert_loc (loc
, comp_type
, comp_op0
);
5881 comp_op1
= fold_convert_loc (loc
, comp_type
, comp_op1
);
5882 tem
= (comp_code
== GE_EXPR
|| comp_code
== UNGE_EXPR
)
5883 ? fold_build2_loc (loc
, MAX_EXPR
, comp_type
, comp_op0
, comp_op1
)
5884 : fold_build2_loc (loc
, MAX_EXPR
, comp_type
,
5885 comp_op1
, comp_op0
);
5886 return fold_convert_loc (loc
, type
, tem
);
5890 if (!HONOR_NANS (arg1
))
5891 return fold_convert_loc (loc
, type
, arg2
);
5894 if (!HONOR_NANS (arg1
))
5895 return fold_convert_loc (loc
, type
, arg1
);
5898 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
5908 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5909 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5910 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */
5918 fold_range_test (location_t loc
, enum tree_code code
, tree type
,
5921 int or_op
= (code
== TRUTH_ORIF_EXPR
5922 || code
== TRUTH_OR_EXPR
);
5923 int in0_p
, in1_p
, in_p
;
5924 tree low0
, low1
, low
, high0
, high1
, high
;
5925 bool strict_overflow_p
= false;
5927 const char * const warnmsg
= G_("assuming signed overflow does not occur "
5928 "when simplifying range test");
5930 if (!INTEGRAL_TYPE_P (type
))
5933 lhs
= make_range (op0
, &in0_p
, &low0
, &high0
, &strict_overflow_p
);
/* If op0 is known true or false and this is a short-circuiting
   operation we must not merge with op1 since that makes side-effects
   unconditional.  So special-case this.  */
5938 && ((code
== TRUTH_ORIF_EXPR
&& in0_p
)
5939 || (code
== TRUTH_ANDIF_EXPR
&& !in0_p
)))
5941 rhs
= make_range (op1
, &in1_p
, &low1
, &high1
, &strict_overflow_p
);
5943 /* If this is an OR operation, invert both sides; we will invert
5944 again at the end. */
5946 in0_p
= ! in0_p
, in1_p
= ! in1_p
;
/* If both expressions are the same, if we can merge the ranges, and we
   can build the range test, return it or it inverted.  If one of the
   ranges is always true or always false, consider it to be the same
   expression as the other.  */
5952 if ((lhs
== 0 || rhs
== 0 || operand_equal_p (lhs
, rhs
, 0))
5953 && merge_ranges (&in_p
, &low
, &high
, in0_p
, low0
, high0
,
5955 && (tem
= (build_range_check (loc
, type
,
5957 : rhs
!= 0 ? rhs
: integer_zero_node
,
5958 in_p
, low
, high
))) != 0)
5960 if (strict_overflow_p
)
5961 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
5962 return or_op
? invert_truthvalue_loc (loc
, tem
) : tem
;
/* On machines where the branch cost is expensive, if this is a
   short-circuited branch and the underlying object on both sides
   is the same, make a non-short-circuit operation.  */
5968 bool logical_op_non_short_circuit
= LOGICAL_OP_NON_SHORT_CIRCUIT
;
5969 if (param_logical_op_non_short_circuit
!= -1)
5970 logical_op_non_short_circuit
5971 = param_logical_op_non_short_circuit
;
5972 if (logical_op_non_short_circuit
5973 && !flag_sanitize_coverage
5974 && lhs
!= 0 && rhs
!= 0
5975 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
)
5976 && operand_equal_p (lhs
, rhs
, 0))
/* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
   unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
   which cases we can't do this.  */
5981 if (simple_operand_p (lhs
))
5982 return build2_loc (loc
, code
== TRUTH_ANDIF_EXPR
5983 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
5986 else if (!lang_hooks
.decls
.global_bindings_p ()
5987 && !CONTAINS_PLACEHOLDER_P (lhs
))
5989 tree common
= save_expr (lhs
);
5991 if ((lhs
= build_range_check (loc
, type
, common
,
5992 or_op
? ! in0_p
: in0_p
,
5994 && (rhs
= build_range_check (loc
, type
, common
,
5995 or_op
? ! in1_p
: in1_p
,
5998 if (strict_overflow_p
)
5999 fold_overflow_warning (warnmsg
,
6000 WARN_STRICT_OVERFLOW_COMPARISON
);
6001 return build2_loc (loc
, code
== TRUTH_ANDIF_EXPR
6002 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
/* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is signed-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */
6017 unextend (tree c
, int p
, int unsignedp
, tree mask
)
6019 tree type
= TREE_TYPE (c
);
6020 int modesize
= GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type
));
6023 if (p
== modesize
|| unsignedp
)
/* We work by getting just the sign bit into the low-order bit, then
   into the high-order bit, then sign-extend.  We then XOR that value
   with C.  */
6029 temp
= build_int_cst (TREE_TYPE (c
),
6030 wi::extract_uhwi (wi::to_wide (c
), p
- 1, 1));
/* We must use a signed type in order to get an arithmetic right shift.
   However, we must also avoid introducing accidental overflows, so that
   a subsequent call to integer_zerop will work.  Hence we must
   do the type conversion here.  At this point, the constant is either
   zero or one, and the conversion to a signed type can never overflow.
   We could get an overflow if this conversion is done anywhere else.  */
6038 if (TYPE_UNSIGNED (type
))
6039 temp
= fold_convert (signed_type_for (type
), temp
);
6041 temp
= const_binop (LSHIFT_EXPR
, temp
, size_int (modesize
- 1));
6042 temp
= const_binop (RSHIFT_EXPR
, temp
, size_int (modesize
- p
- 1));
6044 temp
= const_binop (BIT_AND_EXPR
, temp
,
6045 fold_convert (TREE_TYPE (c
), mask
));
6046 /* If necessary, convert the type back to match the type of C. */
6047 if (TYPE_UNSIGNED (type
))
6048 temp
= fold_convert (type
, temp
);
6050 return fold_convert (type
, const_binop (BIT_XOR_EXPR
, c
, temp
));
/* For an expression that has the form
     (A && B) || ~B
   or
     (A || B) && ~B,
   we can drop one of the inner expressions and simplify to
     A || ~B
   or
     A && ~B

   LOC is the location of the resulting expression.  OP is the inner
   logical operation; the left-hand side in the examples above, while CMPOP
   is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
   removing a condition that guards another, as in
     (A != NULL && A->...) || A == NULL
   which we must not transform.  If RHS_ONLY is true, only eliminate the
   right-most operand of the inner logical operation.  */
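/* Illustrative example (not from the original source): for integers,

     (x > 0 && y != 0) || x <= 0

   can drop the X > 0 term, because it is exactly the inverse of the
   other arm X <= 0, giving

     y != 0 || x <= 0

   The (A != NULL && A->...) || A == NULL shape mentioned above must not
   be rewritten this way, since A != NULL guards the dereference; that is
   what the RHS_ONLY flag is for.  */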
6070 merge_truthop_with_opposite_arm (location_t loc
, tree op
, tree cmpop
,
6073 tree type
= TREE_TYPE (cmpop
);
6074 enum tree_code code
= TREE_CODE (cmpop
);
6075 enum tree_code truthop_code
= TREE_CODE (op
);
6076 tree lhs
= TREE_OPERAND (op
, 0);
6077 tree rhs
= TREE_OPERAND (op
, 1);
6078 tree orig_lhs
= lhs
, orig_rhs
= rhs
;
6079 enum tree_code rhs_code
= TREE_CODE (rhs
);
6080 enum tree_code lhs_code
= TREE_CODE (lhs
);
6081 enum tree_code inv_code
;
6083 if (TREE_SIDE_EFFECTS (op
) || TREE_SIDE_EFFECTS (cmpop
))
6086 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
6089 if (rhs_code
== truthop_code
)
6091 tree newrhs
= merge_truthop_with_opposite_arm (loc
, rhs
, cmpop
, rhs_only
);
6092 if (newrhs
!= NULL_TREE
)
6095 rhs_code
= TREE_CODE (rhs
);
6098 if (lhs_code
== truthop_code
&& !rhs_only
)
6100 tree newlhs
= merge_truthop_with_opposite_arm (loc
, lhs
, cmpop
, false);
6101 if (newlhs
!= NULL_TREE
)
6104 lhs_code
= TREE_CODE (lhs
);
6108 inv_code
= invert_tree_comparison (code
, HONOR_NANS (type
));
6109 if (inv_code
== rhs_code
6110 && operand_equal_p (TREE_OPERAND (rhs
, 0), TREE_OPERAND (cmpop
, 0), 0)
6111 && operand_equal_p (TREE_OPERAND (rhs
, 1), TREE_OPERAND (cmpop
, 1), 0))
6113 if (!rhs_only
&& inv_code
== lhs_code
6114 && operand_equal_p (TREE_OPERAND (lhs
, 0), TREE_OPERAND (cmpop
, 0), 0)
6115 && operand_equal_p (TREE_OPERAND (lhs
, 1), TREE_OPERAND (cmpop
, 1), 0))
6117 if (rhs
!= orig_rhs
|| lhs
!= orig_lhs
)
6118 return fold_build2_loc (loc
, truthop_code
, TREE_TYPE (cmpop
),
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */
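/* Illustrative example (not from the original source): assuming the
   bit-field layout places A and B in the same byte of

     struct s { unsigned int a : 4; unsigned int b : 4; } *p;

   the test P->A == 2 && P->B == 4 can be folded into a single load,
   mask and compare of that byte, conceptually

     (*(unsigned char *) p & 0xff) == (2 | (4 << 4))

   and likewise P->A == Q->A && P->B == Q->B becomes one AND per side
   followed by a single comparison.  Masks and shifts depend on the
   target's bit-field layout.  */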
6148 fold_truth_andor_1 (location_t loc
, enum tree_code code
, tree truth_type
,
/* If this is the "or" of two comparisons, we can do something if
   the comparisons are NE_EXPR.  If this is the "and", we can do something
   if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

   WANTED_CODE is this operation code.  For single bit fields, we can
   convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
   comparison for one-bit fields.  */
6160 enum tree_code wanted_code
;
6161 enum tree_code lcode
, rcode
;
6162 tree ll_arg
, lr_arg
, rl_arg
, rr_arg
;
6163 tree ll_inner
, lr_inner
, rl_inner
, rr_inner
;
6164 HOST_WIDE_INT ll_bitsize
, ll_bitpos
, lr_bitsize
, lr_bitpos
;
6165 HOST_WIDE_INT rl_bitsize
, rl_bitpos
, rr_bitsize
, rr_bitpos
;
6166 HOST_WIDE_INT xll_bitpos
, xlr_bitpos
, xrl_bitpos
, xrr_bitpos
;
6167 HOST_WIDE_INT lnbitsize
, lnbitpos
, rnbitsize
, rnbitpos
;
6168 int ll_unsignedp
, lr_unsignedp
, rl_unsignedp
, rr_unsignedp
;
6169 int ll_reversep
, lr_reversep
, rl_reversep
, rr_reversep
;
6170 machine_mode ll_mode
, lr_mode
, rl_mode
, rr_mode
;
6171 scalar_int_mode lnmode
, rnmode
;
6172 tree ll_mask
, lr_mask
, rl_mask
, rr_mask
;
6173 tree ll_and_mask
, lr_and_mask
, rl_and_mask
, rr_and_mask
;
6174 tree l_const
, r_const
;
6175 tree lntype
, rntype
, result
;
6176 HOST_WIDE_INT first_bit
, end_bit
;
/* Start by getting the comparison codes.  Fail if anything is volatile.
   If one operand is a BIT_AND_EXPR with the constant one, treat it as if
   it were surrounded with a NE_EXPR.  */
6183 if (TREE_SIDE_EFFECTS (lhs
) || TREE_SIDE_EFFECTS (rhs
))
6186 lcode
= TREE_CODE (lhs
);
6187 rcode
= TREE_CODE (rhs
);
6189 if (lcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (lhs
, 1)))
6191 lhs
= build2 (NE_EXPR
, truth_type
, lhs
,
6192 build_int_cst (TREE_TYPE (lhs
), 0));
6196 if (rcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (rhs
, 1)))
6198 rhs
= build2 (NE_EXPR
, truth_type
, rhs
,
6199 build_int_cst (TREE_TYPE (rhs
), 0));
6203 if (TREE_CODE_CLASS (lcode
) != tcc_comparison
6204 || TREE_CODE_CLASS (rcode
) != tcc_comparison
)
6207 ll_arg
= TREE_OPERAND (lhs
, 0);
6208 lr_arg
= TREE_OPERAND (lhs
, 1);
6209 rl_arg
= TREE_OPERAND (rhs
, 0);
6210 rr_arg
= TREE_OPERAND (rhs
, 1);
6212 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
6213 if (simple_operand_p (ll_arg
)
6214 && simple_operand_p (lr_arg
))
6216 if (operand_equal_p (ll_arg
, rl_arg
, 0)
6217 && operand_equal_p (lr_arg
, rr_arg
, 0))
6219 result
= combine_comparisons (loc
, code
, lcode
, rcode
,
6220 truth_type
, ll_arg
, lr_arg
);
6224 else if (operand_equal_p (ll_arg
, rr_arg
, 0)
6225 && operand_equal_p (lr_arg
, rl_arg
, 0))
6227 result
= combine_comparisons (loc
, code
, lcode
,
6228 swap_tree_comparison (rcode
),
6229 truth_type
, ll_arg
, lr_arg
);
6235 code
= ((code
== TRUTH_AND_EXPR
|| code
== TRUTH_ANDIF_EXPR
)
6236 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
);
/* If the RHS can be evaluated unconditionally and its operands are
   simple, it wins to evaluate the RHS unconditionally on machines
   with expensive branches.  In this case, this isn't a comparison
   that can be merged.  */
6243 if (BRANCH_COST (optimize_function_for_speed_p (cfun
),
6245 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg
))
6246 && simple_operand_p (rl_arg
)
6247 && simple_operand_p (rr_arg
))
6249 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
6250 if (code
== TRUTH_OR_EXPR
6251 && lcode
== NE_EXPR
&& integer_zerop (lr_arg
)
6252 && rcode
== NE_EXPR
&& integer_zerop (rr_arg
)
6253 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
)
6254 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg
)))
6255 return build2_loc (loc
, NE_EXPR
, truth_type
,
6256 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
6258 build_int_cst (TREE_TYPE (ll_arg
), 0));
6260 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
6261 if (code
== TRUTH_AND_EXPR
6262 && lcode
== EQ_EXPR
&& integer_zerop (lr_arg
)
6263 && rcode
== EQ_EXPR
&& integer_zerop (rr_arg
)
6264 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
)
6265 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg
)))
6266 return build2_loc (loc
, EQ_EXPR
, truth_type
,
6267 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
6269 build_int_cst (TREE_TYPE (ll_arg
), 0));
/* See if the comparisons can be merged.  Then get all the parameters for
   each side.  */
6275 if ((lcode
!= EQ_EXPR
&& lcode
!= NE_EXPR
)
6276 || (rcode
!= EQ_EXPR
&& rcode
!= NE_EXPR
))
6279 ll_reversep
= lr_reversep
= rl_reversep
= rr_reversep
= 0;
6281 ll_inner
= decode_field_reference (loc
, &ll_arg
,
6282 &ll_bitsize
, &ll_bitpos
, &ll_mode
,
6283 &ll_unsignedp
, &ll_reversep
, &volatilep
,
6284 &ll_mask
, &ll_and_mask
);
6285 lr_inner
= decode_field_reference (loc
, &lr_arg
,
6286 &lr_bitsize
, &lr_bitpos
, &lr_mode
,
6287 &lr_unsignedp
, &lr_reversep
, &volatilep
,
6288 &lr_mask
, &lr_and_mask
);
6289 rl_inner
= decode_field_reference (loc
, &rl_arg
,
6290 &rl_bitsize
, &rl_bitpos
, &rl_mode
,
6291 &rl_unsignedp
, &rl_reversep
, &volatilep
,
6292 &rl_mask
, &rl_and_mask
);
6293 rr_inner
= decode_field_reference (loc
, &rr_arg
,
6294 &rr_bitsize
, &rr_bitpos
, &rr_mode
,
6295 &rr_unsignedp
, &rr_reversep
, &volatilep
,
6296 &rr_mask
, &rr_and_mask
);
/* It must be true that the inner operation on the lhs of each
   comparison must be the same if we are to be able to do anything.
   Then see if we have constants.  If not, the same must be true for
   the rhs.  */
6303 || ll_reversep
!= rl_reversep
6304 || ll_inner
== 0 || rl_inner
== 0
6305 || ! operand_equal_p (ll_inner
, rl_inner
, 0))
6308 if (TREE_CODE (lr_arg
) == INTEGER_CST
6309 && TREE_CODE (rr_arg
) == INTEGER_CST
)
6311 l_const
= lr_arg
, r_const
= rr_arg
;
6312 lr_reversep
= ll_reversep
;
6314 else if (lr_reversep
!= rr_reversep
6315 || lr_inner
== 0 || rr_inner
== 0
6316 || ! operand_equal_p (lr_inner
, rr_inner
, 0))
6319 l_const
= r_const
= 0;
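  /* To make the bookkeeping above concrete: for a fragment such as
	struct s { unsigned a : 3; unsigned b : 5; } *p;
	... p->a == 2 && p->b == 7 ...
     ll_inner and rl_inner both name the word underlying *p, ll_mask and
     rl_mask select the bits of each field, and l_const and r_const hold
     the constants 2 and 7 being compared against (an illustrative
     layout; the actual bit positions are target-dependent).  */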
6321 /* If either comparison code is not correct for our logical operation,
6322 fail. However, we can convert a one-bit comparison against zero into
6323 the opposite comparison against that bit being set in the field. */
6325 wanted_code
= (code
== TRUTH_AND_EXPR
? EQ_EXPR
: NE_EXPR
);
6326 if (lcode
!= wanted_code
)
6328 if (l_const
&& integer_zerop (l_const
) && integer_pow2p (ll_mask
))
6330 /* Make the left operand unsigned, since we are only interested
6331 in the value of one bit. Otherwise we are doing the wrong
6340 /* This is analogous to the code for l_const above. */
6341 if (rcode
!= wanted_code
)
6343 if (r_const
&& integer_zerop (r_const
) && integer_pow2p (rl_mask
))
  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
                      TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
                      volatilep, &lnmode))
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
  if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
                         size_int (xll_bitpos));
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
                         size_int (xrl_bitpos));
6380 l_const
= fold_convert_loc (loc
, lntype
, l_const
);
6381 l_const
= unextend (l_const
, ll_bitsize
, ll_unsignedp
, ll_and_mask
);
6382 l_const
= const_binop (LSHIFT_EXPR
, l_const
, size_int (xll_bitpos
));
6383 if (! integer_zerop (const_binop (BIT_AND_EXPR
, l_const
,
6384 fold_build1_loc (loc
, BIT_NOT_EXPR
,
6387 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
6389 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
6394 r_const
= fold_convert_loc (loc
, lntype
, r_const
);
6395 r_const
= unextend (r_const
, rl_bitsize
, rl_unsignedp
, rl_and_mask
);
6396 r_const
= const_binop (LSHIFT_EXPR
, r_const
, size_int (xrl_bitpos
));
6397 if (! integer_zerop (const_binop (BIT_AND_EXPR
, r_const
,
6398 fold_build1_loc (loc
, BIT_NOT_EXPR
,
6401 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
6403 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
6407 /* If the right sides are not constant, do the same for it. Also,
6408 disallow this optimization if a size, signedness or storage order
6409 mismatch occurs between the left and right sides. */
6412 if (ll_bitsize
!= lr_bitsize
|| rl_bitsize
!= rr_bitsize
6413 || ll_unsignedp
!= lr_unsignedp
|| rl_unsignedp
!= rr_unsignedp
6414 || ll_reversep
!= lr_reversep
6415 /* Make sure the two fields on the right
6416 correspond to the left without being swapped. */
6417 || ll_bitpos
- rl_bitpos
!= lr_bitpos
- rr_bitpos
)
6420 first_bit
= MIN (lr_bitpos
, rr_bitpos
);
6421 end_bit
= MAX (lr_bitpos
+ lr_bitsize
, rr_bitpos
+ rr_bitsize
);
6422 if (!get_best_mode (end_bit
- first_bit
, first_bit
, 0, 0,
6423 TYPE_ALIGN (TREE_TYPE (lr_inner
)), BITS_PER_WORD
,
6424 volatilep
, &rnmode
))
6427 rnbitsize
= GET_MODE_BITSIZE (rnmode
);
6428 rnbitpos
= first_bit
& ~ (rnbitsize
- 1);
6429 rntype
= lang_hooks
.types
.type_for_size (rnbitsize
, 1);
6430 xlr_bitpos
= lr_bitpos
- rnbitpos
, xrr_bitpos
= rr_bitpos
- rnbitpos
;
6432 if (lr_reversep
? !BYTES_BIG_ENDIAN
: BYTES_BIG_ENDIAN
)
6434 xlr_bitpos
= rnbitsize
- xlr_bitpos
- lr_bitsize
;
6435 xrr_bitpos
= rnbitsize
- xrr_bitpos
- rr_bitsize
;
6438 lr_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
,
6440 size_int (xlr_bitpos
));
6441 rr_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
,
6443 size_int (xrr_bitpos
));
6445 /* Make a mask that corresponds to both fields being compared.
6446 Do this for both items being compared. If the operands are the
6447 same size and the bits being compared are in the same position
6448 then we can do this by masking both and comparing the masked
6450 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
);
6451 lr_mask
= const_binop (BIT_IOR_EXPR
, lr_mask
, rr_mask
);
6452 if (lnbitsize
== rnbitsize
6453 && xll_bitpos
== xlr_bitpos
6457 lhs
= make_bit_field_ref (loc
, ll_inner
, ll_arg
,
6458 lntype
, lnbitsize
, lnbitpos
,
6459 ll_unsignedp
|| rl_unsignedp
, ll_reversep
);
6460 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
6461 lhs
= build2 (BIT_AND_EXPR
, lntype
, lhs
, ll_mask
);
6463 rhs
= make_bit_field_ref (loc
, lr_inner
, lr_arg
,
6464 rntype
, rnbitsize
, rnbitpos
,
6465 lr_unsignedp
|| rr_unsignedp
, lr_reversep
);
6466 if (! all_ones_mask_p (lr_mask
, rnbitsize
))
6467 rhs
= build2 (BIT_AND_EXPR
, rntype
, rhs
, lr_mask
);
6469 return build2_loc (loc
, wanted_code
, truth_type
, lhs
, rhs
);
6472 /* There is still another way we can do something: If both pairs of
6473 fields being compared are adjacent, we may be able to make a wider
6474 field containing them both.
6476 Note that we still must mask the lhs/rhs expressions. Furthermore,
6477 the mask must be shifted to account for the shift done by
6478 make_bit_field_ref. */
6479 if (((ll_bitsize
+ ll_bitpos
== rl_bitpos
6480 && lr_bitsize
+ lr_bitpos
== rr_bitpos
)
6481 || (ll_bitpos
== rl_bitpos
+ rl_bitsize
6482 && lr_bitpos
== rr_bitpos
+ rr_bitsize
))
6490 lhs
= make_bit_field_ref (loc
, ll_inner
, ll_arg
, lntype
,
6491 ll_bitsize
+ rl_bitsize
,
6492 MIN (ll_bitpos
, rl_bitpos
),
6493 ll_unsignedp
, ll_reversep
);
6494 rhs
= make_bit_field_ref (loc
, lr_inner
, lr_arg
, rntype
,
6495 lr_bitsize
+ rr_bitsize
,
6496 MIN (lr_bitpos
, rr_bitpos
),
6497 lr_unsignedp
, lr_reversep
);
6499 ll_mask
= const_binop (RSHIFT_EXPR
, ll_mask
,
6500 size_int (MIN (xll_bitpos
, xrl_bitpos
)));
6501 lr_mask
= const_binop (RSHIFT_EXPR
, lr_mask
,
6502 size_int (MIN (xlr_bitpos
, xrr_bitpos
)));
6504 /* Convert to the smaller type before masking out unwanted bits. */
6506 if (lntype
!= rntype
)
6508 if (lnbitsize
> rnbitsize
)
6510 lhs
= fold_convert_loc (loc
, rntype
, lhs
);
6511 ll_mask
= fold_convert_loc (loc
, rntype
, ll_mask
);
6514 else if (lnbitsize
< rnbitsize
)
6516 rhs
= fold_convert_loc (loc
, lntype
, rhs
);
6517 lr_mask
= fold_convert_loc (loc
, lntype
, lr_mask
);
6522 if (! all_ones_mask_p (ll_mask
, ll_bitsize
+ rl_bitsize
))
6523 lhs
= build2 (BIT_AND_EXPR
, type
, lhs
, ll_mask
);
6525 if (! all_ones_mask_p (lr_mask
, lr_bitsize
+ rr_bitsize
))
6526 rhs
= build2 (BIT_AND_EXPR
, type
, rhs
, lr_mask
);
6528 return build2_loc (loc
, wanted_code
, truth_type
, lhs
, rhs
);
  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
                           const_binop (BIT_AND_EXPR, result, r_const)) != 1)
    {
      if (wanted_code == NE_EXPR)
        {
          warning (0, "%<or%> of unmatched not-equal tests is always 1");
          return constant_boolean_node (true, truth_type);
        }
      else
        {
          warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
          return constant_boolean_node (false, truth_type);
        }
    }
  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (loc, ll_inner, ll_arg,
                               lntype, lnbitsize, lnbitpos,
                               ll_unsignedp || rl_unsignedp, ll_reversep);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);

  return build2_loc (loc, wanted_code, truth_type, result,
                     const_binop (BIT_IOR_EXPR, l_const, r_const));
}
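/* A worked instance of the merge performed above: with the illustrative
   layout from the earlier comment (a : 3 at bit 0, b : 5 at bit 3 on a
   little-endian target), "p->a == 2 && p->b == 7" becomes a single
   byte-sized test

	*(unsigned char *) p == (2 | (7 << 3))

   and the BIT_AND_EXPR is dropped entirely because the combined mask
   0xff is all ones for the 8-bit field.  */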
/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.

   We set *STRICT_OVERFLOW_P to true if the return value depends on
   signed overflow being undefined.  Otherwise we do not change
   *STRICT_OVERFLOW_P.  */

static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
                bool *strict_overflow_p)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  static int depth;
  tree ret;

  if (depth > 3)
    return NULL_TREE;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
  depth--;

  return ret;
}
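/* A worked instance of the transformation documented above: dividing
   (X * 8) + (Y * 16) by 4 gives (X * 2) + (Y * 4) because the division
   distributes exactly over both multiplications, and (X * 6) / 2 gives
   X * 3 for the same reason.  Both require that the original expression
   either cannot overflow or has undefined overflow in the source
   language.  */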
static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
                  bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  tree ctype = (wide_type != 0
                && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
                    > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
                ? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;
  bool sub_strict_overflow_p;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
6644 /* For a constant, we can always simplify if we are a multiply
6645 or (for divide and modulus) if it is a multiple of our constant. */
6646 if (code
== MULT_EXPR
6647 || wi::multiple_of_p (wi::to_wide (t
), wi::to_wide (c
),
6650 tree tem
= const_binop (code
, fold_convert (ctype
, t
),
6651 fold_convert (ctype
, c
));
6652 /* If the multiplication overflowed, we lost information on it.
6653 See PR68142 and PR69845. */
6654 if (TREE_OVERFLOW (tem
))
6660 CASE_CONVERT
: case NON_LVALUE_EXPR
:
6661 /* If op0 is an expression ... */
6662 if ((COMPARISON_CLASS_P (op0
)
6663 || UNARY_CLASS_P (op0
)
6664 || BINARY_CLASS_P (op0
)
6665 || VL_EXP_CLASS_P (op0
)
6666 || EXPRESSION_CLASS_P (op0
))
6667 /* ... and has wrapping overflow, and its type is smaller
6668 than ctype, then we cannot pass through as widening. */
6669 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0
))
6670 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0
)))
6671 && (TYPE_PRECISION (ctype
)
6672 > TYPE_PRECISION (TREE_TYPE (op0
))))
6673 /* ... or this is a truncation (t is narrower than op0),
6674 then we cannot pass through this narrowing. */
6675 || (TYPE_PRECISION (type
)
6676 < TYPE_PRECISION (TREE_TYPE (op0
)))
6677 /* ... or signedness changes for division or modulus,
6678 then we cannot pass through this conversion. */
6679 || (code
!= MULT_EXPR
6680 && (TYPE_UNSIGNED (ctype
)
6681 != TYPE_UNSIGNED (TREE_TYPE (op0
))))
6682 /* ... or has undefined overflow while the converted to
6683 type has not, we cannot do the operation in the inner type
6684 as that would introduce undefined overflow. */
6685 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0
))
6686 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0
)))
6687 && !TYPE_OVERFLOW_UNDEFINED (type
))))
6690 /* Pass the constant down and see if we can make a simplification. If
6691 we can, replace this expression with the inner simplification for
6692 possible later conversion to our or some other type. */
6693 if ((t2
= fold_convert (TREE_TYPE (op0
), c
)) != 0
6694 && TREE_CODE (t2
) == INTEGER_CST
6695 && !TREE_OVERFLOW (t2
)
6696 && (t1
= extract_muldiv (op0
, t2
, code
,
6697 code
== MULT_EXPR
? ctype
: NULL_TREE
,
6698 strict_overflow_p
)) != 0)
6703 /* If widening the type changes it from signed to unsigned, then we
6704 must avoid building ABS_EXPR itself as unsigned. */
6705 if (TYPE_UNSIGNED (ctype
) && !TYPE_UNSIGNED (type
))
6707 tree cstype
= (*signed_type_for
) (ctype
);
6708 if ((t1
= extract_muldiv (op0
, c
, code
, cstype
, strict_overflow_p
))
6711 t1
= fold_build1 (tcode
, cstype
, fold_convert (cstype
, t1
));
6712 return fold_convert (ctype
, t1
);
6716 /* If the constant is negative, we cannot simplify this. */
6717 if (tree_int_cst_sgn (c
) == -1)
6721 /* For division and modulus, type can't be unsigned, as e.g.
6722 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6723 For signed types, even with wrapping overflow, this is fine. */
6724 if (code
!= MULT_EXPR
&& TYPE_UNSIGNED (type
))
6726 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
, strict_overflow_p
))
6728 return fold_build1 (tcode
, ctype
, fold_convert (ctype
, t1
));
6731 case MIN_EXPR
: case MAX_EXPR
:
6732 /* If widening the type changes the signedness, then we can't perform
6733 this optimization as that changes the result. */
6734 if (TYPE_UNSIGNED (ctype
) != TYPE_UNSIGNED (type
))
6737 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6738 sub_strict_overflow_p
= false;
6739 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
,
6740 &sub_strict_overflow_p
)) != 0
6741 && (t2
= extract_muldiv (op1
, c
, code
, wide_type
,
6742 &sub_strict_overflow_p
)) != 0)
6744 if (tree_int_cst_sgn (c
) < 0)
6745 tcode
= (tcode
== MIN_EXPR
? MAX_EXPR
: MIN_EXPR
);
6746 if (sub_strict_overflow_p
)
6747 *strict_overflow_p
= true;
6748 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
6749 fold_convert (ctype
, t2
));
6753 case LSHIFT_EXPR
: case RSHIFT_EXPR
:
6754 /* If the second operand is constant, this is a multiplication
6755 or floor division, by a power of two, so we can treat it that
6756 way unless the multiplier or divisor overflows. Signed
6757 left-shift overflow is implementation-defined rather than
6758 undefined in C90, so do not convert signed left shift into
6760 if (TREE_CODE (op1
) == INTEGER_CST
6761 && (tcode
== RSHIFT_EXPR
|| TYPE_UNSIGNED (TREE_TYPE (op0
)))
6762 /* const_binop may not detect overflow correctly,
6763 so check for it explicitly here. */
6764 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node
)),
6766 && (t1
= fold_convert (ctype
,
6767 const_binop (LSHIFT_EXPR
, size_one_node
,
6769 && !TREE_OVERFLOW (t1
))
6770 return extract_muldiv (build2 (tcode
== LSHIFT_EXPR
6771 ? MULT_EXPR
: FLOOR_DIV_EXPR
,
6773 fold_convert (ctype
, op0
),
6775 c
, code
, wide_type
, strict_overflow_p
);
6778 case PLUS_EXPR
: case MINUS_EXPR
:
6779 /* See if we can eliminate the operation on both sides. If we can, we
6780 can return a new PLUS or MINUS. If we can't, the only remaining
6781 cases where we can do anything are if the second operand is a
6783 sub_strict_overflow_p
= false;
6784 t1
= extract_muldiv (op0
, c
, code
, wide_type
, &sub_strict_overflow_p
);
6785 t2
= extract_muldiv (op1
, c
, code
, wide_type
, &sub_strict_overflow_p
);
6786 if (t1
!= 0 && t2
!= 0
6787 && TYPE_OVERFLOW_WRAPS (ctype
)
6788 && (code
== MULT_EXPR
6789 /* If not multiplication, we can only do this if both operands
6790 are divisible by c. */
6791 || (multiple_of_p (ctype
, op0
, c
)
6792 && multiple_of_p (ctype
, op1
, c
))))
6794 if (sub_strict_overflow_p
)
6795 *strict_overflow_p
= true;
6796 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
6797 fold_convert (ctype
, t2
));
6800 /* If this was a subtraction, negate OP1 and set it to be an addition.
6801 This simplifies the logic below. */
6802 if (tcode
== MINUS_EXPR
)
6804 tcode
= PLUS_EXPR
, op1
= negate_expr (op1
);
6805 /* If OP1 was not easily negatable, the constant may be OP0. */
6806 if (TREE_CODE (op0
) == INTEGER_CST
)
6808 std::swap (op0
, op1
);
6813 if (TREE_CODE (op1
) != INTEGER_CST
)
6816 /* If either OP1 or C are negative, this optimization is not safe for
6817 some of the division and remainder types while for others we need
6818 to change the code. */
6819 if (tree_int_cst_sgn (op1
) < 0 || tree_int_cst_sgn (c
) < 0)
6821 if (code
== CEIL_DIV_EXPR
)
6822 code
= FLOOR_DIV_EXPR
;
6823 else if (code
== FLOOR_DIV_EXPR
)
6824 code
= CEIL_DIV_EXPR
;
6825 else if (code
!= MULT_EXPR
6826 && code
!= CEIL_MOD_EXPR
&& code
!= FLOOR_MOD_EXPR
)
6830 /* If it's a multiply or a division/modulus operation of a multiple
6831 of our constant, do the operation and verify it doesn't overflow. */
6832 if (code
== MULT_EXPR
6833 || wi::multiple_of_p (wi::to_wide (op1
), wi::to_wide (c
),
6836 op1
= const_binop (code
, fold_convert (ctype
, op1
),
6837 fold_convert (ctype
, c
));
6838 /* We allow the constant to overflow with wrapping semantics. */
6840 || (TREE_OVERFLOW (op1
) && !TYPE_OVERFLOW_WRAPS (ctype
)))
6846 /* If we have an unsigned type, we cannot widen the operation since it
6847 will change the result if the original computation overflowed. */
6848 if (TYPE_UNSIGNED (ctype
) && ctype
!= type
)
6851 /* The last case is if we are a multiply. In that case, we can
6852 apply the distributive law to commute the multiply and addition
6853 if the multiplication of the constants doesn't overflow
6854 and overflow is defined. With undefined overflow
6855 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.
6856 But fold_plusminus_mult_expr would factor back any power-of-two
6857 value so do not distribute in the first place in this case. */
6858 if (code
== MULT_EXPR
6859 && TYPE_OVERFLOW_WRAPS (ctype
)
6860 && !(tree_fits_shwi_p (c
) && pow2p_hwi (absu_hwi (tree_to_shwi (c
)))))
6861 return fold_build2 (tcode
, ctype
,
6862 fold_build2 (code
, ctype
,
6863 fold_convert (ctype
, op0
),
6864 fold_convert (ctype
, c
)),
6870 /* We have a special case here if we are doing something like
6871 (C * 8) % 4 since we know that's zero. */
6872 if ((code
== TRUNC_MOD_EXPR
|| code
== CEIL_MOD_EXPR
6873 || code
== FLOOR_MOD_EXPR
|| code
== ROUND_MOD_EXPR
)
6874 /* If the multiplication can overflow we cannot optimize this. */
6875 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t
))
6876 && TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
6877 && wi::multiple_of_p (wi::to_wide (op1
), wi::to_wide (c
),
6880 *strict_overflow_p
= true;
6881 return omit_one_operand (type
, integer_zero_node
, op0
);
6884 /* ... fall through ... */
6886 case TRUNC_DIV_EXPR
: case CEIL_DIV_EXPR
: case FLOOR_DIV_EXPR
:
6887 case ROUND_DIV_EXPR
: case EXACT_DIV_EXPR
:
6888 /* If we can extract our operation from the LHS, do so and return a
6889 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6890 do something only if the second operand is a constant. */
6892 && TYPE_OVERFLOW_WRAPS (ctype
)
6893 && (t1
= extract_muldiv (op0
, c
, code
, wide_type
,
6894 strict_overflow_p
)) != 0)
6895 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
6896 fold_convert (ctype
, op1
));
6897 else if (tcode
== MULT_EXPR
&& code
== MULT_EXPR
6898 && TYPE_OVERFLOW_WRAPS (ctype
)
6899 && (t1
= extract_muldiv (op1
, c
, code
, wide_type
,
6900 strict_overflow_p
)) != 0)
6901 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
6902 fold_convert (ctype
, t1
));
6903 else if (TREE_CODE (op1
) != INTEGER_CST
)
6906 /* If these are the same operation types, we can associate them
6907 assuming no overflow. */
6910 bool overflow_p
= false;
6911 wi::overflow_type overflow_mul
;
6912 signop sign
= TYPE_SIGN (ctype
);
6913 unsigned prec
= TYPE_PRECISION (ctype
);
6914 wide_int mul
= wi::mul (wi::to_wide (op1
, prec
),
6915 wi::to_wide (c
, prec
),
6916 sign
, &overflow_mul
);
6917 overflow_p
= TREE_OVERFLOW (c
) | TREE_OVERFLOW (op1
);
6919 && ((sign
== UNSIGNED
&& tcode
!= MULT_EXPR
) || sign
== SIGNED
))
6922 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
6923 wide_int_to_tree (ctype
, mul
));
6926 /* If these operations "cancel" each other, we have the main
6927 optimizations of this pass, which occur when either constant is a
6928 multiple of the other, in which case we replace this with either an
6929 operation or CODE or TCODE.
6931 If we have an unsigned type, we cannot do this since it will change
6932 the result if the original computation overflowed. */
6933 if (TYPE_OVERFLOW_UNDEFINED (ctype
)
6934 && ((code
== MULT_EXPR
&& tcode
== EXACT_DIV_EXPR
)
6935 || (tcode
== MULT_EXPR
6936 && code
!= TRUNC_MOD_EXPR
&& code
!= CEIL_MOD_EXPR
6937 && code
!= FLOOR_MOD_EXPR
&& code
!= ROUND_MOD_EXPR
6938 && code
!= MULT_EXPR
)))
6940 if (wi::multiple_of_p (wi::to_wide (op1
), wi::to_wide (c
),
6943 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
6944 *strict_overflow_p
= true;
6945 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
6946 fold_convert (ctype
,
6947 const_binop (TRUNC_DIV_EXPR
,
6950 else if (wi::multiple_of_p (wi::to_wide (c
), wi::to_wide (op1
),
6953 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
6954 *strict_overflow_p
= true;
6955 return fold_build2 (code
, ctype
, fold_convert (ctype
, op0
),
6956 fold_convert (ctype
,
6957 const_binop (TRUNC_DIV_EXPR
,
/* Return a node which has the indicated constant VALUE (either 0 or
   1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
   and is of the indicated TYPE.  */

tree
constant_boolean_node (bool value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else if (TREE_CODE (type) == VECTOR_TYPE)
    return build_vector_from_val (type,
                                  build_int_cst (TREE_TYPE (type),
                                                 value ? -1 : 0));
  else
    return fold_convert (type, value ? integer_one_node : integer_zero_node);
}
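/* For example, constant_boolean_node (true, boolean_type_node) yields
   boolean_true_node, while for a vector mask type it yields a vector
   whose elements are all built from -1, matching the {-1,-1,..}
   convention described above.  */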
6990 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6991 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6992 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6993 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6994 COND is the first argument to CODE; otherwise (as in the example
6995 given here), it is the second argument. TYPE is the type of the
6996 original expression. Return NULL_TREE if no simplification is
7000 fold_binary_op_with_conditional_arg (location_t loc
,
7001 enum tree_code code
,
7002 tree type
, tree op0
, tree op1
,
7003 tree cond
, tree arg
, int cond_first_p
)
7005 tree cond_type
= cond_first_p
? TREE_TYPE (op0
) : TREE_TYPE (op1
);
7006 tree arg_type
= cond_first_p
? TREE_TYPE (op1
) : TREE_TYPE (op0
);
7007 tree test
, true_value
, false_value
;
7008 tree lhs
= NULL_TREE
;
7009 tree rhs
= NULL_TREE
;
7010 enum tree_code cond_code
= COND_EXPR
;
7012 /* Do not move possibly trapping operations into the conditional as this
7013 pessimizes code and causes gimplification issues when applied late. */
7014 if (operation_could_trap_p (code
, FLOAT_TYPE_P (type
),
7015 ANY_INTEGRAL_TYPE_P (type
)
7016 && TYPE_OVERFLOW_TRAPS (type
), op1
))
7019 if (TREE_CODE (cond
) == COND_EXPR
7020 || TREE_CODE (cond
) == VEC_COND_EXPR
)
7022 test
= TREE_OPERAND (cond
, 0);
7023 true_value
= TREE_OPERAND (cond
, 1);
7024 false_value
= TREE_OPERAND (cond
, 2);
7025 /* If this operand throws an expression, then it does not make
7026 sense to try to perform a logical or arithmetic operation
7028 if (VOID_TYPE_P (TREE_TYPE (true_value
)))
7030 if (VOID_TYPE_P (TREE_TYPE (false_value
)))
7033 else if (!(TREE_CODE (type
) != VECTOR_TYPE
7034 && TREE_CODE (TREE_TYPE (cond
)) == VECTOR_TYPE
))
7036 tree testtype
= TREE_TYPE (cond
);
7038 true_value
= constant_boolean_node (true, testtype
);
7039 false_value
= constant_boolean_node (false, testtype
);
7042 /* Detect the case of mixing vector and scalar types - bail out. */
7045 if (TREE_CODE (TREE_TYPE (test
)) == VECTOR_TYPE
)
7046 cond_code
= VEC_COND_EXPR
;
7048 /* This transformation is only worthwhile if we don't have to wrap ARG
7049 in a SAVE_EXPR and the operation can be simplified without recursing
7050 on at least one of the branches once its pushed inside the COND_EXPR. */
7051 if (!TREE_CONSTANT (arg
)
7052 && (TREE_SIDE_EFFECTS (arg
)
7053 || TREE_CODE (arg
) == COND_EXPR
|| TREE_CODE (arg
) == VEC_COND_EXPR
7054 || TREE_CONSTANT (true_value
) || TREE_CONSTANT (false_value
)))
7057 arg
= fold_convert_loc (loc
, arg_type
, arg
);
7060 true_value
= fold_convert_loc (loc
, cond_type
, true_value
);
7062 lhs
= fold_build2_loc (loc
, code
, type
, true_value
, arg
);
7064 lhs
= fold_build2_loc (loc
, code
, type
, arg
, true_value
);
7068 false_value
= fold_convert_loc (loc
, cond_type
, false_value
);
7070 rhs
= fold_build2_loc (loc
, code
, type
, false_value
, arg
);
7072 rhs
= fold_build2_loc (loc
, code
, type
, arg
, false_value
);
7075 /* Check that we have simplified at least one of the branches. */
7076 if (!TREE_CONSTANT (arg
) && !TREE_CONSTANT (lhs
) && !TREE_CONSTANT (rhs
))
7079 return fold_build3_loc (loc
, cond_code
, type
, test
, lhs
, rhs
);
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

bool
fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (type))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (type))
    return true;

  /* There is no case that is safe for all rounding modes.  */
  if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
    return false;

  /* In a vector or complex, we would need to check the sign of all zeros.  */
  if (TREE_CODE (addend) == VECTOR_CST)
    addend = uniform_vector_p (addend);
  if (!addend || TREE_CODE (addend) != REAL_CST)
    return false;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X with default rounding.  */
  return negate;
}
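/* Concretely: "x + 0.0" cannot be folded to "x" when x might be -0.0,
   since -0.0 + 0.0 is +0.0 under the default rounding mode, whereas
   "x - 0.0" is x in every case; "x + -0.0" is accepted by treating it
   as "x - 0.0", as the comment above explains.  */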
/* Subroutine of match.pd that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  C1 and C2 must be INTEGER_CSTs.  */
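/* For example, for unsigned X, "X / 4 == 3" holds exactly when
   12 <= X <= 15, so *LO is set to 12 (3 * 4) and *HI to 15
   (3 * 4 + (4 - 1)); the caller then rewrites the division compare
   as a range test on X.  */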
7137 fold_div_compare (enum tree_code code
, tree c1
, tree c2
, tree
*lo
,
7138 tree
*hi
, bool *neg_overflow
)
7140 tree prod
, tmp
, type
= TREE_TYPE (c1
);
7141 signop sign
= TYPE_SIGN (type
);
7142 wi::overflow_type overflow
;
7144 /* We have to do this the hard way to detect unsigned overflow.
7145 prod = int_const_binop (MULT_EXPR, c1, c2); */
7146 wide_int val
= wi::mul (wi::to_wide (c1
), wi::to_wide (c2
), sign
, &overflow
);
7147 prod
= force_fit_type (type
, val
, -1, overflow
);
7148 *neg_overflow
= false;
7150 if (sign
== UNSIGNED
)
7152 tmp
= int_const_binop (MINUS_EXPR
, c1
, build_int_cst (type
, 1));
7155 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
7156 val
= wi::add (wi::to_wide (prod
), wi::to_wide (tmp
), sign
, &overflow
);
7157 *hi
= force_fit_type (type
, val
, -1, overflow
| TREE_OVERFLOW (prod
));
7159 else if (tree_int_cst_sgn (c1
) >= 0)
7161 tmp
= int_const_binop (MINUS_EXPR
, c1
, build_int_cst (type
, 1));
7162 switch (tree_int_cst_sgn (c2
))
7165 *neg_overflow
= true;
7166 *lo
= int_const_binop (MINUS_EXPR
, prod
, tmp
);
7171 *lo
= fold_negate_const (tmp
, type
);
7176 *hi
= int_const_binop (PLUS_EXPR
, prod
, tmp
);
7186 /* A negative divisor reverses the relational operators. */
7187 code
= swap_tree_comparison (code
);
7189 tmp
= int_const_binop (PLUS_EXPR
, c1
, build_int_cst (type
, 1));
7190 switch (tree_int_cst_sgn (c2
))
7193 *hi
= int_const_binop (MINUS_EXPR
, prod
, tmp
);
7198 *hi
= fold_negate_const (tmp
, type
);
7203 *neg_overflow
= true;
7204 *lo
= int_const_binop (PLUS_EXPR
, prod
, tmp
);
7213 if (code
!= EQ_EXPR
&& code
!= NE_EXPR
)
7216 if (TREE_OVERFLOW (*lo
)
7217 || operand_equal_p (*lo
, TYPE_MIN_VALUE (type
), 0))
7219 if (TREE_OVERFLOW (*hi
)
7220 || operand_equal_p (*hi
, TYPE_MAX_VALUE (type
), 0))
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of the test
   using a sign test.  Otherwise return NULL.  TYPE is the desired
   result type.  */

static tree
fold_single_bit_test_into_sign_test (location_t loc,
                                     enum tree_code code, tree arg0, tree arg1,
                                     tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      /* If we have (A & C) != 0 where C is the sign bit of A, convert
         this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      if (arg00 != NULL_TREE
          /* This is only a win if casting to a signed type is cheap,
             i.e. when arg00's type is not a partial mode.  */
          && type_has_mode_precision_p (TREE_TYPE (arg00)))
        {
          tree stype = signed_type_for (TREE_TYPE (arg00));
          return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
                                  result_type,
                                  fold_convert_loc (loc, stype, arg00),
                                  build_int_cst (stype, 0));
        }
    }

  return NULL_TREE;
}
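/* For instance, with a 32-bit unsigned int x, "(x & 0x80000000u) != 0"
   tests only the sign bit and is rewritten as "(int) x < 0"; the
   EQ_EXPR form "(x & 0x80000000u) == 0" likewise becomes
   "(int) x >= 0".  */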
7262 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7263 equality/inequality test, then return a simplified form of
7264 the test using shifts and logical operations. Otherwise return
7265 NULL. TYPE is the desired result type. */
7268 fold_single_bit_test (location_t loc
, enum tree_code code
,
7269 tree arg0
, tree arg1
, tree result_type
)
7271 /* If this is testing a single bit, we can optimize the test. */
7272 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
7273 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
7274 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
7276 tree inner
= TREE_OPERAND (arg0
, 0);
7277 tree type
= TREE_TYPE (arg0
);
7278 int bitnum
= tree_log2 (TREE_OPERAND (arg0
, 1));
7279 scalar_int_mode operand_mode
= SCALAR_INT_TYPE_MODE (type
);
7281 tree signed_type
, unsigned_type
, intermediate_type
;
7284 /* First, see if we can fold the single bit test into a sign-bit
7286 tem
= fold_single_bit_test_into_sign_test (loc
, code
, arg0
, arg1
,
7291 /* Otherwise we have (A & C) != 0 where C is a single bit,
7292 convert that into ((A >> C2) & 1). Where C2 = log2(C).
7293 Similarly for (A & C) == 0. */
7295 /* If INNER is a right shift of a constant and it plus BITNUM does
7296 not overflow, adjust BITNUM and INNER. */
7297 if (TREE_CODE (inner
) == RSHIFT_EXPR
7298 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
7299 && bitnum
< TYPE_PRECISION (type
)
7300 && wi::ltu_p (wi::to_wide (TREE_OPERAND (inner
, 1)),
7301 TYPE_PRECISION (type
) - bitnum
))
7303 bitnum
+= tree_to_uhwi (TREE_OPERAND (inner
, 1));
7304 inner
= TREE_OPERAND (inner
, 0);
7307 /* If we are going to be able to omit the AND below, we must do our
7308 operations as unsigned. If we must use the AND, we have a choice.
7309 Normally unsigned is faster, but for some machines signed is. */
7310 ops_unsigned
= (load_extend_op (operand_mode
) == SIGN_EXTEND
7311 && !flag_syntax_only
) ? 0 : 1;
7313 signed_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 0);
7314 unsigned_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 1);
7315 intermediate_type
= ops_unsigned
? unsigned_type
: signed_type
;
7316 inner
= fold_convert_loc (loc
, intermediate_type
, inner
);
7319 inner
= build2 (RSHIFT_EXPR
, intermediate_type
,
7320 inner
, size_int (bitnum
));
7322 one
= build_int_cst (intermediate_type
, 1);
7324 if (code
== EQ_EXPR
)
7325 inner
= fold_build2_loc (loc
, BIT_XOR_EXPR
, intermediate_type
, inner
, one
);
7327 /* Put the AND last so it can combine with more things. */
7328 inner
= build2 (BIT_AND_EXPR
, intermediate_type
, inner
, one
);
7330 /* Make sure to return the proper type. */
7331 inner
= fold_convert_loc (loc
, result_type
, inner
);
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  */

bool
tree_swap_operands_p (const_tree arg0, const_tree arg1)
{
  if (CONSTANT_CLASS_P (arg1))
    return false;
  if (CONSTANT_CLASS_P (arg0))
    return true;

  if (TREE_CONSTANT (arg1))
    return false;
  if (TREE_CONSTANT (arg0))
    return true;

  /* It is preferable to swap two SSA_NAME to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return true;

  /* Put SSA_NAMEs last.  */
  if (TREE_CODE (arg1) == SSA_NAME)
    return false;
  if (TREE_CODE (arg0) == SSA_NAME)
    return true;

  /* Put variables last.  */
  if (DECL_P (arg1))
    return false;
  if (DECL_P (arg0))
    return true;

  return false;
}
/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */

static tree
fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
{
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;

  if (TREE_CODE (bound) == LT_EXPR)
    a = TREE_OPERAND (bound, 0);
  else if (TREE_CODE (bound) == GT_EXPR)
    a = TREE_OPERAND (bound, 1);
  else
    return NULL_TREE;

  typea = TREE_TYPE (a);
  if (!INTEGRAL_TYPE_P (typea)
      && !POINTER_TYPE_P (typea))
    return NULL_TREE;

  if (TREE_CODE (ineq) == LT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 1);
      y = TREE_OPERAND (ineq, 0);
    }
  else if (TREE_CODE (ineq) == GT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 0);
      y = TREE_OPERAND (ineq, 1);
    }
  else
    return NULL_TREE;

  if (TREE_TYPE (a1) != typea)
    return NULL_TREE;

  if (POINTER_TYPE_P (typea))
    {
      /* Convert the pointer types into integer before taking the difference.  */
      tree ta = fold_convert_loc (loc, ssizetype, a);
      tree ta1 = fold_convert_loc (loc, ssizetype, a1);
      diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
    }
  else
    diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);

  if (!diff || !integer_onep (diff))
    return NULL_TREE;

  return fold_build2_loc (loc, GE_EXPR, type, a, y);
}
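/* Example: with BOUND "a < x" and INEQ "a + 1 > y", the difference
   a1 - a is 1, so the result is "a >= y"; this is valid in conjunction
   with the bound because a < x <= MAX rules out the a == MAX corner
   case that "a + 1 > y" would otherwise have to guard against.  */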
7436 /* Fold a sum or difference of at least one multiplication.
7437 Returns the folded tree or NULL if no simplification could be made. */
7440 fold_plusminus_mult_expr (location_t loc
, enum tree_code code
, tree type
,
7441 tree arg0
, tree arg1
)
7443 tree arg00
, arg01
, arg10
, arg11
;
7444 tree alt0
= NULL_TREE
, alt1
= NULL_TREE
, same
;
7446 /* (A * C) +- (B * C) -> (A+-B) * C.
7447 (A * C) +- A -> A * (C+-1).
7448 We are most concerned about the case where C is a constant,
7449 but other combinations show up during loop reduction. Since
7450 it is not difficult, try all four possibilities. */
7452 if (TREE_CODE (arg0
) == MULT_EXPR
)
7454 arg00
= TREE_OPERAND (arg0
, 0);
7455 arg01
= TREE_OPERAND (arg0
, 1);
7457 else if (TREE_CODE (arg0
) == INTEGER_CST
)
7459 arg00
= build_one_cst (type
);
7464 /* We cannot generate constant 1 for fract. */
7465 if (ALL_FRACT_MODE_P (TYPE_MODE (type
)))
7468 arg01
= build_one_cst (type
);
7470 if (TREE_CODE (arg1
) == MULT_EXPR
)
7472 arg10
= TREE_OPERAND (arg1
, 0);
7473 arg11
= TREE_OPERAND (arg1
, 1);
7475 else if (TREE_CODE (arg1
) == INTEGER_CST
)
7477 arg10
= build_one_cst (type
);
7478 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7479 the purpose of this canonicalization. */
7480 if (wi::neg_p (wi::to_wide (arg1
), TYPE_SIGN (TREE_TYPE (arg1
)))
7481 && negate_expr_p (arg1
)
7482 && code
== PLUS_EXPR
)
7484 arg11
= negate_expr (arg1
);
7492 /* We cannot generate constant 1 for fract. */
7493 if (ALL_FRACT_MODE_P (TYPE_MODE (type
)))
7496 arg11
= build_one_cst (type
);
7500 /* Prefer factoring a common non-constant. */
7501 if (operand_equal_p (arg00
, arg10
, 0))
7502 same
= arg00
, alt0
= arg01
, alt1
= arg11
;
7503 else if (operand_equal_p (arg01
, arg11
, 0))
7504 same
= arg01
, alt0
= arg00
, alt1
= arg10
;
7505 else if (operand_equal_p (arg00
, arg11
, 0))
7506 same
= arg00
, alt0
= arg01
, alt1
= arg10
;
7507 else if (operand_equal_p (arg01
, arg10
, 0))
7508 same
= arg01
, alt0
= arg00
, alt1
= arg11
;
7510 /* No identical multiplicands; see if we can find a common
7511 power-of-two factor in non-power-of-two multiplies. This
7512 can help in multi-dimensional array access. */
7513 else if (tree_fits_shwi_p (arg01
) && tree_fits_shwi_p (arg11
))
7515 HOST_WIDE_INT int01
= tree_to_shwi (arg01
);
7516 HOST_WIDE_INT int11
= tree_to_shwi (arg11
);
7521 /* Move min of absolute values to int11. */
7522 if (absu_hwi (int01
) < absu_hwi (int11
))
7524 tmp
= int01
, int01
= int11
, int11
= tmp
;
7525 alt0
= arg00
, arg00
= arg10
, arg10
= alt0
;
7532 const unsigned HOST_WIDE_INT factor
= absu_hwi (int11
);
7534 && pow2p_hwi (factor
)
7535 && (int01
& (factor
- 1)) == 0
7536 /* The remainder should not be a constant, otherwise we
7537 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7538 increased the number of multiplications necessary. */
7539 && TREE_CODE (arg10
) != INTEGER_CST
)
7541 alt0
= fold_build2_loc (loc
, MULT_EXPR
, TREE_TYPE (arg00
), arg00
,
7542 build_int_cst (TREE_TYPE (arg00
),
7547 maybe_same
= alt0
, alt0
= alt1
, alt1
= maybe_same
;
7554 if (! ANY_INTEGRAL_TYPE_P (type
)
7555 || TYPE_OVERFLOW_WRAPS (type
)
7556 /* We are neither factoring zero nor minus one. */
7557 || TREE_CODE (same
) == INTEGER_CST
)
7558 return fold_build2_loc (loc
, MULT_EXPR
, type
,
7559 fold_build2_loc (loc
, code
, type
,
7560 fold_convert_loc (loc
, type
, alt0
),
7561 fold_convert_loc (loc
, type
, alt1
)),
7562 fold_convert_loc (loc
, type
, same
));
7564 /* Same may be zero and thus the operation 'code' may overflow. Likewise
7565 same may be minus one and thus the multiplication may overflow. Perform
7566 the sum operation in an unsigned type. */
7567 tree utype
= unsigned_type_for (type
);
7568 tree tem
= fold_build2_loc (loc
, code
, utype
,
7569 fold_convert_loc (loc
, utype
, alt0
),
7570 fold_convert_loc (loc
, utype
, alt1
));
7571 /* If the sum evaluated to a constant that is not -INF the multiplication
7573 if (TREE_CODE (tem
) == INTEGER_CST
7574 && (wi::to_wide (tem
)
7575 != wi::min_value (TYPE_PRECISION (utype
), SIGNED
)))
7576 return fold_build2_loc (loc
, MULT_EXPR
, type
,
7577 fold_convert (type
, tem
), same
);
7579 /* Do not resort to unsigned multiplication because
7580 we lose the no-overflow property of the expression. */
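  /* Two concrete instances of the patterns handled above: "i * 4 + i * 12"
     factors as "i * 16" via the (A * C) +- (B * C) rule, and
     "i * 4 + j * 8" uses the common power-of-two factor to become
     "(i + j * 2) * 4", which helps multi-dimensional array indexing.  */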
/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  if ((off == -1 && total_bytes > len) || off >= total_bytes)
    return 0;
  if (off == -1)
    off = 0;

  if (ptr == NULL)
    /* Dry run.  */
    return MIN (len, total_bytes - off);

  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
         number of bytes.  */
      value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);

      if (total_bytes > UNITS_PER_WORD)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      if (offset >= off && offset - off < len)
        ptr[offset - off] = value;
    }
  return MIN (len, total_bytes - off);
}
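/* Example: on a 32-bit little-endian target the INTEGER_CST 0x01020304
   is encoded as the bytes { 0x04, 0x03, 0x02, 0x01 }, while a big-endian
   target yields { 0x01, 0x02, 0x03, 0x04 }.  */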
/* Subroutine of native_encode_expr.  Encode the FIXED_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
{
  tree type = TREE_TYPE (expr);
  scalar_mode mode = SCALAR_TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  FIXED_VALUE_TYPE value;
  tree i_value, i_type;

  if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return 0;

  i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);

  if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
    return 0;

  value = TREE_FIXED_CST (expr);
  i_value = double_int_to_tree (i_type, value.data);

  return native_encode_int (i_value, ptr, len, off);
}
7664 /* Subroutine of native_encode_expr. Encode the REAL_CST
7665 specified by EXPR into the buffer PTR of length LEN bytes.
7666 Return the number of bytes placed in the buffer, or zero
7670 native_encode_real (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7672 tree type
= TREE_TYPE (expr
);
7673 int total_bytes
= GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type
));
7674 int byte
, offset
, word
, words
, bitpos
;
7675 unsigned char value
;
7677 /* There are always 32 bits in each long, no matter the size of
7678 the hosts long. We handle floating point representations with
7682 if ((off
== -1 && total_bytes
> len
) || off
>= total_bytes
)
7689 return MIN (len
, total_bytes
- off
);
7691 words
= (32 / BITS_PER_UNIT
) / UNITS_PER_WORD
;
7693 real_to_target (tmp
, TREE_REAL_CST_PTR (expr
), TYPE_MODE (type
));
7695 for (bitpos
= 0; bitpos
< total_bytes
* BITS_PER_UNIT
;
7696 bitpos
+= BITS_PER_UNIT
)
7698 byte
= (bitpos
/ BITS_PER_UNIT
) & 3;
7699 value
= (unsigned char) (tmp
[bitpos
/ 32] >> (bitpos
& 31));
7701 if (UNITS_PER_WORD
< 4)
7703 word
= byte
/ UNITS_PER_WORD
;
7704 if (WORDS_BIG_ENDIAN
)
7705 word
= (words
- 1) - word
;
7706 offset
= word
* UNITS_PER_WORD
;
7707 if (BYTES_BIG_ENDIAN
)
7708 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7710 offset
+= byte
% UNITS_PER_WORD
;
7715 if (BYTES_BIG_ENDIAN
)
7717 /* Reverse bytes within each long, or within the entire float
7718 if it's smaller than a long (for HFmode). */
7719 offset
= MIN (3, total_bytes
- 1) - offset
;
7720 gcc_assert (offset
>= 0);
7723 offset
= offset
+ ((bitpos
/ BITS_PER_UNIT
) & ~3);
7725 && offset
- off
< len
)
7726 ptr
[offset
- off
] = value
;
7728 return MIN (len
, total_bytes
- off
);
/* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
{
  int rsize, isize;
  tree part;

  part = TREE_REALPART (expr);
  rsize = native_encode_expr (part, ptr, len, off);
  if (off == -1 && rsize == 0)
    return 0;
  part = TREE_IMAGPART (expr);
  if (off != -1)
    off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
  isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
                              len - rsize, off);
  if (off == -1 && isize != rsize)
    return 0;
  return rsize + isize;
}
7756 /* Like native_encode_vector, but only encode the first COUNT elements.
7757 The other arguments are as for native_encode_vector. */
7760 native_encode_vector_part (const_tree expr
, unsigned char *ptr
, int len
,
7761 int off
, unsigned HOST_WIDE_INT count
)
7763 tree itype
= TREE_TYPE (TREE_TYPE (expr
));
7764 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (expr
))
7765 && TYPE_PRECISION (itype
) <= BITS_PER_UNIT
)
7767 /* This is the only case in which elements can be smaller than a byte.
7768 Element 0 is always in the lsb of the containing byte. */
7769 unsigned int elt_bits
= TYPE_PRECISION (itype
);
7770 int total_bytes
= CEIL (elt_bits
* count
, BITS_PER_UNIT
);
7771 if ((off
== -1 && total_bytes
> len
) || off
>= total_bytes
)
7777 /* Zero the buffer and then set bits later where necessary. */
7778 int extract_bytes
= MIN (len
, total_bytes
- off
);
7780 memset (ptr
, 0, extract_bytes
);
7782 unsigned int elts_per_byte
= BITS_PER_UNIT
/ elt_bits
;
7783 unsigned int first_elt
= off
* elts_per_byte
;
7784 unsigned int extract_elts
= extract_bytes
* elts_per_byte
;
7785 for (unsigned int i
= 0; i
< extract_elts
; ++i
)
7787 tree elt
= VECTOR_CST_ELT (expr
, first_elt
+ i
);
7788 if (TREE_CODE (elt
) != INTEGER_CST
)
7791 if (ptr
&& wi::extract_uhwi (wi::to_wide (elt
), 0, 1))
7793 unsigned int bit
= i
* elt_bits
;
7794 ptr
[bit
/ BITS_PER_UNIT
] |= 1 << (bit
% BITS_PER_UNIT
);
7797 return extract_bytes
;
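      /* For the boolean-vector case above: with 1-bit elements, eight
         elements are packed into each byte and element 0 occupies the
         least significant bit, so a mask vector { 1, 0, 1, 1, 0, 0, 0, 0 }
         is encoded as the single byte 0x0d.  */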
7801 int size
= GET_MODE_SIZE (SCALAR_TYPE_MODE (itype
));
7802 for (unsigned HOST_WIDE_INT i
= 0; i
< count
; i
++)
7809 tree elem
= VECTOR_CST_ELT (expr
, i
);
7810 int res
= native_encode_expr (elem
, ptr
? ptr
+ offset
: NULL
,
7812 if ((off
== -1 && res
!= size
) || res
== 0)
7816 return (off
== -1 && i
< count
- 1) ? 0 : offset
;
/* Subroutine of native_encode_expr.  Encode the VECTOR_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
{
  unsigned HOST_WIDE_INT count;
  if (!VECTOR_CST_NELTS (expr).is_constant (&count))
    return 0;
  return native_encode_vector_part (expr, ptr, len, off, count);
}
7838 /* Subroutine of native_encode_expr. Encode the STRING_CST
7839 specified by EXPR into the buffer PTR of length LEN bytes.
7840 Return the number of bytes placed in the buffer, or zero
7844 native_encode_string (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7846 tree type
= TREE_TYPE (expr
);
7848 /* Wide-char strings are encoded in target byte-order so native
7849 encoding them is trivial. */
7850 if (BITS_PER_UNIT
!= CHAR_BIT
7851 || TREE_CODE (type
) != ARRAY_TYPE
7852 || TREE_CODE (TREE_TYPE (type
)) != INTEGER_TYPE
7853 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type
)))
7856 HOST_WIDE_INT total_bytes
= tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr
)));
7857 if ((off
== -1 && total_bytes
> len
) || off
>= total_bytes
)
7861 len
= MIN (total_bytes
- off
, len
);
7867 if (off
< TREE_STRING_LENGTH (expr
))
7869 written
= MIN (len
, TREE_STRING_LENGTH (expr
) - off
);
7870 memcpy (ptr
, TREE_STRING_POINTER (expr
) + off
, written
);
7872 memset (ptr
+ written
, 0, len
- written
);
/* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST, REAL_CST,
   FIXED_CST, COMPLEX_CST, STRING_CST, or VECTOR_CST specified by EXPR into
   the buffer PTR of size LEN bytes.  If PTR is NULL, don't actually store
   anything, just do a dry run.  Fail either if OFF is -1 and LEN isn't
   sufficient to encode the entire EXPR, or if OFF is out of bounds.
   Otherwise, start at byte offset OFF and encode at most LEN bytes.
   Return the number of bytes placed in the buffer, or zero upon failure.  */

int
native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
{
  /* We don't support starting at negative offset and -1 is special.  */
  if (off < -1)
    return 0;

  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return native_encode_int (expr, ptr, len, off);

    case REAL_CST:
      return native_encode_real (expr, ptr, len, off);

    case FIXED_CST:
      return native_encode_fixed (expr, ptr, len, off);

    case COMPLEX_CST:
      return native_encode_complex (expr, ptr, len, off);

    case VECTOR_CST:
      return native_encode_vector (expr, ptr, len, off);

    case STRING_CST:
      return native_encode_string (expr, ptr, len, off);

    default:
      return 0;
    }
}
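/* Callers typically use the dry-run behaviour documented above: calling
   native_encode_expr (expr, NULL, len, off) returns how many bytes would
   be written without storing anything, which lets a caller size its
   buffer before doing the real encoding pass.  */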
7918 /* Similar to native_encode_expr, but also handle CONSTRUCTORs, VCEs,
7919 NON_LVALUE_EXPRs and nops. */
7922 native_encode_initializer (tree init
, unsigned char *ptr
, int len
,
7925 /* We don't support starting at negative offset and -1 is special. */
7926 if (off
< -1 || init
== NULL_TREE
)
7930 switch (TREE_CODE (init
))
7932 case VIEW_CONVERT_EXPR
:
7933 case NON_LVALUE_EXPR
:
7934 return native_encode_initializer (TREE_OPERAND (init
, 0), ptr
, len
, off
);
7936 return native_encode_expr (init
, ptr
, len
, off
);
7938 tree type
= TREE_TYPE (init
);
7939 HOST_WIDE_INT total_bytes
= int_size_in_bytes (type
);
7940 if (total_bytes
< 0)
7942 if ((off
== -1 && total_bytes
> len
) || off
>= total_bytes
)
7944 int o
= off
== -1 ? 0 : off
;
7945 if (TREE_CODE (type
) == ARRAY_TYPE
)
7947 HOST_WIDE_INT min_index
;
7948 unsigned HOST_WIDE_INT cnt
;
7949 HOST_WIDE_INT curpos
= 0, fieldsize
;
7950 constructor_elt
*ce
;
7952 if (TYPE_DOMAIN (type
) == NULL_TREE
7953 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type
))))
7956 fieldsize
= int_size_in_bytes (TREE_TYPE (type
));
7960 min_index
= tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type
)));
7962 memset (ptr
, '\0', MIN (total_bytes
- off
, len
));
7964 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init
), cnt
, ce
)
7966 tree val
= ce
->value
;
7967 tree index
= ce
->index
;
7968 HOST_WIDE_INT pos
= curpos
, count
= 0;
7970 if (index
&& TREE_CODE (index
) == RANGE_EXPR
)
7972 if (!tree_fits_shwi_p (TREE_OPERAND (index
, 0))
7973 || !tree_fits_shwi_p (TREE_OPERAND (index
, 1)))
7975 pos
= (tree_to_shwi (TREE_OPERAND (index
, 0)) - min_index
)
7977 count
= (tree_to_shwi (TREE_OPERAND (index
, 1))
7978 - tree_to_shwi (TREE_OPERAND (index
, 0)));
7982 if (!tree_fits_shwi_p (index
))
7984 pos
= (tree_to_shwi (index
) - min_index
) * fieldsize
;
7993 && (curpos
+ fieldsize
7994 <= (HOST_WIDE_INT
) off
+ len
)))
7999 memcpy (ptr
+ (curpos
- o
), ptr
+ (pos
- o
),
8002 else if (!native_encode_initializer (val
,
8016 else if (curpos
+ fieldsize
> off
8017 && curpos
< (HOST_WIDE_INT
) off
+ len
)
8019 /* Partial overlap. */
8020 unsigned char *p
= NULL
;
8026 p
= ptr
+ curpos
- off
;
8027 l
= MIN ((HOST_WIDE_INT
) off
+ len
- curpos
,
8036 if (!native_encode_initializer (val
, p
, l
, no
))
8039 curpos
+= fieldsize
;
8041 while (count
-- != 0);
8043 return MIN (total_bytes
- off
, len
);
8045 else if (TREE_CODE (type
) == RECORD_TYPE
8046 || TREE_CODE (type
) == UNION_TYPE
)
8048 unsigned HOST_WIDE_INT cnt
;
8049 constructor_elt
*ce
;
8052 memset (ptr
, '\0', MIN (total_bytes
- off
, len
));
8053 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init
), cnt
, ce
)
8055 tree field
= ce
->index
;
8056 tree val
= ce
->value
;
8057 HOST_WIDE_INT pos
, fieldsize
;
8058 unsigned HOST_WIDE_INT bpos
= 0, epos
= 0;
8060 if (field
== NULL_TREE
)
8063 pos
= int_byte_position (field
);
8064 if (off
!= -1 && (HOST_WIDE_INT
) off
+ len
<= pos
)
8067 if (TREE_CODE (TREE_TYPE (field
)) == ARRAY_TYPE
8068 && TYPE_DOMAIN (TREE_TYPE (field
))
8069 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field
))))
8071 if (DECL_SIZE_UNIT (field
) == NULL_TREE
8072 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field
)))
8074 fieldsize
= tree_to_shwi (DECL_SIZE_UNIT (field
));
8078 if (DECL_BIT_FIELD (field
))
8080 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field
)))
8082 fieldsize
= TYPE_PRECISION (TREE_TYPE (field
));
8083 bpos
= tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field
));
8084 if (bpos
% BITS_PER_UNIT
)
8085 bpos
%= BITS_PER_UNIT
;
8089 epos
= fieldsize
% BITS_PER_UNIT
;
8090 fieldsize
+= BITS_PER_UNIT
- 1;
8091 fieldsize
/= BITS_PER_UNIT
;
8094 if (off
!= -1 && pos
+ fieldsize
<= off
)
8097 if (val
== NULL_TREE
)
8100 if (DECL_BIT_FIELD (field
))
8102 /* FIXME: Handle PDP endian. */
8103 if (BYTES_BIG_ENDIAN
!= WORDS_BIG_ENDIAN
)
8106 tree repr
= DECL_BIT_FIELD_REPRESENTATIVE (field
);
8107 if (repr
== NULL_TREE
8108 || TREE_CODE (val
) != INTEGER_CST
8109 || !INTEGRAL_TYPE_P (TREE_TYPE (repr
)))
8112 HOST_WIDE_INT rpos
= int_byte_position (repr
);
8115 wide_int w
= wi::to_wide (val
,
8116 TYPE_PRECISION (TREE_TYPE (repr
)));
8117 int diff
= (TYPE_PRECISION (TREE_TYPE (repr
))
8118 - TYPE_PRECISION (TREE_TYPE (field
)));
8119 HOST_WIDE_INT bitoff
= (pos
- rpos
) * BITS_PER_UNIT
+ bpos
;
8120 if (!BYTES_BIG_ENDIAN
)
8121 w
= wi::lshift (w
, bitoff
);
8123 w
= wi::lshift (w
, diff
- bitoff
);
8124 val
= wide_int_to_tree (TREE_TYPE (repr
), w
);
8126 unsigned char buf
[MAX_BITSIZE_MODE_ANY_INT
8127 / BITS_PER_UNIT
+ 1];
8128 int l
= native_encode_int (val
, buf
, sizeof buf
, 0);
8129 if (l
* BITS_PER_UNIT
!= TYPE_PRECISION (TREE_TYPE (repr
)))
8135 /* If the bitfield does not start at byte boundary, handle
8136 the partial byte at the start. */
8138 && (off
== -1 || (pos
>= off
&& len
>= 1)))
8140 if (!BYTES_BIG_ENDIAN
)
8142 int mask
= (1 << bpos
) - 1;
8143 buf
[pos
- rpos
] &= ~mask
;
8144 buf
[pos
- rpos
] |= ptr
[pos
- o
] & mask
;
8148 int mask
= (1 << (BITS_PER_UNIT
- bpos
)) - 1;
8149 buf
[pos
- rpos
] &= mask
;
8150 buf
[pos
- rpos
] |= ptr
[pos
- o
] & ~mask
;
8153 /* If the bitfield does not end at byte boundary, handle
8154 the partial byte at the end. */
8157 || pos
+ fieldsize
<= (HOST_WIDE_INT
) off
+ len
))
8159 if (!BYTES_BIG_ENDIAN
)
8161 int mask
= (1 << epos
) - 1;
8162 buf
[pos
- rpos
+ fieldsize
- 1] &= mask
;
8163 buf
[pos
- rpos
+ fieldsize
- 1]
8164 |= ptr
[pos
+ fieldsize
- 1 - o
] & ~mask
;
8168 int mask
= (1 << (BITS_PER_UNIT
- epos
)) - 1;
8169 buf
[pos
- rpos
+ fieldsize
- 1] &= ~mask
;
8170 buf
[pos
- rpos
+ fieldsize
- 1]
8171 |= ptr
[pos
+ fieldsize
- 1 - o
] & mask
;
8176 && (pos
+ fieldsize
<= (HOST_WIDE_INT
) off
+ len
)))
8177 memcpy (ptr
+ pos
- o
, buf
+ (pos
- rpos
), fieldsize
);
8180 /* Partial overlap. */
8181 HOST_WIDE_INT fsz
= fieldsize
;
8187 if (pos
+ fsz
> (HOST_WIDE_INT
) off
+ len
)
8188 fsz
= (HOST_WIDE_INT
) off
+ len
- pos
;
8189 memcpy (ptr
+ pos
- off
, buf
+ (pos
- rpos
), fsz
);
8196 && (pos
+ fieldsize
<= (HOST_WIDE_INT
) off
+ len
)))
8198 if (!native_encode_initializer (val
, ptr
? ptr
+ pos
- o
8201 off
== -1 ? -1 : 0))
8206 /* Partial overlap. */
8207 unsigned char *p
= NULL
;
8213 p
= ptr
+ pos
- off
;
8214 l
= MIN ((HOST_WIDE_INT
) off
+ len
- pos
,
8223 if (!native_encode_initializer (val
, p
, l
, no
))
8227 return MIN (total_bytes
- off
, len
);
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_int (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));

  if (total_bytes > len
      || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return NULL_TREE;

  wide_int result = wi::from_buffer (ptr, total_bytes);

  return wide_int_to_tree (type, result);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a FIXED_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_fixed (tree type, const unsigned char *ptr, int len)
{
  scalar_mode mode = SCALAR_TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  double_int result;
  FIXED_VALUE_TYPE fixed_value;

  if (total_bytes > len
      || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return NULL_TREE;

  result = double_int::from_buffer (ptr, total_bytes);
  fixed_value = fixed_from_double_int (result, mode);

  return build_fixed (type, fixed_value);
}
8276	/* Subroutine of native_interpret_expr.  Interpret the contents of
8277	   the buffer PTR of length LEN as a REAL_CST of type TYPE.
8278	   If the buffer cannot be interpreted, return NULL_TREE.  */
8281	native_interpret_real (tree type, const unsigned char *ptr, int len)
8283	  scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8284	  int total_bytes = GET_MODE_SIZE (mode);
8285	  unsigned char value;
8286	  /* There are always 32 bits in each long, no matter the size of
8287	     the host's long.  We handle floating point representations with
8292	  if (total_bytes > len || total_bytes > 24)
8294	  int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
8296	  memset (tmp, 0, sizeof (tmp));
8297	  for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
8298	       bitpos += BITS_PER_UNIT)
8300	      /* Both OFFSET and BYTE index within a long;
8301	         bitpos indexes the whole float.  */
8302	      int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
8303	      if (UNITS_PER_WORD < 4)
8305	          int word = byte / UNITS_PER_WORD;
8306	          if (WORDS_BIG_ENDIAN)
8307	            word = (words - 1) - word;
8308	          offset = word * UNITS_PER_WORD;
8309	          if (BYTES_BIG_ENDIAN)
8310	            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8312	            offset += byte % UNITS_PER_WORD;
8317	          if (BYTES_BIG_ENDIAN)
8319	              /* Reverse bytes within each long, or within the entire float
8320	                 if it's smaller than a long (for HFmode).  */
8321	              offset = MIN (3, total_bytes - 1) - offset;
8322	              gcc_assert (offset >= 0);
8325	      value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
8327	      tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
8330	  real_from_target (&r, tmp, mode);
8331	  tree ret = build_real (type, r);
8332	  if (MODE_COMPOSITE_P (mode))
8334	      /* For floating point values in composite modes, punt if this folding
8335	         doesn't preserve bit representation.  As the mode doesn't have fixed
8336	         precision while GCC pretends it does, there could be valid values that
8337	         GCC can't really represent accurately.  See PR95450.  */
8338	      unsigned char buf[24];
8339	      if (native_encode_expr (ret, buf, total_bytes, 0) != total_bytes
8340	          || memcmp (ptr, buf, total_bytes) != 0)
8347	/* Subroutine of native_interpret_expr.  Interpret the contents of
8348	   the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8349	   If the buffer cannot be interpreted, return NULL_TREE.  */
8352	native_interpret_complex (tree type, const unsigned char *ptr, int len)
8354	  tree etype, rpart, ipart;
8357	  etype = TREE_TYPE (type);
8358	  size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8361	  rpart = native_interpret_expr (etype, ptr, size);
8364	  ipart = native_interpret_expr (etype, ptr+size, size);
8367	  return build_complex (type, rpart, ipart);
8370	/* Read a vector of type TYPE from the target memory image given by BYTES,
8371	   which contains LEN bytes.  The vector is known to be encodable using
8372	   NPATTERNS interleaved patterns with NELTS_PER_PATTERN elements each.
8374	   Return the vector on success, otherwise return null.  */
8377	native_interpret_vector_part (tree type, const unsigned char *bytes,
8378	                              unsigned int len, unsigned int npatterns,
8379	                              unsigned int nelts_per_pattern)
8381	  tree elt_type = TREE_TYPE (type);
8382	  if (VECTOR_BOOLEAN_TYPE_P (type)
8383	      && TYPE_PRECISION (elt_type) <= BITS_PER_UNIT)
8385	      /* This is the only case in which elements can be smaller than a byte.
8386	         Element 0 is always in the lsb of the containing byte.  */
8387	      unsigned int elt_bits = TYPE_PRECISION (elt_type);
8388	      if (elt_bits * npatterns * nelts_per_pattern > len * BITS_PER_UNIT)
8391	      tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8392	      for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8394	          unsigned int bit_index = i * elt_bits;
8395	          unsigned int byte_index = bit_index / BITS_PER_UNIT;
8396	          unsigned int lsb = bit_index % BITS_PER_UNIT;
8397	          builder.quick_push (bytes[byte_index] & (1 << lsb)
8398	                              ? build_all_ones_cst (elt_type)
8399	                              : build_zero_cst (elt_type));
8401	      return builder.build ();
8404	  unsigned int elt_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (elt_type));
8405	  if (elt_bytes * npatterns * nelts_per_pattern > len)
8408	  tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8409	  for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8411	      tree elt = native_interpret_expr (elt_type, bytes, elt_bytes);
8414	      builder.quick_push (elt);
8417	  return builder.build ();
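/* Illustrative note, derived from the boolean-vector case above: element I is
   taken from bit (I % BITS_PER_UNIT) of BYTES[I / BITS_PER_UNIT].  For 1-bit
   elements and BYTES[0] == 0x05, the first elements decode as all-ones, zero,
   all-ones, zero, ...  */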
8420	/* Subroutine of native_interpret_expr.  Interpret the contents of
8421	   the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8422	   If the buffer cannot be interpreted, return NULL_TREE.  */
8425	native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
8429	  unsigned HOST_WIDE_INT count;
8431	  etype = TREE_TYPE (type);
8432	  size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8433	  if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&count)
8434	      || size * count > len)
8437	  return native_interpret_vector_part (type, ptr, len, count, 1);
8441	/* Subroutine of fold_view_convert_expr.  Interpret the contents of
8442	   the buffer PTR of length LEN as a constant of type TYPE.  For
8443	   INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
8444	   we return a REAL_CST, etc...  If the buffer cannot be interpreted,
8445	   return NULL_TREE.  */
8448	native_interpret_expr (tree type, const unsigned char *ptr, int len)
8450	  switch (TREE_CODE (type))
8456	    case REFERENCE_TYPE:
8457	      return native_interpret_int (type, ptr, len);
8460	      return native_interpret_real (type, ptr, len);
8462	    case FIXED_POINT_TYPE:
8463	      return native_interpret_fixed (type, ptr, len);
8466	      return native_interpret_complex (type, ptr, len);
8469	      return native_interpret_vector (type, ptr, len);
8476	/* Returns true if we can interpret the contents of a native encoding
8480	can_native_interpret_type_p (tree type)
8482	  switch (TREE_CODE (type))
8488	    case REFERENCE_TYPE:
8489	    case FIXED_POINT_TYPE:
8499	/* Routines for manipulation of native_encode_expr encoded data if the encoded
8500	   or extracted constant positions and/or sizes aren't byte aligned.  */
8502	/* Shift left the bytes in PTR of SZ elements by AMNT bits, carrying over the
8503	   bits between adjacent elements.  AMNT should be within
8506	      00011111|11100000 << 2 = 01111111|10000000
8507	      PTR[1]  | PTR[0]         PTR[1]  | PTR[0].  */
8510	shift_bytes_in_array_left (unsigned char *ptr, unsigned int sz,
8516	  unsigned char carry_over = 0U;
8517	  unsigned char carry_mask = (~0U) << (unsigned char) (BITS_PER_UNIT - amnt);
8518	  unsigned char clear_mask = (~0U) << amnt;
8520	  for (unsigned int i = 0; i < sz; i++)
8522	      unsigned prev_carry_over = carry_over;
8523	      carry_over = (ptr[i] & carry_mask) >> (BITS_PER_UNIT - amnt);
8528	      ptr[i] &= clear_mask;
8529	      ptr[i] |= prev_carry_over;
8534	/* Like shift_bytes_in_array_left but for big-endian.
8535	   Shift right the bytes in PTR of SZ elements by AMNT bits, carrying over the
8536	   bits between adjacent elements.  AMNT should be within
8539	      00011111|11100000 >> 2 = 00000111|11111000
8540	      PTR[0]  | PTR[1]         PTR[0]  | PTR[1].  */
8543	shift_bytes_in_array_right (unsigned char *ptr, unsigned int sz,
8549	  unsigned char carry_over = 0U;
8550	  unsigned char carry_mask = ~(~0U << amnt);
8552	  for (unsigned int i = 0; i < sz; i++)
8554	      unsigned prev_carry_over = carry_over;
8555	      carry_over = ptr[i] & carry_mask;
8557	      carry_over <<= (unsigned char) BITS_PER_UNIT - amnt;
8559	      ptr[i] |= prev_carry_over;
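/* Illustrative sketch, not part of GCC: a concrete instance of the
   little-endian diagram above, assuming BITS_PER_UNIT == 8; the third
   argument is the bit count AMNT named in the comment.  */
#if 0
  unsigned char example[2] = { 0xe0, 0x1f };	/* PTR[1]|PTR[0] = 00011111|11100000 */
  shift_bytes_in_array_left (example, 2, 2);	/* now 01111111|10000000, i.e. { 0x80, 0x7f } */
#endif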
8563	/* Try to view-convert VECTOR_CST EXPR to VECTOR_TYPE TYPE by operating
8564	   directly on the VECTOR_CST encoding, in a way that works for variable-
8565	   length vectors.  Return the resulting VECTOR_CST on success or null
8569	fold_view_convert_vector_encoding (tree type, tree expr)
8571	  tree expr_type = TREE_TYPE (expr);
8572	  poly_uint64 type_bits, expr_bits;
8573	  if (!poly_int_tree_p (TYPE_SIZE (type), &type_bits)
8574	      || !poly_int_tree_p (TYPE_SIZE (expr_type), &expr_bits))
8577	  poly_uint64 type_units = TYPE_VECTOR_SUBPARTS (type);
8578	  poly_uint64 expr_units = TYPE_VECTOR_SUBPARTS (expr_type);
8579	  unsigned int type_elt_bits = vector_element_size (type_bits, type_units);
8580	  unsigned int expr_elt_bits = vector_element_size (expr_bits, expr_units);
8582	  /* We can only preserve the semantics of a stepped pattern if the new
8583	     vector element is an integer of the same size.  */
8584	  if (VECTOR_CST_STEPPED_P (expr)
8585	      && (!INTEGRAL_TYPE_P (type) || type_elt_bits != expr_elt_bits))
8588	  /* The number of bits needed to encode one element from every pattern
8589	     of the original vector.  */
8590	  unsigned int expr_sequence_bits
8591	    = VECTOR_CST_NPATTERNS (expr) * expr_elt_bits;
8593	  /* The number of bits needed to encode one element from every pattern
8595	  unsigned int type_sequence_bits
8596	    = least_common_multiple (expr_sequence_bits, type_elt_bits);
8598	  /* Don't try to read more bytes than are available, which can happen
8599	     for constant-sized vectors if TYPE has larger elements than EXPR_TYPE.
8600	     The general VIEW_CONVERT handling can cope with that case, so there's
8601	     no point complicating things here.  */
8602	  unsigned int nelts_per_pattern = VECTOR_CST_NELTS_PER_PATTERN (expr);
8603	  unsigned int buffer_bytes = CEIL (nelts_per_pattern * type_sequence_bits,
8605	  unsigned int buffer_bits = buffer_bytes * BITS_PER_UNIT;
8606	  if (known_gt (buffer_bits, expr_bits))
8609	  /* Get enough bytes of EXPR to form the new encoding.  */
8610	  auto_vec<unsigned char, 128> buffer (buffer_bytes);
8611	  buffer.quick_grow (buffer_bytes);
8612	  if (native_encode_vector_part (expr, buffer.address (), buffer_bytes, 0,
8613	                                 buffer_bits / expr_elt_bits)
8614	      != (int) buffer_bytes)
8617	  /* Reencode the bytes as TYPE.  */
8618	  unsigned int type_npatterns = type_sequence_bits / type_elt_bits;
8619	  return native_interpret_vector_part (type, &buffer[0], buffer.length (),
8620	                                       type_npatterns, nelts_per_pattern);
8623	/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
8624	   TYPE at compile-time.  If we're unable to perform the conversion
8625	   return NULL_TREE.  */
8628	fold_view_convert_expr (tree type, tree expr)
8630	  /* We support up to 512-bit values (for V8DFmode).  */
8631	  unsigned char buffer[64];
8634	  /* Check that the host and target are sane.  */
8635	  if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
8638	  if (VECTOR_TYPE_P (type) && TREE_CODE (expr) == VECTOR_CST)
8639	    if (tree res = fold_view_convert_vector_encoding (type, expr))
8642	  len = native_encode_expr (expr, buffer, sizeof (buffer));
8646	  return native_interpret_expr (type, buffer, len);
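/* Illustrative sketch, not part of GCC sources: view-converting a REAL_CST
   to an integer type goes through the encode/interpret pair above.  Assumes
   a 32-bit int and IEEE single-precision float on the target.  */
#if 0
  tree one = build_real (float_type_node, dconst1);	/* 1.0f */
  tree bits = fold_view_convert_expr (integer_type_node, one);
  /* On such a target, bits is the INTEGER_CST 0x3f800000.  */
#endif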
8649	/* Build an expression for the address of T.  Folds away INDIRECT_REF
8650	   to avoid confusing the gimplify process.  */
8653	build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
8655	  /* The size of the object is not relevant when talking about its address.  */
8656	  if (TREE_CODE (t) == WITH_SIZE_EXPR)
8657	    t = TREE_OPERAND (t, 0);
8659	  if (TREE_CODE (t) == INDIRECT_REF)
8661	      t = TREE_OPERAND (t, 0);
8663	      if (TREE_TYPE (t) != ptrtype)
8664	        t = build1_loc (loc, NOP_EXPR, ptrtype, t);
8666	  else if (TREE_CODE (t) == MEM_REF
8667	           && integer_zerop (TREE_OPERAND (t, 1)))
8669	      t = TREE_OPERAND (t, 0);
8671	      if (TREE_TYPE (t) != ptrtype)
8672	        t = fold_convert_loc (loc, ptrtype, t);
8674	  else if (TREE_CODE (t) == MEM_REF
8675	           && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
8676	    return fold_binary (POINTER_PLUS_EXPR, ptrtype,
8677	                        TREE_OPERAND (t, 0),
8678	                        convert_to_ptrofftype (TREE_OPERAND (t, 1)));
8679	  else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
8681	      t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
8683	      if (TREE_TYPE (t) != ptrtype)
8684	        t = fold_convert_loc (loc, ptrtype, t);
8687	    t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
8692	/* Build an expression for the address of T.  */
8695	build_fold_addr_expr_loc (location_t loc, tree t)
8697	  tree ptrtype = build_pointer_type (TREE_TYPE (t));
8699	  return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
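/* Illustrative sketch, not part of GCC: the INDIRECT_REF case above means
   that taking the address of a dereference folds back to the pointer, so
   &*p becomes p (converted to the requested pointer type).  Here P is
   assumed to be some pointer-typed tree and LOC a source location.  */
#if 0
  tree deref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (p)), p);
  tree addr = build_fold_addr_expr_loc (loc, deref);	/* yields p, not &*p */
#endif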
8702 /* Fold a unary expression of code CODE and type TYPE with operand
8703 OP0. Return the folded expression if folding is successful.
8704 Otherwise, return NULL_TREE. */
8707 fold_unary_loc (location_t loc
, enum tree_code code
, tree type
, tree op0
)
8711 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
8713 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
8714 && TREE_CODE_LENGTH (code
) == 1);
8719 if (CONVERT_EXPR_CODE_P (code
)
8720 || code
== FLOAT_EXPR
|| code
== ABS_EXPR
|| code
== NEGATE_EXPR
)
8722 /* Don't use STRIP_NOPS, because signedness of argument type
8724 STRIP_SIGN_NOPS (arg0
);
8728 /* Strip any conversions that don't change the mode. This
8729 is safe for every expression, except for a comparison
8730 expression because its signedness is derived from its
8733 Note that this is done as an internal manipulation within
8734 the constant folder, in order to find the simplest
8735 representation of the arguments so that their form can be
8736 studied. In any cases, the appropriate type conversions
8737 should be put back in the tree that will get out of the
8742 if (CONSTANT_CLASS_P (arg0
))
8744 tree tem
= const_unop (code
, type
, arg0
);
8747 if (TREE_TYPE (tem
) != type
)
8748 tem
= fold_convert_loc (loc
, type
, tem
);
8754 tem
= generic_simplify (loc
, code
, type
, op0
);
8758 if (TREE_CODE_CLASS (code
) == tcc_unary
)
8760 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
8761 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
8762 fold_build1_loc (loc
, code
, type
,
8763 fold_convert_loc (loc
, TREE_TYPE (op0
),
8764 TREE_OPERAND (arg0
, 1))));
8765 else if (TREE_CODE (arg0
) == COND_EXPR
)
8767 tree arg01
= TREE_OPERAND (arg0
, 1);
8768 tree arg02
= TREE_OPERAND (arg0
, 2);
8769 if (! VOID_TYPE_P (TREE_TYPE (arg01
)))
8770 arg01
= fold_build1_loc (loc
, code
, type
,
8771 fold_convert_loc (loc
,
8772 TREE_TYPE (op0
), arg01
));
8773 if (! VOID_TYPE_P (TREE_TYPE (arg02
)))
8774 arg02
= fold_build1_loc (loc
, code
, type
,
8775 fold_convert_loc (loc
,
8776 TREE_TYPE (op0
), arg02
));
8777 tem
= fold_build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
8780 /* If this was a conversion, and all we did was to move into
8781 inside the COND_EXPR, bring it back out. But leave it if
8782 it is a conversion from integer to integer and the
8783 result precision is no wider than a word since such a
8784 conversion is cheap and may be optimized away by combine,
8785 while it couldn't if it were outside the COND_EXPR. Then return
8786 so we don't get into an infinite recursion loop taking the
8787 conversion out and then back in. */
8789 if ((CONVERT_EXPR_CODE_P (code
)
8790 || code
== NON_LVALUE_EXPR
)
8791 && TREE_CODE (tem
) == COND_EXPR
8792 && TREE_CODE (TREE_OPERAND (tem
, 1)) == code
8793 && TREE_CODE (TREE_OPERAND (tem
, 2)) == code
8794 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 1))
8795 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 2))
8796 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))
8797 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)))
8798 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
8800 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))))
8801 && TYPE_PRECISION (TREE_TYPE (tem
)) <= BITS_PER_WORD
)
8802 || flag_syntax_only
))
8803 tem
= build1_loc (loc
, code
, type
,
8805 TREE_TYPE (TREE_OPERAND
8806 (TREE_OPERAND (tem
, 1), 0)),
8807 TREE_OPERAND (tem
, 0),
8808 TREE_OPERAND (TREE_OPERAND (tem
, 1), 0),
8809 TREE_OPERAND (TREE_OPERAND (tem
, 2),
8817 case NON_LVALUE_EXPR
:
8818 if (!maybe_lvalue_p (op0
))
8819 return fold_convert_loc (loc
, type
, op0
);
8824 case FIX_TRUNC_EXPR
:
8825 if (COMPARISON_CLASS_P (op0
))
8827 /* If we have (type) (a CMP b) and type is an integral type, return
8828 new expression involving the new type. Canonicalize
8829 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
8831 Do not fold the result as that would not simplify further, also
8832 folding again results in recursions. */
8833 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
8834 return build2_loc (loc
, TREE_CODE (op0
), type
,
8835 TREE_OPERAND (op0
, 0),
8836 TREE_OPERAND (op0
, 1));
8837 else if (!INTEGRAL_TYPE_P (type
) && !VOID_TYPE_P (type
)
8838 && TREE_CODE (type
) != VECTOR_TYPE
)
8839 return build3_loc (loc
, COND_EXPR
, type
, op0
,
8840 constant_boolean_node (true, type
),
8841 constant_boolean_node (false, type
));
8844 /* Handle (T *)&A.B.C for A being of type T and B and C
8845 living at offset zero. This occurs frequently in
8846 C++ upcasting and then accessing the base. */
8847 if (TREE_CODE (op0
) == ADDR_EXPR
8848 && POINTER_TYPE_P (type
)
8849 && handled_component_p (TREE_OPERAND (op0
, 0)))
8851 poly_int64 bitsize
, bitpos
;
8854 int unsignedp
, reversep
, volatilep
;
8856 = get_inner_reference (TREE_OPERAND (op0
, 0), &bitsize
, &bitpos
,
8857 &offset
, &mode
, &unsignedp
, &reversep
,
8859 /* If the reference was to a (constant) zero offset, we can use
8860 the address of the base if it has the same base type
8861 as the result type and the pointer type is unqualified. */
8863 && known_eq (bitpos
, 0)
8864 && (TYPE_MAIN_VARIANT (TREE_TYPE (type
))
8865 == TYPE_MAIN_VARIANT (TREE_TYPE (base
)))
8866 && TYPE_QUALS (type
) == TYPE_UNQUALIFIED
)
8867 return fold_convert_loc (loc
, type
,
8868 build_fold_addr_expr_loc (loc
, base
));
8871 if (TREE_CODE (op0
) == MODIFY_EXPR
8872 && TREE_CONSTANT (TREE_OPERAND (op0
, 1))
8873 /* Detect assigning a bitfield. */
8874 && !(TREE_CODE (TREE_OPERAND (op0
, 0)) == COMPONENT_REF
8876 (TREE_OPERAND (TREE_OPERAND (op0
, 0), 1))))
8878 /* Don't leave an assignment inside a conversion
8879 unless assigning a bitfield. */
8880 tem
= fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 1));
8881 /* First do the assignment, then return converted constant. */
8882 tem
= build2_loc (loc
, COMPOUND_EXPR
, TREE_TYPE (tem
), op0
, tem
);
8883 TREE_NO_WARNING (tem
) = 1;
8884 TREE_USED (tem
) = 1;
8888 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8889 constants (if x has signed type, the sign bit cannot be set
8890 in c). This folds extension into the BIT_AND_EXPR.
8891 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8892 very likely don't have maximal range for their precision and this
8893 transformation effectively doesn't preserve non-maximal ranges. */
8894 if (TREE_CODE (type
) == INTEGER_TYPE
8895 && TREE_CODE (op0
) == BIT_AND_EXPR
8896 && TREE_CODE (TREE_OPERAND (op0
, 1)) == INTEGER_CST
)
8898 tree and_expr
= op0
;
8899 tree and0
= TREE_OPERAND (and_expr
, 0);
8900 tree and1
= TREE_OPERAND (and_expr
, 1);
8903 if (TYPE_UNSIGNED (TREE_TYPE (and_expr
))
8904 || (TYPE_PRECISION (type
)
8905 <= TYPE_PRECISION (TREE_TYPE (and_expr
))))
8907 else if (TYPE_PRECISION (TREE_TYPE (and1
))
8908 <= HOST_BITS_PER_WIDE_INT
8909 && tree_fits_uhwi_p (and1
))
8911 unsigned HOST_WIDE_INT cst
;
8913 cst
= tree_to_uhwi (and1
);
8914 cst
&= HOST_WIDE_INT_M1U
8915 << (TYPE_PRECISION (TREE_TYPE (and1
)) - 1);
8916 change
= (cst
== 0);
8918 && !flag_syntax_only
8919 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0
)))
8922 tree uns
= unsigned_type_for (TREE_TYPE (and0
));
8923 and0
= fold_convert_loc (loc
, uns
, and0
);
8924 and1
= fold_convert_loc (loc
, uns
, and1
);
8929 tem
= force_fit_type (type
, wi::to_widest (and1
), 0,
8930 TREE_OVERFLOW (and1
));
8931 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
8932 fold_convert_loc (loc
, type
, and0
), tem
);
8936 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
8937 cast (T1)X will fold away. We assume that this happens when X itself
8939 if (POINTER_TYPE_P (type
)
8940 && TREE_CODE (arg0
) == POINTER_PLUS_EXPR
8941 && CONVERT_EXPR_P (TREE_OPERAND (arg0
, 0)))
8943 tree arg00
= TREE_OPERAND (arg0
, 0);
8944 tree arg01
= TREE_OPERAND (arg0
, 1);
8946 return fold_build_pointer_plus_loc
8947 (loc
, fold_convert_loc (loc
, type
, arg00
), arg01
);
8950 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8951 of the same precision, and X is an integer type not narrower than
8952 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8953 if (INTEGRAL_TYPE_P (type
)
8954 && TREE_CODE (op0
) == BIT_NOT_EXPR
8955 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
8956 && CONVERT_EXPR_P (TREE_OPERAND (op0
, 0))
8957 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (op0
)))
8959 tem
= TREE_OPERAND (TREE_OPERAND (op0
, 0), 0);
8960 if (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
8961 && TYPE_PRECISION (type
) <= TYPE_PRECISION (TREE_TYPE (tem
)))
8962 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
8963 fold_convert_loc (loc
, type
, tem
));
8966 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8967 type of X and Y (integer types only). */
8968 if (INTEGRAL_TYPE_P (type
)
8969 && TREE_CODE (op0
) == MULT_EXPR
8970 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
8971 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
8973 /* Be careful not to introduce new overflows. */
8975 if (TYPE_OVERFLOW_WRAPS (type
))
8978 mult_type
= unsigned_type_for (type
);
8980 if (TYPE_PRECISION (mult_type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
8982 tem
= fold_build2_loc (loc
, MULT_EXPR
, mult_type
,
8983 fold_convert_loc (loc
, mult_type
,
8984 TREE_OPERAND (op0
, 0)),
8985 fold_convert_loc (loc
, mult_type
,
8986 TREE_OPERAND (op0
, 1)));
8987 return fold_convert_loc (loc
, type
, tem
);
8993 case VIEW_CONVERT_EXPR
:
8994 if (TREE_CODE (op0
) == MEM_REF
)
8996 if (TYPE_ALIGN (TREE_TYPE (op0
)) != TYPE_ALIGN (type
))
8997 type
= build_aligned_type (type
, TYPE_ALIGN (TREE_TYPE (op0
)));
8998 tem
= fold_build2_loc (loc
, MEM_REF
, type
,
8999 TREE_OPERAND (op0
, 0), TREE_OPERAND (op0
, 1));
9000 REF_REVERSE_STORAGE_ORDER (tem
) = REF_REVERSE_STORAGE_ORDER (op0
);
9007 tem
= fold_negate_expr (loc
, arg0
);
9009 return fold_convert_loc (loc
, type
, tem
);
9013 /* Convert fabs((double)float) into (double)fabsf(float). */
9014 if (TREE_CODE (arg0
) == NOP_EXPR
9015 && TREE_CODE (type
) == REAL_TYPE
)
9017 tree targ0
= strip_float_extensions (arg0
);
9019 return fold_convert_loc (loc
, type
,
9020 fold_build1_loc (loc
, ABS_EXPR
,
9027 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
9028 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
9029 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
9030 fold_convert_loc (loc
, type
,
9031 TREE_OPERAND (arg0
, 0)))))
9032 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
, tem
,
9033 fold_convert_loc (loc
, type
,
9034 TREE_OPERAND (arg0
, 1)));
9035 else if (TREE_CODE (arg0
) == BIT_XOR_EXPR
9036 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
9037 fold_convert_loc (loc
, type
,
9038 TREE_OPERAND (arg0
, 1)))))
9039 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
,
9040 fold_convert_loc (loc
, type
,
9041 TREE_OPERAND (arg0
, 0)), tem
);
9045 case TRUTH_NOT_EXPR
:
9046 /* Note that the operand of this must be an int
9047 and its values must be 0 or 1.
9048 ("true" is a fixed value perhaps depending on the language,
9049 but we don't handle values other than 1 correctly yet.) */
9050 tem
= fold_truth_not_expr (loc
, arg0
);
9053 return fold_convert_loc (loc
, type
, tem
);
9056 /* Fold *&X to X if X is an lvalue. */
9057 if (TREE_CODE (op0
) == ADDR_EXPR
)
9059 tree op00
= TREE_OPERAND (op0
, 0);
9061 || TREE_CODE (op00
) == PARM_DECL
9062 || TREE_CODE (op00
) == RESULT_DECL
)
9063 && !TREE_READONLY (op00
))
9070 } /* switch (code) */
9074	/* If the operation was a conversion do _not_ mark a resulting constant
9075	   with TREE_OVERFLOW if the original constant was not.  These conversions
9076	   have implementation defined behavior and retaining the TREE_OVERFLOW
9077	   flag here would confuse later passes such as VRP.  */
9079	fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
9080	                                tree type, tree op0)
9082	  tree res = fold_unary_loc (loc, code, type, op0);
9084	      && TREE_CODE (res) == INTEGER_CST
9085	      && TREE_CODE (op0) == INTEGER_CST
9086	      && CONVERT_EXPR_CODE_P (code))
9087	    TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
9092 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
9093 operands OP0 and OP1. LOC is the location of the resulting expression.
9094 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
9095 Return the folded expression if folding is successful. Otherwise,
9096 return NULL_TREE. */
9098 fold_truth_andor (location_t loc
, enum tree_code code
, tree type
,
9099 tree arg0
, tree arg1
, tree op0
, tree op1
)
9103 /* We only do these simplifications if we are optimizing. */
9107 /* Check for things like (A || B) && (A || C). We can convert this
9108 to A || (B && C). Note that either operator can be any of the four
9109 truth and/or operations and the transformation will still be
9110 valid. Also note that we only care about order for the
9111 ANDIF and ORIF operators. If B contains side effects, this
9112 might change the truth-value of A. */
9113 if (TREE_CODE (arg0
) == TREE_CODE (arg1
)
9114 && (TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
9115 || TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
9116 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
9117 || TREE_CODE (arg0
) == TRUTH_OR_EXPR
)
9118 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0
, 1)))
9120 tree a00
= TREE_OPERAND (arg0
, 0);
9121 tree a01
= TREE_OPERAND (arg0
, 1);
9122 tree a10
= TREE_OPERAND (arg1
, 0);
9123 tree a11
= TREE_OPERAND (arg1
, 1);
9124 int commutative
= ((TREE_CODE (arg0
) == TRUTH_OR_EXPR
9125 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
)
9126 && (code
== TRUTH_AND_EXPR
9127 || code
== TRUTH_OR_EXPR
));
9129 if (operand_equal_p (a00
, a10
, 0))
9130 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a00
,
9131 fold_build2_loc (loc
, code
, type
, a01
, a11
));
9132 else if (commutative
&& operand_equal_p (a00
, a11
, 0))
9133 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a00
,
9134 fold_build2_loc (loc
, code
, type
, a01
, a10
));
9135 else if (commutative
&& operand_equal_p (a01
, a10
, 0))
9136 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a01
,
9137 fold_build2_loc (loc
, code
, type
, a00
, a11
));
9139 /* This case if tricky because we must either have commutative
9140 operators or else A10 must not have side-effects. */
9142 else if ((commutative
|| ! TREE_SIDE_EFFECTS (a10
))
9143 && operand_equal_p (a01
, a11
, 0))
9144 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
,
9145 fold_build2_loc (loc
, code
, type
, a00
, a10
),
9149 /* See if we can build a range comparison. */
9150 if ((tem
= fold_range_test (loc
, code
, type
, op0
, op1
)) != 0)
9153 if ((code
== TRUTH_ANDIF_EXPR
&& TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
)
9154 || (code
== TRUTH_ORIF_EXPR
&& TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
))
9156 tem
= merge_truthop_with_opposite_arm (loc
, arg0
, arg1
, true);
9158 return fold_build2_loc (loc
, code
, type
, tem
, arg1
);
9161 if ((code
== TRUTH_ANDIF_EXPR
&& TREE_CODE (arg1
) == TRUTH_ORIF_EXPR
)
9162 || (code
== TRUTH_ORIF_EXPR
&& TREE_CODE (arg1
) == TRUTH_ANDIF_EXPR
))
9164 tem
= merge_truthop_with_opposite_arm (loc
, arg1
, arg0
, false);
9166 return fold_build2_loc (loc
, code
, type
, arg0
, tem
);
9169 /* Check for the possibility of merging component references. If our
9170 lhs is another similar operation, try to merge its rhs with our
9171 rhs. Then try to merge our lhs and rhs. */
9172 if (TREE_CODE (arg0
) == code
9173 && (tem
= fold_truth_andor_1 (loc
, code
, type
,
9174 TREE_OPERAND (arg0
, 1), arg1
)) != 0)
9175 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
9177 if ((tem
= fold_truth_andor_1 (loc
, code
, type
, arg0
, arg1
)) != 0)
9180 bool logical_op_non_short_circuit
= LOGICAL_OP_NON_SHORT_CIRCUIT
;
9181 if (param_logical_op_non_short_circuit
!= -1)
9182 logical_op_non_short_circuit
9183 = param_logical_op_non_short_circuit
;
9184 if (logical_op_non_short_circuit
9185 && !flag_sanitize_coverage
9186 && (code
== TRUTH_AND_EXPR
9187 || code
== TRUTH_ANDIF_EXPR
9188 || code
== TRUTH_OR_EXPR
9189 || code
== TRUTH_ORIF_EXPR
))
9191 enum tree_code ncode
, icode
;
9193 ncode
= (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_AND_EXPR
)
9194 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
;
9195 icode
= ncode
== TRUTH_AND_EXPR
? TRUTH_ANDIF_EXPR
: TRUTH_ORIF_EXPR
;
9197 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
9198 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
9199 We don't want to pack more than two leafs to a non-IF AND/OR
9201 If tree-code of left-hand operand isn't an AND/OR-IF code and not
9202 equal to IF-CODE, then we don't want to add right-hand operand.
9203 If the inner right-hand side of left-hand operand has
9204 side-effects, or isn't simple, then we can't add to it,
9205 as otherwise we might destroy if-sequence. */
9206 if (TREE_CODE (arg0
) == icode
9207 && simple_operand_p_2 (arg1
)
9208 /* Needed for sequence points to handle trappings, and
9210 && simple_operand_p_2 (TREE_OPERAND (arg0
, 1)))
9212 tem
= fold_build2_loc (loc
, ncode
, type
, TREE_OPERAND (arg0
, 1),
9214 return fold_build2_loc (loc
, icode
, type
, TREE_OPERAND (arg0
, 0),
9217 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
9218 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
9219 else if (TREE_CODE (arg1
) == icode
9220 && simple_operand_p_2 (arg0
)
9221 /* Needed for sequence points to handle trappings, and
9223 && simple_operand_p_2 (TREE_OPERAND (arg1
, 0)))
9225 tem
= fold_build2_loc (loc
, ncode
, type
,
9226 arg0
, TREE_OPERAND (arg1
, 0));
9227 return fold_build2_loc (loc
, icode
, type
, tem
,
9228 TREE_OPERAND (arg1
, 1));
9230 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
9232 For sequence point consistancy, we need to check for trapping,
9233 and side-effects. */
9234 else if (code
== icode
&& simple_operand_p_2 (arg0
)
9235 && simple_operand_p_2 (arg1
))
9236 return fold_build2_loc (loc
, ncode
, type
, arg0
, arg1
);
9242	/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
9243	   by changing CODE to reduce the magnitude of constants involved in
9244	   ARG0 of the comparison.
9245	   Returns a canonicalized comparison tree if a simplification was
9246	   possible, otherwise returns NULL_TREE.
9247	   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
9248	   valid if signed overflow is undefined.  */
9251	maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
9252	                                 tree arg0, tree arg1,
9253	                                 bool *strict_overflow_p)
9255	  enum tree_code code0 = TREE_CODE (arg0);
9256	  tree t, cst0 = NULL_TREE;
9259	  /* Match A +- CST code arg1.  We can change this only if overflow
9261	  if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9262	         && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
9263	        /* In principle pointers also have undefined overflow behavior,
9264	           but that causes problems elsewhere.  */
9265	        && !POINTER_TYPE_P (TREE_TYPE (arg0))
9266	        && (code0 == MINUS_EXPR
9267	            || code0 == PLUS_EXPR)
9268	        && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
9271	  /* Identify the constant in arg0 and its sign.  */
9272	  cst0 = TREE_OPERAND (arg0, 1);
9273	  sgn0 = tree_int_cst_sgn (cst0);
9275	  /* Overflowed constants and zero will cause problems.  */
9276	  if (integer_zerop (cst0)
9277	      || TREE_OVERFLOW (cst0))
9280	  /* See if we can reduce the magnitude of the constant in
9281	     arg0 by changing the comparison code.  */
9282	  /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
9284	      && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9286	  /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
9287	  else if (code == GT_EXPR
9288	           && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9290	  /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
9291	  else if (code == LE_EXPR
9292	           && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9294	  /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
9295	  else if (code == GE_EXPR
9296	           && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9300	  *strict_overflow_p = true;
9302	  /* Now build the constant reduced in magnitude.  But not if that
9303	     would produce one outside of its type's range.  */
9304	  if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
9306	          && TYPE_MIN_VALUE (TREE_TYPE (cst0))
9307	          && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
9309	          && TYPE_MAX_VALUE (TREE_TYPE (cst0))
9310	          && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
9313	  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
9314	                       cst0, build_int_cst (TREE_TYPE (cst0), 1));
9315	  t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
9316	  t = fold_convert (TREE_TYPE (arg1), t);
9318	  return fold_build2_loc (loc, code, type, t, arg1);
9321	/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
9322	   overflow further.  Try to decrease the magnitude of constants involved
9323	   by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
9324	   and put sole constants at the second argument position.
9325	   Returns the canonicalized tree if changed, otherwise NULL_TREE.  */
9328	maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
9329	                               tree arg0, tree arg1)
9332	  bool strict_overflow_p;
9333	  const char * const warnmsg = G_("assuming signed overflow does not occur "
9334	                                  "when reducing constant in comparison");
9336	  /* Try canonicalization by simplifying arg0.  */
9337	  strict_overflow_p = false;
9338	  t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
9339	                                       &strict_overflow_p);
9342	      if (strict_overflow_p)
9343	        fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9347	  /* Try canonicalization by simplifying arg1 using the swapped
9349	  code = swap_tree_comparison (code);
9350	  strict_overflow_p = false;
9351	  t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
9352	                                       &strict_overflow_p);
9353	  if (t && strict_overflow_p)
9354	    fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
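/* For instance, for signed X with undefined overflow the rules above turn
   X - 2 < Y into X - 1 <= Y, and X + 2 > Y into X + 1 >= Y, each step
   reducing the magnitude of the constant by one.  */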
9358	/* Return whether BASE + OFFSET + BITPOS may wrap around the address
9359	   space.  This is used to avoid issuing overflow warnings for
9360	   expressions like &p->x which cannot wrap.  */
9363	pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
9365	  if (!POINTER_TYPE_P (TREE_TYPE (base)))
9368	  if (maybe_lt (bitpos, 0))
9371	  poly_wide_int wi_offset;
9372	  int precision = TYPE_PRECISION (TREE_TYPE (base));
9373	  if (offset == NULL_TREE)
9374	    wi_offset = wi::zero (precision);
9375	  else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
9378	    wi_offset = wi::to_poly_wide (offset);
9380	  wi::overflow_type overflow;
9381	  poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
9383	  poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
9387	  poly_uint64 total_hwi, size;
9388	  if (!total.to_uhwi (&total_hwi)
9389	      || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
9391	      || known_eq (size, 0U))
9394	  if (known_le (total_hwi, size))
9397	  /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
9399	  if (TREE_CODE (base) == ADDR_EXPR
9400	      && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
9402	      && maybe_ne (size, 0U)
9403	      && known_le (total_hwi, size))
9409	/* Return a positive integer when the symbol DECL is known to have
9410	   a nonzero address, zero when it's known not to (e.g., it's a weak
9411	   symbol), and a negative integer when the symbol is not yet in the
9412	   symbol table and so whether or not its address is zero is unknown.
9413	   For function local objects, always return a positive integer.  */
9415	maybe_nonzero_address (tree decl)
9417	  if (DECL_P (decl) && decl_in_symtab_p (decl))
9418	    if (struct symtab_node *symbol = symtab_node::get_create (decl))
9419	      return symbol->nonzero_address ();
9421	  /* Function local objects are never NULL.  */
9423	      && (DECL_CONTEXT (decl)
9424	          && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
9425	          && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
9431 /* Subroutine of fold_binary. This routine performs all of the
9432 transformations that are common to the equality/inequality
9433 operators (EQ_EXPR and NE_EXPR) and the ordering operators
9434 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
9435 fold_binary should call fold_binary. Fold a comparison with
9436 tree code CODE and type TYPE with operands OP0 and OP1. Return
9437 the folded comparison or NULL_TREE. */
9440 fold_comparison (location_t loc
, enum tree_code code
, tree type
,
9443 const bool equality_code
= (code
== EQ_EXPR
|| code
== NE_EXPR
);
9444 tree arg0
, arg1
, tem
;
9449 STRIP_SIGN_NOPS (arg0
);
9450 STRIP_SIGN_NOPS (arg1
);
9452 /* For comparisons of pointers we can decompose it to a compile time
9453 comparison of the base objects and the offsets into the object.
9454 This requires at least one operand being an ADDR_EXPR or a
9455 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
9456 if (POINTER_TYPE_P (TREE_TYPE (arg0
))
9457 && (TREE_CODE (arg0
) == ADDR_EXPR
9458 || TREE_CODE (arg1
) == ADDR_EXPR
9459 || TREE_CODE (arg0
) == POINTER_PLUS_EXPR
9460 || TREE_CODE (arg1
) == POINTER_PLUS_EXPR
))
9462 tree base0
, base1
, offset0
= NULL_TREE
, offset1
= NULL_TREE
;
9463 poly_int64 bitsize
, bitpos0
= 0, bitpos1
= 0;
9465 int volatilep
, reversep
, unsignedp
;
9466 bool indirect_base0
= false, indirect_base1
= false;
9468 /* Get base and offset for the access. Strip ADDR_EXPR for
9469 get_inner_reference, but put it back by stripping INDIRECT_REF
9470 off the base object if possible. indirect_baseN will be true
9471 if baseN is not an address but refers to the object itself. */
9473 if (TREE_CODE (arg0
) == ADDR_EXPR
)
9476 = get_inner_reference (TREE_OPERAND (arg0
, 0),
9477 &bitsize
, &bitpos0
, &offset0
, &mode
,
9478 &unsignedp
, &reversep
, &volatilep
);
9479 if (TREE_CODE (base0
) == INDIRECT_REF
)
9480 base0
= TREE_OPERAND (base0
, 0);
9482 indirect_base0
= true;
9484 else if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
)
9486 base0
= TREE_OPERAND (arg0
, 0);
9487 STRIP_SIGN_NOPS (base0
);
9488 if (TREE_CODE (base0
) == ADDR_EXPR
)
9491 = get_inner_reference (TREE_OPERAND (base0
, 0),
9492 &bitsize
, &bitpos0
, &offset0
, &mode
,
9493 &unsignedp
, &reversep
, &volatilep
);
9494 if (TREE_CODE (base0
) == INDIRECT_REF
)
9495 base0
= TREE_OPERAND (base0
, 0);
9497 indirect_base0
= true;
9499 if (offset0
== NULL_TREE
|| integer_zerop (offset0
))
9500 offset0
= TREE_OPERAND (arg0
, 1);
9502 offset0
= size_binop (PLUS_EXPR
, offset0
,
9503 TREE_OPERAND (arg0
, 1));
9504 if (poly_int_tree_p (offset0
))
9506 poly_offset_int tem
= wi::sext (wi::to_poly_offset (offset0
),
9507 TYPE_PRECISION (sizetype
));
9508 tem
<<= LOG2_BITS_PER_UNIT
;
9510 if (tem
.to_shwi (&bitpos0
))
9511 offset0
= NULL_TREE
;
9516 if (TREE_CODE (arg1
) == ADDR_EXPR
)
9519 = get_inner_reference (TREE_OPERAND (arg1
, 0),
9520 &bitsize
, &bitpos1
, &offset1
, &mode
,
9521 &unsignedp
, &reversep
, &volatilep
);
9522 if (TREE_CODE (base1
) == INDIRECT_REF
)
9523 base1
= TREE_OPERAND (base1
, 0);
9525 indirect_base1
= true;
9527 else if (TREE_CODE (arg1
) == POINTER_PLUS_EXPR
)
9529 base1
= TREE_OPERAND (arg1
, 0);
9530 STRIP_SIGN_NOPS (base1
);
9531 if (TREE_CODE (base1
) == ADDR_EXPR
)
9534 = get_inner_reference (TREE_OPERAND (base1
, 0),
9535 &bitsize
, &bitpos1
, &offset1
, &mode
,
9536 &unsignedp
, &reversep
, &volatilep
);
9537 if (TREE_CODE (base1
) == INDIRECT_REF
)
9538 base1
= TREE_OPERAND (base1
, 0);
9540 indirect_base1
= true;
9542 if (offset1
== NULL_TREE
|| integer_zerop (offset1
))
9543 offset1
= TREE_OPERAND (arg1
, 1);
9545 offset1
= size_binop (PLUS_EXPR
, offset1
,
9546 TREE_OPERAND (arg1
, 1));
9547 if (poly_int_tree_p (offset1
))
9549 poly_offset_int tem
= wi::sext (wi::to_poly_offset (offset1
),
9550 TYPE_PRECISION (sizetype
));
9551 tem
<<= LOG2_BITS_PER_UNIT
;
9553 if (tem
.to_shwi (&bitpos1
))
9554 offset1
= NULL_TREE
;
9558 /* If we have equivalent bases we might be able to simplify. */
9559 if (indirect_base0
== indirect_base1
9560 && operand_equal_p (base0
, base1
,
9561 indirect_base0
? OEP_ADDRESS_OF
: 0))
9563 /* We can fold this expression to a constant if the non-constant
9564 offset parts are equal. */
9565 if ((offset0
== offset1
9566 || (offset0
&& offset1
9567 && operand_equal_p (offset0
, offset1
, 0)))
9570 && (DECL_P (base0
) || CONSTANT_CLASS_P (base0
)))
9571 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))))
9574 && maybe_ne (bitpos0
, bitpos1
)
9575 && (pointer_may_wrap_p (base0
, offset0
, bitpos0
)
9576 || pointer_may_wrap_p (base1
, offset1
, bitpos1
)))
9577 fold_overflow_warning (("assuming pointer wraparound does not "
9578 "occur when comparing P +- C1 with "
9580 WARN_STRICT_OVERFLOW_CONDITIONAL
);
9585 if (known_eq (bitpos0
, bitpos1
))
9586 return constant_boolean_node (true, type
);
9587 if (known_ne (bitpos0
, bitpos1
))
9588 return constant_boolean_node (false, type
);
9591 if (known_ne (bitpos0
, bitpos1
))
9592 return constant_boolean_node (true, type
);
9593 if (known_eq (bitpos0
, bitpos1
))
9594 return constant_boolean_node (false, type
);
9597 if (known_lt (bitpos0
, bitpos1
))
9598 return constant_boolean_node (true, type
);
9599 if (known_ge (bitpos0
, bitpos1
))
9600 return constant_boolean_node (false, type
);
9603 if (known_le (bitpos0
, bitpos1
))
9604 return constant_boolean_node (true, type
);
9605 if (known_gt (bitpos0
, bitpos1
))
9606 return constant_boolean_node (false, type
);
9609 if (known_ge (bitpos0
, bitpos1
))
9610 return constant_boolean_node (true, type
);
9611 if (known_lt (bitpos0
, bitpos1
))
9612 return constant_boolean_node (false, type
);
9615 if (known_gt (bitpos0
, bitpos1
))
9616 return constant_boolean_node (true, type
);
9617 if (known_le (bitpos0
, bitpos1
))
9618 return constant_boolean_node (false, type
);
9623 /* We can simplify the comparison to a comparison of the variable
9624 offset parts if the constant offset parts are equal.
9625 Be careful to use signed sizetype here because otherwise we
9626 mess with array offsets in the wrong way. This is possible
9627 because pointer arithmetic is restricted to retain within an
9628 object and overflow on pointer differences is undefined as of
9629 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9630 else if (known_eq (bitpos0
, bitpos1
)
9633 && (DECL_P (base0
) || CONSTANT_CLASS_P (base0
)))
9634 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))))
9636 /* By converting to signed sizetype we cover middle-end pointer
9637 arithmetic which operates on unsigned pointer types of size
9638 type size and ARRAY_REF offsets which are properly sign or
9639 zero extended from their type in case it is narrower than
9641 if (offset0
== NULL_TREE
)
9642 offset0
= build_int_cst (ssizetype
, 0);
9644 offset0
= fold_convert_loc (loc
, ssizetype
, offset0
);
9645 if (offset1
== NULL_TREE
)
9646 offset1
= build_int_cst (ssizetype
, 0);
9648 offset1
= fold_convert_loc (loc
, ssizetype
, offset1
);
9651 && (pointer_may_wrap_p (base0
, offset0
, bitpos0
)
9652 || pointer_may_wrap_p (base1
, offset1
, bitpos1
)))
9653 fold_overflow_warning (("assuming pointer wraparound does not "
9654 "occur when comparing P +- C1 with "
9656 WARN_STRICT_OVERFLOW_COMPARISON
);
9658 return fold_build2_loc (loc
, code
, type
, offset0
, offset1
);
9661 /* For equal offsets we can simplify to a comparison of the
9663 else if (known_eq (bitpos0
, bitpos1
)
9665 ? base0
!= TREE_OPERAND (arg0
, 0) : base0
!= arg0
)
9667 ? base1
!= TREE_OPERAND (arg1
, 0) : base1
!= arg1
)
9668 && ((offset0
== offset1
)
9669 || (offset0
&& offset1
9670 && operand_equal_p (offset0
, offset1
, 0))))
9673 base0
= build_fold_addr_expr_loc (loc
, base0
);
9675 base1
= build_fold_addr_expr_loc (loc
, base1
);
9676 return fold_build2_loc (loc
, code
, type
, base0
, base1
);
9678 /* Comparison between an ordinary (non-weak) symbol and a null
9679 pointer can be eliminated since such symbols must have a non
9680 null address. In C, relational expressions between pointers
9681 to objects and null pointers are undefined. The results
9682 below follow the C++ rules with the additional property that
9683 every object pointer compares greater than a null pointer.
9685 else if (((DECL_P (base0
)
9686 && maybe_nonzero_address (base0
) > 0
9687 /* Avoid folding references to struct members at offset 0 to
9688 prevent tests like '&ptr->firstmember == 0' from getting
9689 eliminated. When ptr is null, although the -> expression
9690 is strictly speaking invalid, GCC retains it as a matter
9691 of QoI. See PR c/44555. */
9692 && (offset0
== NULL_TREE
&& known_ne (bitpos0
, 0)))
9693 || CONSTANT_CLASS_P (base0
))
9695 /* The caller guarantees that when one of the arguments is
9696 constant (i.e., null in this case) it is second. */
9697 && integer_zerop (arg1
))
9704 return constant_boolean_node (false, type
);
9708 return constant_boolean_node (true, type
);
9715 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9716 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9717 the resulting offset is smaller in absolute value than the
9718 original one and has the same sign. */
9719 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
9720 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))
9721 && (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
9722 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
9723 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1)))
9724 && (TREE_CODE (arg1
) == PLUS_EXPR
|| TREE_CODE (arg1
) == MINUS_EXPR
)
9725 && (TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
9726 && !TREE_OVERFLOW (TREE_OPERAND (arg1
, 1))))
9728 tree const1
= TREE_OPERAND (arg0
, 1);
9729 tree const2
= TREE_OPERAND (arg1
, 1);
9730 tree variable1
= TREE_OPERAND (arg0
, 0);
9731 tree variable2
= TREE_OPERAND (arg1
, 0);
9733 const char * const warnmsg
= G_("assuming signed overflow does not "
9734 "occur when combining constants around "
9737 /* Put the constant on the side where it doesn't overflow and is
9738 of lower absolute value and of same sign than before. */
9739 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
9740 ? MINUS_EXPR
: PLUS_EXPR
,
9742 if (!TREE_OVERFLOW (cst
)
9743 && tree_int_cst_compare (const2
, cst
) == tree_int_cst_sgn (const2
)
9744 && tree_int_cst_sgn (cst
) == tree_int_cst_sgn (const2
))
9746 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
9747 return fold_build2_loc (loc
, code
, type
,
9749 fold_build2_loc (loc
, TREE_CODE (arg1
),
9754 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
9755 ? MINUS_EXPR
: PLUS_EXPR
,
9757 if (!TREE_OVERFLOW (cst
)
9758 && tree_int_cst_compare (const1
, cst
) == tree_int_cst_sgn (const1
)
9759 && tree_int_cst_sgn (cst
) == tree_int_cst_sgn (const1
))
9761 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
9762 return fold_build2_loc (loc
, code
, type
,
9763 fold_build2_loc (loc
, TREE_CODE (arg0
),
9770 tem
= maybe_canonicalize_comparison (loc
, code
, type
, arg0
, arg1
);
9774 /* If we are comparing an expression that just has comparisons
9775 of two integer values, arithmetic expressions of those comparisons,
9776 and constants, we can simplify it. There are only three cases
9777 to check: the two values can either be equal, the first can be
9778 greater, or the second can be greater. Fold the expression for
9779 those three values. Since each value must be 0 or 1, we have
9780 eight possibilities, each of which corresponds to the constant 0
9781 or 1 or one of the six possible comparisons.
9783 This handles common cases like (a > b) == 0 but also handles
9784 expressions like ((x > y) - (y > x)) > 0, which supposedly
9785 occur in macroized code. */
9787 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) != INTEGER_CST
)
9789 tree cval1
= 0, cval2
= 0;
9791 if (twoval_comparison_p (arg0
, &cval1
, &cval2
)
9792 /* Don't handle degenerate cases here; they should already
9793 have been handled anyway. */
9794 && cval1
!= 0 && cval2
!= 0
9795 && ! (TREE_CONSTANT (cval1
) && TREE_CONSTANT (cval2
))
9796 && TREE_TYPE (cval1
) == TREE_TYPE (cval2
)
9797 && INTEGRAL_TYPE_P (TREE_TYPE (cval1
))
9798 && TYPE_MAX_VALUE (TREE_TYPE (cval1
))
9799 && TYPE_MAX_VALUE (TREE_TYPE (cval2
))
9800 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1
)),
9801 TYPE_MAX_VALUE (TREE_TYPE (cval2
)), 0))
9803 tree maxval
= TYPE_MAX_VALUE (TREE_TYPE (cval1
));
9804 tree minval
= TYPE_MIN_VALUE (TREE_TYPE (cval1
));
9806 /* We can't just pass T to eval_subst in case cval1 or cval2
9807 was the same as ARG1. */
9810 = fold_build2_loc (loc
, code
, type
,
9811 eval_subst (loc
, arg0
, cval1
, maxval
,
9815 = fold_build2_loc (loc
, code
, type
,
9816 eval_subst (loc
, arg0
, cval1
, maxval
,
9820 = fold_build2_loc (loc
, code
, type
,
9821 eval_subst (loc
, arg0
, cval1
, minval
,
9825 /* All three of these results should be 0 or 1. Confirm they are.
9826 Then use those values to select the proper code to use. */
9828 if (TREE_CODE (high_result
) == INTEGER_CST
9829 && TREE_CODE (equal_result
) == INTEGER_CST
9830 && TREE_CODE (low_result
) == INTEGER_CST
)
9832 /* Make a 3-bit mask with the high-order bit being the
9833 value for `>', the next for '=', and the low for '<'. */
9834 switch ((integer_onep (high_result
) * 4)
9835 + (integer_onep (equal_result
) * 2)
9836 + integer_onep (low_result
))
9840 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
9861 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
9864 return fold_build2_loc (loc
, code
, type
, cval1
, cval2
);
9873	/* Subroutine of fold_binary.  Optimize complex multiplications of the
9874	   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
9875	   argument EXPR represents the expression "z" of type TYPE.  */
9878	fold_mult_zconjz (location_t loc, tree type, tree expr)
9880	  tree itype = TREE_TYPE (type);
9881	  tree rpart, ipart, tem;
9883	  if (TREE_CODE (expr) == COMPLEX_EXPR)
9885	      rpart = TREE_OPERAND (expr, 0);
9886	      ipart = TREE_OPERAND (expr, 1);
9888	  else if (TREE_CODE (expr) == COMPLEX_CST)
9890	      rpart = TREE_REALPART (expr);
9891	      ipart = TREE_IMAGPART (expr);
9895	      expr = save_expr (expr);
9896	      rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9897	      ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9900	  rpart = save_expr (rpart);
9901	  ipart = save_expr (ipart);
9902	  tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9903	                         fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9904	                         fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9905	  return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9906	                          build_zero_cst (itype));
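/* Worked example: for z = a + b*i,
     z * conj(z) = (a + b*i) * (a - b*i) = a*a + b*b + 0*i,
   which is exactly the COMPLEX_EXPR built above: real part a*a + b*b,
   imaginary part zero.  */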
9910	/* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
9911	   CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
9912	   true if successful.  */
9915	vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
9917	  unsigned HOST_WIDE_INT i, nunits;
9919	  if (TREE_CODE (arg) == VECTOR_CST
9920	      && VECTOR_CST_NELTS (arg).is_constant (&nunits))
9922	      for (i = 0; i < nunits; ++i)
9923	        elts[i] = VECTOR_CST_ELT (arg, i);
9925	  else if (TREE_CODE (arg) == CONSTRUCTOR)
9927	      constructor_elt *elt;
9929	      FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9930	        if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9933	          elts[i] = elt->value;
9937	  for (; i < nelts; i++)
9939	      = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9943	/* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9944	   selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9945	   NULL_TREE otherwise.  */
9948	fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
9951	  unsigned HOST_WIDE_INT nelts;
9952	  bool need_ctor = false;
9954	  if (!sel.length ().is_constant (&nelts))
9956	  gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), nelts)
9957	              && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)), nelts)
9958	              && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)), nelts));
9959	  if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9960	      || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9963	  tree *in_elts = XALLOCAVEC (tree, nelts * 2);
9964	  if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
9965	      || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
9968	  tree_vector_builder out_elts (type, nelts, 1);
9969	  for (i = 0; i < nelts; i++)
9971	      HOST_WIDE_INT index;
9972	      if (!sel[i].is_constant (&index))
9974	      if (!CONSTANT_CLASS_P (in_elts[index]))
9976	      out_elts.quick_push (unshare_expr (in_elts[index]));
9981	      vec<constructor_elt, va_gc> *v;
9982	      vec_alloc (v, nelts);
9983	      for (i = 0; i < nelts; i++)
9984	        CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
9985	      return build_constructor (type, v);
9988	  return out_elts.build ();
9991	/* Try to fold a pointer difference of type TYPE between two address
9992	   expressions of array references AREF0 and AREF1 using location LOC.
9993	   Return a simplified expression for the difference or NULL_TREE.  */
9996	fold_addr_of_array_ref_difference (location_t loc, tree type,
9997	                                   tree aref0, tree aref1,
9998	                                   bool use_pointer_diff)
10000	  tree base0 = TREE_OPERAND (aref0, 0);
10001	  tree base1 = TREE_OPERAND (aref1, 0);
10002	  tree base_offset = build_int_cst (type, 0);
10004	  /* If the bases are array references as well, recurse.  If the bases
10005	     are pointer indirections compute the difference of the pointers.
10006	     If the bases are equal, we are set.  */
10007	  if ((TREE_CODE (base0) == ARRAY_REF
10008	       && TREE_CODE (base1) == ARRAY_REF
10010	           = fold_addr_of_array_ref_difference (loc, type, base0, base1,
10011	                                                use_pointer_diff)))
10012	      || (INDIRECT_REF_P (base0)
10013	          && INDIRECT_REF_P (base1)
10016	              ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
10017	                                 TREE_OPERAND (base0, 0),
10018	                                 TREE_OPERAND (base1, 0))
10019	              : fold_binary_loc (loc, MINUS_EXPR, type,
10020	                                 fold_convert (type,
10021	                                               TREE_OPERAND (base0, 0)),
10022	                                 fold_convert (type,
10023	                                               TREE_OPERAND (base1, 0)))))
10024	      || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
10026	      tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10027	      tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10028	      tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
10029	      tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
10030	      return fold_build2_loc (loc, PLUS_EXPR, type,
10032	                              fold_build2_loc (loc, MULT_EXPR, type,
10038	/* If the real or vector real constant CST of type TYPE has an exact
10039	   inverse, return it, else return NULL.  */
10042	exact_inverse (tree type, tree cst)
10048	  switch (TREE_CODE (cst))
10051	      r = TREE_REAL_CST (cst);
10053	      if (exact_real_inverse (TYPE_MODE (type), &r))
10054	        return build_real (type, r);
10060	      unit_type = TREE_TYPE (type);
10061	      mode = TYPE_MODE (unit_type);
10063	      tree_vector_builder elts;
10064	      if (!elts.new_unary_operation (type, cst, false))
10066	      unsigned int count = elts.encoded_nelts ();
10067	      for (unsigned int i = 0; i < count; ++i)
10069	          r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
10070	          if (!exact_real_inverse (mode, &r))
10072	          elts.quick_push (build_real (unit_type, r));
10075	      return elts.build ();
10083	/* Mask out the tz least significant bits of X of type TYPE where
10084	   tz is the number of trailing zeroes in Y.  */
10086	mask_with_tz (tree type, const wide_int &x, const wide_int &y)
10088	  int tz = wi::ctz (y);
10090	    return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
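/* Example: y = 0b11000 has three trailing zeroes, so for x = 0b10111 the
   three least significant bits of X are masked out and the result is
   0b10000.  */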
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					strict_overflow_p);

    case CALL_EXPR:
      {
	tree fndecl = get_callee_fndecl (t);
	if (!fndecl) return false;
	if (flag_delete_null_pointer_checks && !flag_check_new
	    && DECL_IS_OPERATOR_NEW_P (fndecl)
	    && !TREE_NOTHROW (fndecl))
	  return true;
	if (flag_delete_null_pointer_checks
	    && lookup_attribute ("returns_nonnull",
				 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
	  return true;
	return alloca_call_p (t);
      }

    default:
      break;
    }
  return false;
}
/* Return true when T is an address and is known to be nonzero.
   Handle warnings about undefined signed overflow.  */

bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-zero"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
/* Return true if T is known not to be equal to an integer W.  */

bool
expr_not_equal_to (tree t, const wide_int &w)
{
  value_range vr;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return wi::to_wide (t) != w;

    case SSA_NAME:
      if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
	return false;
      get_range_info (t, vr);
      if (!vr.undefined_p ()
	  && !vr.contains_p (wide_int_to_tree (TREE_TYPE (t), w)))
	return true;
      /* If T has some known zero bits and W has any of those bits set,
	 then T is known not to be equal to W.  */
      if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
			      TYPE_PRECISION (TREE_TYPE (t))), 0))
	return true;
      return false;

    default:
      return false;
    }
}
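
/* Worked example for expr_not_equal_to above: if the nonzero-bits mask of T
   is 0x0f (only the low four bits can ever be set) and W is 0x10, then
   W & ~nonzero_bits (T) == 0x10 != 0, so T can never equal W.  */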
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  LOC is the location of the resulting expression.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

tree
fold_binary_loc (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree arg0, arg1, tem;
  tree t1 = NULL_TREE;
  bool strict_overflow_p;
  unsigned int prec;

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 2
	      && op0 != NULL_TREE
	      && op1 != NULL_TREE);
10258 /* Strip any conversions that don't change the mode. This is
10259 safe for every expression, except for a comparison expression
10260 because its signedness is derived from its operands. So, in
10261 the latter case, only strip conversions that don't change the
10262 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10265 Note that this is done as an internal manipulation within the
10266 constant folder, in order to find the simplest representation
10267 of the arguments so that their form can be studied. In any
10268 cases, the appropriate type conversions should be put back in
10269 the tree that will get out of the constant folder. */
10271 if (kind
== tcc_comparison
|| code
== MIN_EXPR
|| code
== MAX_EXPR
)
10273 STRIP_SIGN_NOPS (arg0
);
10274 STRIP_SIGN_NOPS (arg1
);
10282 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10283 constant but we can't do arithmetic on them. */
10284 if (CONSTANT_CLASS_P (arg0
) && CONSTANT_CLASS_P (arg1
))
10286 tem
= const_binop (code
, type
, arg0
, arg1
);
10287 if (tem
!= NULL_TREE
)
10289 if (TREE_TYPE (tem
) != type
)
10290 tem
= fold_convert_loc (loc
, type
, tem
);
10295 /* If this is a commutative operation, and ARG0 is a constant, move it
10296 to ARG1 to reduce the number of tests below. */
10297 if (commutative_tree_code (code
)
10298 && tree_swap_operands_p (arg0
, arg1
))
10299 return fold_build2_loc (loc
, code
, type
, op1
, op0
);
10301 /* Likewise if this is a comparison, and ARG0 is a constant, move it
10302 to ARG1 to reduce the number of tests below. */
10303 if (kind
== tcc_comparison
10304 && tree_swap_operands_p (arg0
, arg1
))
10305 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
, op1
, op0
);
10307 tem
= generic_simplify (loc
, code
, type
, op0
, op1
);
10311 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10313 First check for cases where an arithmetic operation is applied to a
10314 compound, conditional, or comparison operation. Push the arithmetic
10315 operation inside the compound or conditional to see if any folding
10316 can then be done. Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
10320 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
10321 one of the operands is a comparison and the other is a comparison, a
10322 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10323 code below would make the expression more complex. Change it to a
10324 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10325 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
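
  /* For instance, (a < b) & (c != 0) becomes (a < b) && (c != 0), and
     (a < b) == (c < d) becomes the inversion of (a < b) ^ (c < d).  */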
10327 if ((code
== BIT_AND_EXPR
|| code
== BIT_IOR_EXPR
10328 || code
== EQ_EXPR
|| code
== NE_EXPR
)
10329 && !VECTOR_TYPE_P (TREE_TYPE (arg0
))
10330 && ((truth_value_p (TREE_CODE (arg0
))
10331 && (truth_value_p (TREE_CODE (arg1
))
10332 || (TREE_CODE (arg1
) == BIT_AND_EXPR
10333 && integer_onep (TREE_OPERAND (arg1
, 1)))))
10334 || (truth_value_p (TREE_CODE (arg1
))
10335 && (truth_value_p (TREE_CODE (arg0
))
10336 || (TREE_CODE (arg0
) == BIT_AND_EXPR
10337 && integer_onep (TREE_OPERAND (arg0
, 1)))))))
10339 tem
= fold_build2_loc (loc
, code
== BIT_AND_EXPR
? TRUTH_AND_EXPR
10340 : code
== BIT_IOR_EXPR
? TRUTH_OR_EXPR
10343 fold_convert_loc (loc
, boolean_type_node
, arg0
),
10344 fold_convert_loc (loc
, boolean_type_node
, arg1
));
10346 if (code
== EQ_EXPR
)
10347 tem
= invert_truthvalue_loc (loc
, tem
);
10349 return fold_convert_loc (loc
, type
, tem
);
10352 if (TREE_CODE_CLASS (code
) == tcc_binary
10353 || TREE_CODE_CLASS (code
) == tcc_comparison
)
10355 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
10357 tem
= fold_build2_loc (loc
, code
, type
,
10358 fold_convert_loc (loc
, TREE_TYPE (op0
),
10359 TREE_OPERAND (arg0
, 1)), op1
);
10360 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
10363 if (TREE_CODE (arg1
) == COMPOUND_EXPR
)
10365 tem
= fold_build2_loc (loc
, code
, type
, op0
,
10366 fold_convert_loc (loc
, TREE_TYPE (op1
),
10367 TREE_OPERAND (arg1
, 1)));
10368 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg1
, 0),
10372 if (TREE_CODE (arg0
) == COND_EXPR
10373 || TREE_CODE (arg0
) == VEC_COND_EXPR
10374 || COMPARISON_CLASS_P (arg0
))
10376 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
10378 /*cond_first_p=*/1);
10379 if (tem
!= NULL_TREE
)
10383 if (TREE_CODE (arg1
) == COND_EXPR
10384 || TREE_CODE (arg1
) == VEC_COND_EXPR
10385 || COMPARISON_CLASS_P (arg1
))
10387 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
10389 /*cond_first_p=*/0);
10390 if (tem
!= NULL_TREE
)
10398 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10399 if (TREE_CODE (arg0
) == ADDR_EXPR
10400 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == MEM_REF
)
10402 tree iref
= TREE_OPERAND (arg0
, 0);
10403 return fold_build2 (MEM_REF
, type
,
10404 TREE_OPERAND (iref
, 0),
10405 int_const_binop (PLUS_EXPR
, arg1
,
10406 TREE_OPERAND (iref
, 1)));
10409 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10410 if (TREE_CODE (arg0
) == ADDR_EXPR
10411 && handled_component_p (TREE_OPERAND (arg0
, 0)))
10414 poly_int64 coffset
;
10415 base
= get_addr_base_and_unit_offset (TREE_OPERAND (arg0
, 0),
10419 return fold_build2 (MEM_REF
, type
,
10420 build1 (ADDR_EXPR
, TREE_TYPE (arg0
), base
),
10421 int_const_binop (PLUS_EXPR
, arg1
,
10422 size_int (coffset
)));
10427 case POINTER_PLUS_EXPR
:
10428 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10429 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
10430 && INTEGRAL_TYPE_P (TREE_TYPE (arg0
)))
10431 return fold_convert_loc (loc
, type
,
10432 fold_build2_loc (loc
, PLUS_EXPR
, sizetype
,
10433 fold_convert_loc (loc
, sizetype
,
10435 fold_convert_loc (loc
, sizetype
,
10441 if (INTEGRAL_TYPE_P (type
) || VECTOR_INTEGER_TYPE_P (type
))
10443 /* X + (X / CST) * -CST is X % CST. */
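	  /* For example, with CST == 8:  x + (x / 8) * -8 == x - (x / 8) * 8
	     == x % 8 for truncating integer division.  */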
10444 if (TREE_CODE (arg1
) == MULT_EXPR
10445 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == TRUNC_DIV_EXPR
10446 && operand_equal_p (arg0
,
10447 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0), 0))
10449 tree cst0
= TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1);
10450 tree cst1
= TREE_OPERAND (arg1
, 1);
10451 tree sum
= fold_binary_loc (loc
, PLUS_EXPR
, TREE_TYPE (cst1
),
10453 if (sum
&& integer_zerop (sum
))
10454 return fold_convert_loc (loc
, type
,
10455 fold_build2_loc (loc
, TRUNC_MOD_EXPR
,
10456 TREE_TYPE (arg0
), arg0
,
10461 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10462 one. Make sure the type is not saturating and has the signedness of
10463 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10464 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10465 if ((TREE_CODE (arg0
) == MULT_EXPR
10466 || TREE_CODE (arg1
) == MULT_EXPR
)
10467 && !TYPE_SATURATING (type
)
10468 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg0
))
10469 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg1
))
10470 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
10472 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
10477 if (! FLOAT_TYPE_P (type
))
10479 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10480 (plus (plus (mult) (mult)) (foo)) so that we can
10481 take advantage of the factoring cases below. */
10482 if (ANY_INTEGRAL_TYPE_P (type
)
10483 && TYPE_OVERFLOW_WRAPS (type
)
10484 && (((TREE_CODE (arg0
) == PLUS_EXPR
10485 || TREE_CODE (arg0
) == MINUS_EXPR
)
10486 && TREE_CODE (arg1
) == MULT_EXPR
)
10487 || ((TREE_CODE (arg1
) == PLUS_EXPR
10488 || TREE_CODE (arg1
) == MINUS_EXPR
)
10489 && TREE_CODE (arg0
) == MULT_EXPR
)))
10491 tree parg0
, parg1
, parg
, marg
;
10492 enum tree_code pcode
;
10494 if (TREE_CODE (arg1
) == MULT_EXPR
)
10495 parg
= arg0
, marg
= arg1
;
10497 parg
= arg1
, marg
= arg0
;
10498 pcode
= TREE_CODE (parg
);
10499 parg0
= TREE_OPERAND (parg
, 0);
10500 parg1
= TREE_OPERAND (parg
, 1);
10501 STRIP_NOPS (parg0
);
10502 STRIP_NOPS (parg1
);
10504 if (TREE_CODE (parg0
) == MULT_EXPR
10505 && TREE_CODE (parg1
) != MULT_EXPR
)
10506 return fold_build2_loc (loc
, pcode
, type
,
10507 fold_build2_loc (loc
, PLUS_EXPR
, type
,
10508 fold_convert_loc (loc
, type
,
10510 fold_convert_loc (loc
, type
,
10512 fold_convert_loc (loc
, type
, parg1
));
10513 if (TREE_CODE (parg0
) != MULT_EXPR
10514 && TREE_CODE (parg1
) == MULT_EXPR
)
10516 fold_build2_loc (loc
, PLUS_EXPR
, type
,
10517 fold_convert_loc (loc
, type
, parg0
),
10518 fold_build2_loc (loc
, pcode
, type
,
10519 fold_convert_loc (loc
, type
, marg
),
10520 fold_convert_loc (loc
, type
,
10526 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10527 to __complex__ ( x, y ). This is not the same for SNaNs or
10528 if signed zeros are involved. */
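	 /* With signed zeros, dropping the addition of a zero real part
	    would be wrong: if x is -0.0, the real part of the exact sum
	    is (-0.0) + (+0.0) == +0.0, while the folded form would keep
	    -0.0.  */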
10529 if (!HONOR_SNANS (element_mode (arg0
))
10530 && !HONOR_SIGNED_ZEROS (element_mode (arg0
))
10531 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
10533 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
10534 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
10535 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
10536 bool arg0rz
= false, arg0iz
= false;
10537 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
10538 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
10540 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
10541 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
10542 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
10544 tree rp
= arg1r
? arg1r
10545 : build1 (REALPART_EXPR
, rtype
, arg1
);
10546 tree ip
= arg0i
? arg0i
10547 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
10548 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10550 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
10552 tree rp
= arg0r
? arg0r
10553 : build1 (REALPART_EXPR
, rtype
, arg0
);
10554 tree ip
= arg1i
? arg1i
10555 : build1 (IMAGPART_EXPR
, rtype
, arg1
);
10556 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10561 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10562 We associate floats only if the user has specified
10563 -fassociative-math. */
10564 if (flag_associative_math
10565 && TREE_CODE (arg1
) == PLUS_EXPR
10566 && TREE_CODE (arg0
) != MULT_EXPR
)
10568 tree tree10
= TREE_OPERAND (arg1
, 0);
10569 tree tree11
= TREE_OPERAND (arg1
, 1);
10570 if (TREE_CODE (tree11
) == MULT_EXPR
10571 && TREE_CODE (tree10
) == MULT_EXPR
)
10574 tree0
= fold_build2_loc (loc
, PLUS_EXPR
, type
, arg0
, tree10
);
10575 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tree0
, tree11
);
10578 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10579 We associate floats only if the user has specified
10580 -fassociative-math. */
10581 if (flag_associative_math
10582 && TREE_CODE (arg0
) == PLUS_EXPR
10583 && TREE_CODE (arg1
) != MULT_EXPR
)
10585 tree tree00
= TREE_OPERAND (arg0
, 0);
10586 tree tree01
= TREE_OPERAND (arg0
, 1);
10587 if (TREE_CODE (tree01
) == MULT_EXPR
10588 && TREE_CODE (tree00
) == MULT_EXPR
)
10591 tree0
= fold_build2_loc (loc
, PLUS_EXPR
, type
, tree01
, arg1
);
10592 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tree00
, tree0
);
10598 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10599 is a rotate of A by C1 bits. */
10600 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10601 is a rotate of A by B bits.
10602 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
10603 though in this case CODE must be | and not + or ^, otherwise
10604 it doesn't return A when B is 0. */
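	/* Illustration: for a 32-bit unsigned A, (A << 5) + (A >> 27) is a
	   rotate left by 5.  For the variable form, (A << B) | (A >> (-B & 31))
	   still yields A when B == 0 (A >> 0 == A), whereas '+' would give
	   2*A, which is why only '|' is accepted there.  */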
10606 enum tree_code code0
, code1
;
10608 code0
= TREE_CODE (arg0
);
10609 code1
= TREE_CODE (arg1
);
10610 if (((code0
== RSHIFT_EXPR
&& code1
== LSHIFT_EXPR
)
10611 || (code1
== RSHIFT_EXPR
&& code0
== LSHIFT_EXPR
))
10612 && operand_equal_p (TREE_OPERAND (arg0
, 0),
10613 TREE_OPERAND (arg1
, 0), 0)
10614 && (rtype
= TREE_TYPE (TREE_OPERAND (arg0
, 0)),
10615 TYPE_UNSIGNED (rtype
))
10616 /* Only create rotates in complete modes. Other cases are not
10617 expanded properly. */
10618 && (element_precision (rtype
)
10619 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype
))))
10621 tree tree01
, tree11
;
10622 tree orig_tree01
, orig_tree11
;
10623 enum tree_code code01
, code11
;
10625 tree01
= orig_tree01
= TREE_OPERAND (arg0
, 1);
10626 tree11
= orig_tree11
= TREE_OPERAND (arg1
, 1);
10627 STRIP_NOPS (tree01
);
10628 STRIP_NOPS (tree11
);
10629 code01
= TREE_CODE (tree01
);
10630 code11
= TREE_CODE (tree11
);
10631 if (code11
!= MINUS_EXPR
10632 && (code01
== MINUS_EXPR
|| code01
== BIT_AND_EXPR
))
10634 std::swap (code0
, code1
);
10635 std::swap (code01
, code11
);
10636 std::swap (tree01
, tree11
);
10637 std::swap (orig_tree01
, orig_tree11
);
10639 if (code01
== INTEGER_CST
10640 && code11
== INTEGER_CST
10641 && (wi::to_widest (tree01
) + wi::to_widest (tree11
)
10642 == element_precision (rtype
)))
10644 tem
= build2_loc (loc
, LROTATE_EXPR
,
10645 rtype
, TREE_OPERAND (arg0
, 0),
10646 code0
== LSHIFT_EXPR
10647 ? orig_tree01
: orig_tree11
);
10648 return fold_convert_loc (loc
, type
, tem
);
10650 else if (code11
== MINUS_EXPR
)
10652 tree tree110
, tree111
;
10653 tree110
= TREE_OPERAND (tree11
, 0);
10654 tree111
= TREE_OPERAND (tree11
, 1);
10655 STRIP_NOPS (tree110
);
10656 STRIP_NOPS (tree111
);
10657 if (TREE_CODE (tree110
) == INTEGER_CST
10658 && compare_tree_int (tree110
,
10659 element_precision (rtype
)) == 0
10660 && operand_equal_p (tree01
, tree111
, 0))
10662 tem
= build2_loc (loc
, (code0
== LSHIFT_EXPR
10663 ? LROTATE_EXPR
: RROTATE_EXPR
),
10664 rtype
, TREE_OPERAND (arg0
, 0),
10666 return fold_convert_loc (loc
, type
, tem
);
10669 else if (code
== BIT_IOR_EXPR
10670 && code11
== BIT_AND_EXPR
10671 && pow2p_hwi (element_precision (rtype
)))
10673 tree tree110
, tree111
;
10674 tree110
= TREE_OPERAND (tree11
, 0);
10675 tree111
= TREE_OPERAND (tree11
, 1);
10676 STRIP_NOPS (tree110
);
10677 STRIP_NOPS (tree111
);
10678 if (TREE_CODE (tree110
) == NEGATE_EXPR
10679 && TREE_CODE (tree111
) == INTEGER_CST
10680 && compare_tree_int (tree111
,
10681 element_precision (rtype
) - 1) == 0
10682 && operand_equal_p (tree01
, TREE_OPERAND (tree110
, 0), 0))
10684 tem
= build2_loc (loc
, (code0
== LSHIFT_EXPR
10685 ? LROTATE_EXPR
: RROTATE_EXPR
),
10686 rtype
, TREE_OPERAND (arg0
, 0),
10688 return fold_convert_loc (loc
, type
, tem
);
10695 /* In most languages, can't associate operations on floats through
10696 parentheses. Rather than remember where the parentheses were, we
10697 don't associate floats at all, unless the user has specified
10698 -fassociative-math.
10699 And, we need to make sure type is not saturating. */
10701 if ((! FLOAT_TYPE_P (type
) || flag_associative_math
)
10702 && !TYPE_SATURATING (type
))
10704 tree var0
, minus_var0
, con0
, minus_con0
, lit0
, minus_lit0
;
10705 tree var1
, minus_var1
, con1
, minus_con1
, lit1
, minus_lit1
;
10709 /* Split both trees into variables, constants, and literals. Then
10710 associate each group together, the constants with literals,
10711 then the result with variables. This increases the chances of
10712 literals being recombined later and of generating relocatable
10713 expressions for the sum of a constant and literal. */
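	  /* For instance, (x + 3) + (y + 7) is split into the variables
	     x and y and the literals 3 and 7, which reassociate to
	     (x + y) + 10.  */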
10714 var0
= split_tree (arg0
, type
, code
,
10715 &minus_var0
, &con0
, &minus_con0
,
10716 &lit0
, &minus_lit0
, 0);
10717 var1
= split_tree (arg1
, type
, code
,
10718 &minus_var1
, &con1
, &minus_con1
,
10719 &lit1
, &minus_lit1
, code
== MINUS_EXPR
);
10721 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10722 if (code
== MINUS_EXPR
)
10725 /* With undefined overflow prefer doing association in a type
10726 which wraps on overflow, if that is one of the operand types. */
10727 if ((POINTER_TYPE_P (type
) || INTEGRAL_TYPE_P (type
))
10728 && !TYPE_OVERFLOW_WRAPS (type
))
10730 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
10731 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
10732 atype
= TREE_TYPE (arg0
);
10733 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
10734 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1
)))
10735 atype
= TREE_TYPE (arg1
);
10736 gcc_assert (TYPE_PRECISION (atype
) == TYPE_PRECISION (type
));
10739 /* With undefined overflow we can only associate constants with one
10740 variable, and constants whose association doesn't overflow. */
10741 if ((POINTER_TYPE_P (atype
) || INTEGRAL_TYPE_P (atype
))
10742 && !TYPE_OVERFLOW_WRAPS (atype
))
10744 if ((var0
&& var1
) || (minus_var0
&& minus_var1
))
10746 /* ??? If split_tree would handle NEGATE_EXPR we could
10747 simply reject these cases and the allowed cases would
10748 be the var0/minus_var1 ones. */
10749 tree tmp0
= var0
? var0
: minus_var0
;
10750 tree tmp1
= var1
? var1
: minus_var1
;
10751 bool one_neg
= false;
10753 if (TREE_CODE (tmp0
) == NEGATE_EXPR
)
10755 tmp0
= TREE_OPERAND (tmp0
, 0);
10756 one_neg
= !one_neg
;
10758 if (CONVERT_EXPR_P (tmp0
)
10759 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0
, 0)))
10760 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0
, 0)))
10761 <= TYPE_PRECISION (atype
)))
10762 tmp0
= TREE_OPERAND (tmp0
, 0);
10763 if (TREE_CODE (tmp1
) == NEGATE_EXPR
)
10765 tmp1
= TREE_OPERAND (tmp1
, 0);
10766 one_neg
= !one_neg
;
10768 if (CONVERT_EXPR_P (tmp1
)
10769 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1
, 0)))
10770 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1
, 0)))
10771 <= TYPE_PRECISION (atype
)))
10772 tmp1
= TREE_OPERAND (tmp1
, 0);
10773 /* The only case we can still associate with two variables
10774 is if they cancel out. */
10776 || !operand_equal_p (tmp0
, tmp1
, 0))
10779 else if ((var0
&& minus_var1
10780 && ! operand_equal_p (var0
, minus_var1
, 0))
10781 || (minus_var0
&& var1
10782 && ! operand_equal_p (minus_var0
, var1
, 0)))
10786 /* Only do something if we found more than two objects. Otherwise,
10787 nothing has changed and we risk infinite recursion. */
10789 && ((var0
!= 0) + (var1
!= 0)
10790 + (minus_var0
!= 0) + (minus_var1
!= 0)
10791 + (con0
!= 0) + (con1
!= 0)
10792 + (minus_con0
!= 0) + (minus_con1
!= 0)
10793 + (lit0
!= 0) + (lit1
!= 0)
10794 + (minus_lit0
!= 0) + (minus_lit1
!= 0)) > 2)
10796 var0
= associate_trees (loc
, var0
, var1
, code
, atype
);
10797 minus_var0
= associate_trees (loc
, minus_var0
, minus_var1
,
10799 con0
= associate_trees (loc
, con0
, con1
, code
, atype
);
10800 minus_con0
= associate_trees (loc
, minus_con0
, minus_con1
,
10802 lit0
= associate_trees (loc
, lit0
, lit1
, code
, atype
);
10803 minus_lit0
= associate_trees (loc
, minus_lit0
, minus_lit1
,
10806 if (minus_var0
&& var0
)
10808 var0
= associate_trees (loc
, var0
, minus_var0
,
10809 MINUS_EXPR
, atype
);
10812 if (minus_con0
&& con0
)
10814 con0
= associate_trees (loc
, con0
, minus_con0
,
10815 MINUS_EXPR
, atype
);
10819 /* Preserve the MINUS_EXPR if the negative part of the literal is
10820 greater than the positive part. Otherwise, the multiplicative
10821 folding code (i.e extract_muldiv) may be fooled in case
10822 unsigned constants are subtracted, like in the following
10823 example: ((X*2 + 4) - 8U)/2. */
10824 if (minus_lit0
&& lit0
)
10826 if (TREE_CODE (lit0
) == INTEGER_CST
10827 && TREE_CODE (minus_lit0
) == INTEGER_CST
10828 && tree_int_cst_lt (lit0
, minus_lit0
)
10829 /* But avoid ending up with only negated parts. */
10832 minus_lit0
= associate_trees (loc
, minus_lit0
, lit0
,
10833 MINUS_EXPR
, atype
);
10838 lit0
= associate_trees (loc
, lit0
, minus_lit0
,
10839 MINUS_EXPR
, atype
);
10844 /* Don't introduce overflows through reassociation. */
10845 if ((lit0
&& TREE_OVERFLOW_P (lit0
))
10846 || (minus_lit0
&& TREE_OVERFLOW_P (minus_lit0
)))
10849 /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
10850 con0
= associate_trees (loc
, con0
, lit0
, code
, atype
);
10852 minus_con0
= associate_trees (loc
, minus_con0
, minus_lit0
,
10856 /* Eliminate minus_con0. */
10860 con0
= associate_trees (loc
, con0
, minus_con0
,
10861 MINUS_EXPR
, atype
);
10863 var0
= associate_trees (loc
, var0
, minus_con0
,
10864 MINUS_EXPR
, atype
);
10866 gcc_unreachable ();
10870 /* Eliminate minus_var0. */
10874 con0
= associate_trees (loc
, con0
, minus_var0
,
10875 MINUS_EXPR
, atype
);
10877 gcc_unreachable ();
10882 fold_convert_loc (loc
, type
, associate_trees (loc
, var0
, con0
,
10889 case POINTER_DIFF_EXPR
:
10891 /* Fold &a[i] - &a[j] to i-j. */
10892 if (TREE_CODE (arg0
) == ADDR_EXPR
10893 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == ARRAY_REF
10894 && TREE_CODE (arg1
) == ADDR_EXPR
10895 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == ARRAY_REF
)
10897 tree tem
= fold_addr_of_array_ref_difference (loc
, type
,
10898 TREE_OPERAND (arg0
, 0),
10899 TREE_OPERAND (arg1
, 0),
10901 == POINTER_DIFF_EXPR
);
10906 /* Further transformations are not for pointers. */
10907 if (code
== POINTER_DIFF_EXPR
)
10910 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10911 if (TREE_CODE (arg0
) == NEGATE_EXPR
10912 && negate_expr_p (op1
)
10913 /* If arg0 is e.g. unsigned int and type is int, then this could
10914 introduce UB, because if A is INT_MIN at runtime, the original
10915 expression can be well defined while the latter is not.
10917 && !(ANY_INTEGRAL_TYPE_P (type
)
10918 && TYPE_OVERFLOW_UNDEFINED (type
)
10919 && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
10920 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))))
10921 return fold_build2_loc (loc
, MINUS_EXPR
, type
, negate_expr (op1
),
10922 fold_convert_loc (loc
, type
,
10923 TREE_OPERAND (arg0
, 0)));
10925 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10926 __complex__ ( x, -y ). This is not the same for SNaNs or if
10927 signed zeros are involved. */
10928 if (!HONOR_SNANS (element_mode (arg0
))
10929 && !HONOR_SIGNED_ZEROS (element_mode (arg0
))
10930 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
10932 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
10933 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
10934 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
10935 bool arg0rz
= false, arg0iz
= false;
10936 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
10937 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
10939 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
10940 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
10941 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
10943 tree rp
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
10945 : build1 (REALPART_EXPR
, rtype
, arg1
));
10946 tree ip
= arg0i
? arg0i
10947 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
10948 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10950 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
10952 tree rp
= arg0r
? arg0r
10953 : build1 (REALPART_EXPR
, rtype
, arg0
);
10954 tree ip
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
10956 : build1 (IMAGPART_EXPR
, rtype
, arg1
));
10957 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10962 /* A - B -> A + (-B) if B is easily negatable. */
10963 if (negate_expr_p (op1
)
10964 && ! TYPE_OVERFLOW_SANITIZED (type
)
10965 && ((FLOAT_TYPE_P (type
)
10966 /* Avoid this transformation if B is a positive REAL_CST. */
10967 && (TREE_CODE (op1
) != REAL_CST
10968 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1
))))
10969 || INTEGRAL_TYPE_P (type
)))
10970 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
10971 fold_convert_loc (loc
, type
, arg0
),
10972 negate_expr (op1
));
10974 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10975 one. Make sure the type is not saturating and has the signedness of
10976 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10977 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10978 if ((TREE_CODE (arg0
) == MULT_EXPR
10979 || TREE_CODE (arg1
) == MULT_EXPR
)
10980 && !TYPE_SATURATING (type
)
10981 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg0
))
10982 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg1
))
10983 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
10985 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
10993 if (! FLOAT_TYPE_P (type
))
10995 /* Transform x * -C into -x * C if x is easily negatable. */
10996 if (TREE_CODE (op1
) == INTEGER_CST
10997 && tree_int_cst_sgn (op1
) == -1
10998 && negate_expr_p (op0
)
10999 && negate_expr_p (op1
)
11000 && (tem
= negate_expr (op1
)) != op1
11001 && ! TREE_OVERFLOW (tem
))
11002 return fold_build2_loc (loc
, MULT_EXPR
, type
,
11003 fold_convert_loc (loc
, type
,
11004 negate_expr (op0
)), tem
);
11006 strict_overflow_p
= false;
11007 if (TREE_CODE (arg1
) == INTEGER_CST
11008 && (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
11009 &strict_overflow_p
)) != 0)
11011 if (strict_overflow_p
)
11012 fold_overflow_warning (("assuming signed overflow does not "
11013 "occur when simplifying "
11015 WARN_STRICT_OVERFLOW_MISC
);
11016 return fold_convert_loc (loc
, type
, tem
);
11019 /* Optimize z * conj(z) for integer complex numbers. */
11020 if (TREE_CODE (arg0
) == CONJ_EXPR
11021 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11022 return fold_mult_zconjz (loc
, type
, arg1
);
11023 if (TREE_CODE (arg1
) == CONJ_EXPR
11024 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11025 return fold_mult_zconjz (loc
, type
, arg0
);
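      /* z * conj(z) = (a + bi) * (a - bi) = a*a + b*b, i.e. a result whose
	 imaginary part is zero, which is what fold_mult_zconjz builds.  */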
11029 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11030 This is not the same for NaNs or if signed zeros are
11032 if (!HONOR_NANS (arg0
)
11033 && !HONOR_SIGNED_ZEROS (element_mode (arg0
))
11034 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
))
11035 && TREE_CODE (arg1
) == COMPLEX_CST
11036 && real_zerop (TREE_REALPART (arg1
)))
11038 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
11039 if (real_onep (TREE_IMAGPART (arg1
)))
11041 fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
11042 negate_expr (fold_build1_loc (loc
, IMAGPART_EXPR
,
11044 fold_build1_loc (loc
, REALPART_EXPR
, rtype
, arg0
));
11045 else if (real_minus_onep (TREE_IMAGPART (arg1
)))
11047 fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
11048 fold_build1_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
),
11049 negate_expr (fold_build1_loc (loc
, REALPART_EXPR
,
11053 /* Optimize z * conj(z) for floating point complex numbers.
11054 Guarded by flag_unsafe_math_optimizations as non-finite
11055 imaginary components don't produce scalar results. */
11056 if (flag_unsafe_math_optimizations
11057 && TREE_CODE (arg0
) == CONJ_EXPR
11058 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11059 return fold_mult_zconjz (loc
, type
, arg1
);
11060 if (flag_unsafe_math_optimizations
11061 && TREE_CODE (arg1
) == CONJ_EXPR
11062 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11063 return fold_mult_zconjz (loc
, type
, arg0
);
11068 /* Canonicalize (X & C1) | C2. */
11069 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11070 && TREE_CODE (arg1
) == INTEGER_CST
11071 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11073 int width
= TYPE_PRECISION (type
), w
;
11074 wide_int c1
= wi::to_wide (TREE_OPERAND (arg0
, 1));
11075 wide_int c2
= wi::to_wide (arg1
);
11077 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11078 if ((c1
& c2
) == c1
)
11079 return omit_one_operand_loc (loc
, type
, arg1
,
11080 TREE_OPERAND (arg0
, 0));
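	  /* e.g. (x & 0x0f) | 0xff folds to 0xff, since every bit C1 can
	     contribute is already set in C2 (x is kept only for its side
	     effects).  */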
11082 wide_int msk
= wi::mask (width
, false,
11083 TYPE_PRECISION (TREE_TYPE (arg1
)));
11085 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11086 if (wi::bit_and_not (msk
, c1
| c2
) == 0)
11088 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11089 return fold_build2_loc (loc
, BIT_IOR_EXPR
, type
, tem
, arg1
);
11092 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11093 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11094 mode which allows further optimizations. */
11097 wide_int c3
= wi::bit_and_not (c1
, c2
);
11098 for (w
= BITS_PER_UNIT
; w
<= width
; w
<<= 1)
11100 wide_int mask
= wi::mask (w
, false,
11101 TYPE_PRECISION (type
));
11102 if (((c1
| c2
) & mask
) == mask
11103 && wi::bit_and_not (c1
, mask
) == 0)
11112 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11113 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
, tem
,
11114 wide_int_to_tree (type
, c3
));
11115 return fold_build2_loc (loc
, BIT_IOR_EXPR
, type
, tem
, arg1
);
11119 /* See if this can be simplified into a rotate first. If that
11120 is unsuccessful continue in the association code. */
11124 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11125 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11126 && INTEGRAL_TYPE_P (type
)
11127 && integer_onep (TREE_OPERAND (arg0
, 1))
11128 && integer_onep (arg1
))
11129 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
,
11130 build_zero_cst (TREE_TYPE (arg0
)));
11132 /* See if this can be simplified into a rotate first. If that
11133 is unsuccessful continue in the association code. */
11137 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11138 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11139 && INTEGRAL_TYPE_P (type
)
11140 && integer_onep (TREE_OPERAND (arg0
, 1))
11141 && integer_onep (arg1
))
11144 tem
= TREE_OPERAND (arg0
, 0);
11145 tem2
= fold_convert_loc (loc
, TREE_TYPE (tem
), arg1
);
11146 tem2
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
),
11148 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem2
,
11149 build_zero_cst (TREE_TYPE (tem
)));
11151 /* Fold ~X & 1 as (X & 1) == 0. */
11152 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11153 && INTEGRAL_TYPE_P (type
)
11154 && integer_onep (arg1
))
11157 tem
= TREE_OPERAND (arg0
, 0);
11158 tem2
= fold_convert_loc (loc
, TREE_TYPE (tem
), arg1
);
11159 tem2
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
),
11161 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem2
,
11162 build_zero_cst (TREE_TYPE (tem
)));
11164 /* Fold !X & 1 as X == 0. */
11165 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
11166 && integer_onep (arg1
))
11168 tem
= TREE_OPERAND (arg0
, 0);
11169 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem
,
11170 build_zero_cst (TREE_TYPE (tem
)));
11173 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11174 multiple of 1 << CST. */
11175 if (TREE_CODE (arg1
) == INTEGER_CST
)
11177 wi::tree_to_wide_ref cst1
= wi::to_wide (arg1
);
11178 wide_int ncst1
= -cst1
;
11179 if ((cst1
& ncst1
) == ncst1
11180 && multiple_of_p (type
, arg0
,
11181 wide_int_to_tree (TREE_TYPE (arg1
), ncst1
)))
11182 return fold_convert_loc (loc
, type
, arg0
);
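	  /* e.g. (x * 8) & -8 folds to x * 8: the product is already a
	     multiple of 8, so its low three bits are zero and the mask
	     changes nothing.  */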
11185 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11187 if (TREE_CODE (arg1
) == INTEGER_CST
11188 && TREE_CODE (arg0
) == MULT_EXPR
11189 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11191 wi::tree_to_wide_ref warg1
= wi::to_wide (arg1
);
11193 = mask_with_tz (type
, warg1
, wi::to_wide (TREE_OPERAND (arg0
, 1)));
11196 return omit_two_operands_loc (loc
, type
, build_zero_cst (type
),
11198 else if (masked
!= warg1
)
11200 /* Avoid the transform if arg1 is a mask of some
11201 mode which allows further optimizations. */
11202 int pop
= wi::popcount (warg1
);
11203 if (!(pop
>= BITS_PER_UNIT
11205 && wi::mask (pop
, false, warg1
.get_precision ()) == warg1
))
11206 return fold_build2_loc (loc
, code
, type
, op0
,
11207 wide_int_to_tree (type
, masked
));
11211 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11212 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) == NOP_EXPR
11213 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0
, 0))))
11215 prec
= element_precision (TREE_TYPE (TREE_OPERAND (arg0
, 0)));
11217 wide_int mask
= wide_int::from (wi::to_wide (arg1
), prec
, UNSIGNED
);
11220 fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11226 /* Don't touch a floating-point divide by zero unless the mode
11227 of the constant can represent infinity. */
11228 if (TREE_CODE (arg1
) == REAL_CST
11229 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1
)))
11230 && real_zerop (arg1
))
11233 /* (-A) / (-B) -> A / B */
11234 if (TREE_CODE (arg0
) == NEGATE_EXPR
&& negate_expr_p (arg1
))
11235 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11236 TREE_OPERAND (arg0
, 0),
11237 negate_expr (arg1
));
11238 if (TREE_CODE (arg1
) == NEGATE_EXPR
&& negate_expr_p (arg0
))
11239 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11240 negate_expr (arg0
),
11241 TREE_OPERAND (arg1
, 0));
11244 case TRUNC_DIV_EXPR
:
11247 case FLOOR_DIV_EXPR
:
11248 /* Simplify A / (B << N) where A and B are positive and B is
11249 a power of 2, to A >> (N + log2(B)). */
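      /* e.g. for unsigned A, A / (4 << N) becomes A >> (N + 2),
	 since log2(4) == 2.  */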
11250 strict_overflow_p
= false;
11251 if (TREE_CODE (arg1
) == LSHIFT_EXPR
11252 && (TYPE_UNSIGNED (type
)
11253 || tree_expr_nonnegative_warnv_p (op0
, &strict_overflow_p
)))
11255 tree sval
= TREE_OPERAND (arg1
, 0);
11256 if (integer_pow2p (sval
) && tree_int_cst_sgn (sval
) > 0)
11258 tree sh_cnt
= TREE_OPERAND (arg1
, 1);
11259 tree pow2
= build_int_cst (TREE_TYPE (sh_cnt
),
11260 wi::exact_log2 (wi::to_wide (sval
)));
11262 if (strict_overflow_p
)
11263 fold_overflow_warning (("assuming signed overflow does not "
11264 "occur when simplifying A / (B << N)"),
11265 WARN_STRICT_OVERFLOW_MISC
);
11267 sh_cnt
= fold_build2_loc (loc
, PLUS_EXPR
, TREE_TYPE (sh_cnt
),
11269 return fold_build2_loc (loc
, RSHIFT_EXPR
, type
,
11270 fold_convert_loc (loc
, type
, arg0
), sh_cnt
);
11276 case ROUND_DIV_EXPR
:
11277 case CEIL_DIV_EXPR
:
11278 case EXACT_DIV_EXPR
:
11279 if (integer_zerop (arg1
))
11282 /* Convert -A / -B to A / B when the type is signed and overflow is
11284 if ((!ANY_INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
11285 && TREE_CODE (op0
) == NEGATE_EXPR
11286 && negate_expr_p (op1
))
11288 if (ANY_INTEGRAL_TYPE_P (type
))
11289 fold_overflow_warning (("assuming signed overflow does not occur "
11290 "when distributing negation across "
11292 WARN_STRICT_OVERFLOW_MISC
);
11293 return fold_build2_loc (loc
, code
, type
,
11294 fold_convert_loc (loc
, type
,
11295 TREE_OPERAND (arg0
, 0)),
11296 negate_expr (op1
));
11298 if ((!ANY_INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
11299 && TREE_CODE (arg1
) == NEGATE_EXPR
11300 && negate_expr_p (op0
))
11302 if (ANY_INTEGRAL_TYPE_P (type
))
11303 fold_overflow_warning (("assuming signed overflow does not occur "
11304 "when distributing negation across "
11306 WARN_STRICT_OVERFLOW_MISC
);
11307 return fold_build2_loc (loc
, code
, type
,
11309 fold_convert_loc (loc
, type
,
11310 TREE_OPERAND (arg1
, 0)));
11313 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11314 operation, EXACT_DIV_EXPR.
11316 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11317 At one time others generated faster code, it's not clear if they do
11318 after the last round to changes to the DIV code in expmed.c. */
11319 if ((code
== CEIL_DIV_EXPR
|| code
== FLOOR_DIV_EXPR
)
11320 && multiple_of_p (type
, arg0
, arg1
))
11321 return fold_build2_loc (loc
, EXACT_DIV_EXPR
, type
,
11322 fold_convert (type
, arg0
),
11323 fold_convert (type
, arg1
));
11325 strict_overflow_p
= false;
11326 if (TREE_CODE (arg1
) == INTEGER_CST
11327 && (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
11328 &strict_overflow_p
)) != 0)
11330 if (strict_overflow_p
)
11331 fold_overflow_warning (("assuming signed overflow does not occur "
11332 "when simplifying division"),
11333 WARN_STRICT_OVERFLOW_MISC
);
11334 return fold_convert_loc (loc
, type
, tem
);
11339 case CEIL_MOD_EXPR
:
11340 case FLOOR_MOD_EXPR
:
11341 case ROUND_MOD_EXPR
:
11342 case TRUNC_MOD_EXPR
:
11343 strict_overflow_p
= false;
11344 if (TREE_CODE (arg1
) == INTEGER_CST
11345 && (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
11346 &strict_overflow_p
)) != 0)
11348 if (strict_overflow_p
)
11349 fold_overflow_warning (("assuming signed overflow does not occur "
11350 "when simplifying modulus"),
11351 WARN_STRICT_OVERFLOW_MISC
);
11352 return fold_convert_loc (loc
, type
, tem
);
11361 /* Since negative shift count is not well-defined,
11362 don't try to compute it in the compiler. */
11363 if (TREE_CODE (arg1
) == INTEGER_CST
&& tree_int_cst_sgn (arg1
) < 0)
11366 prec
= element_precision (type
);
11368 /* If we have a rotate of a bit operation with the rotate count and
11369 the second operand of the bit operation both constant,
11370 permute the two operations. */
11371 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
11372 && (TREE_CODE (arg0
) == BIT_AND_EXPR
11373 || TREE_CODE (arg0
) == BIT_IOR_EXPR
11374 || TREE_CODE (arg0
) == BIT_XOR_EXPR
)
11375 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11377 tree arg00
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11378 tree arg01
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11379 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
,
11380 fold_build2_loc (loc
, code
, type
,
11382 fold_build2_loc (loc
, code
, type
,
      /* Two consecutive rotates adding up to some integer
11387 multiple of the precision of the type can be ignored. */
11388 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
11389 && TREE_CODE (arg0
) == RROTATE_EXPR
11390 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
11391 && wi::umod_trunc (wi::to_wide (arg1
)
11392 + wi::to_wide (TREE_OPERAND (arg0
, 1)),
11394 return fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11402 case TRUTH_ANDIF_EXPR
:
11403 /* Note that the operands of this must be ints
11404 and their values must be 0 or 1.
11405 ("true" is a fixed value perhaps depending on the language.) */
11406 /* If first arg is constant zero, return it. */
11407 if (integer_zerop (arg0
))
11408 return fold_convert_loc (loc
, type
, arg0
);
11410 case TRUTH_AND_EXPR
:
11411 /* If either arg is constant true, drop it. */
11412 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
11413 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
11414 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
)
11415 /* Preserve sequence points. */
11416 && (code
!= TRUTH_ANDIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
11417 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11418 /* If second arg is constant zero, result is zero, but first arg
11419 must be evaluated. */
11420 if (integer_zerop (arg1
))
11421 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
11422 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11423 case will be handled here. */
11424 if (integer_zerop (arg0
))
11425 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
11427 /* !X && X is always false. */
11428 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
11429 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11430 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg1
);
11431 /* X && !X is always false. */
11432 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
11433 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11434 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
11436 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11437 means A >= Y && A != MAX, but in this case we know that
11440 if (!TREE_SIDE_EFFECTS (arg0
)
11441 && !TREE_SIDE_EFFECTS (arg1
))
11443 tem
= fold_to_nonsharp_ineq_using_bound (loc
, arg0
, arg1
);
11444 if (tem
&& !operand_equal_p (tem
, arg0
, 0))
11445 return fold_build2_loc (loc
, code
, type
, tem
, arg1
);
11447 tem
= fold_to_nonsharp_ineq_using_bound (loc
, arg1
, arg0
);
11448 if (tem
&& !operand_equal_p (tem
, arg1
, 0))
11449 return fold_build2_loc (loc
, code
, type
, arg0
, tem
);
11452 if ((tem
= fold_truth_andor (loc
, code
, type
, arg0
, arg1
, op0
, op1
))
11458 case TRUTH_ORIF_EXPR
:
11459 /* Note that the operands of this must be ints
11460 and their values must be 0 or true.
11461 ("true" is a fixed value perhaps depending on the language.) */
11462 /* If first arg is constant true, return it. */
11463 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
11464 return fold_convert_loc (loc
, type
, arg0
);
11466 case TRUTH_OR_EXPR
:
11467 /* If either arg is constant zero, drop it. */
11468 if (TREE_CODE (arg0
) == INTEGER_CST
&& integer_zerop (arg0
))
11469 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
11470 if (TREE_CODE (arg1
) == INTEGER_CST
&& integer_zerop (arg1
)
11471 /* Preserve sequence points. */
11472 && (code
!= TRUTH_ORIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
11473 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11474 /* If second arg is constant true, result is true, but we must
11475 evaluate first arg. */
11476 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
))
11477 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
11478 /* Likewise for first arg, but note this only occurs here for
11480 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
11481 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
11483 /* !X || X is always true. */
11484 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
11485 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11486 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg1
);
11487 /* X || !X is always true. */
11488 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
11489 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11490 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
11492 /* (X && !Y) || (!X && Y) is X ^ Y */
11493 if (TREE_CODE (arg0
) == TRUTH_AND_EXPR
11494 && TREE_CODE (arg1
) == TRUTH_AND_EXPR
)
11496 tree a0
, a1
, l0
, l1
, n0
, n1
;
11498 a0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
11499 a1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
11501 l0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11502 l1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11504 n0
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, l0
);
11505 n1
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, l1
);
11507 if ((operand_equal_p (n0
, a0
, 0)
11508 && operand_equal_p (n1
, a1
, 0))
11509 || (operand_equal_p (n0
, a1
, 0)
11510 && operand_equal_p (n1
, a0
, 0)))
11511 return fold_build2_loc (loc
, TRUTH_XOR_EXPR
, type
, l0
, n1
);
11514 if ((tem
= fold_truth_andor (loc
, code
, type
, arg0
, arg1
, op0
, op1
))
11520 case TRUTH_XOR_EXPR
:
11521 /* If the second arg is constant zero, drop it. */
11522 if (integer_zerop (arg1
))
11523 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11524 /* If the second arg is constant true, this is a logical inversion. */
11525 if (integer_onep (arg1
))
11527 tem
= invert_truthvalue_loc (loc
, arg0
);
11528 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
11530 /* Identical arguments cancel to zero. */
11531 if (operand_equal_p (arg0
, arg1
, 0))
11532 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
11534 /* !X ^ X is always true. */
11535 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
11536 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11537 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg1
);
11539 /* X ^ !X is always true. */
11540 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
11541 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11542 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
11551 tem
= fold_comparison (loc
, code
, type
, op0
, op1
);
11552 if (tem
!= NULL_TREE
)
11555 /* bool_var != 1 becomes !bool_var. */
11556 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_onep (arg1
)
11557 && code
== NE_EXPR
)
11558 return fold_convert_loc (loc
, type
,
11559 fold_build1_loc (loc
, TRUTH_NOT_EXPR
,
11560 TREE_TYPE (arg0
), arg0
));
11562 /* bool_var == 0 becomes !bool_var. */
11563 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_zerop (arg1
)
11564 && code
== EQ_EXPR
)
11565 return fold_convert_loc (loc
, type
,
11566 fold_build1_loc (loc
, TRUTH_NOT_EXPR
,
11567 TREE_TYPE (arg0
), arg0
));
11569 /* !exp != 0 becomes !exp */
11570 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
&& integer_zerop (arg1
)
11571 && code
== NE_EXPR
)
11572 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11574 /* If this is an EQ or NE comparison with zero and ARG0 is
11575 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11576 two operations, but the latter can be done in one less insn
11577 on machines that have only two-operand insns or on which a
11578 constant cannot be the first operand. */
11579 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11580 && integer_zerop (arg1
))
11582 tree arg00
= TREE_OPERAND (arg0
, 0);
11583 tree arg01
= TREE_OPERAND (arg0
, 1);
11584 if (TREE_CODE (arg00
) == LSHIFT_EXPR
11585 && integer_onep (TREE_OPERAND (arg00
, 0)))
11587 tree tem
= fold_build2_loc (loc
, RSHIFT_EXPR
, TREE_TYPE (arg00
),
11588 arg01
, TREE_OPERAND (arg00
, 1));
11589 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
), tem
,
11590 build_int_cst (TREE_TYPE (arg0
), 1));
11591 return fold_build2_loc (loc
, code
, type
,
11592 fold_convert_loc (loc
, TREE_TYPE (arg1
), tem
),
11595 else if (TREE_CODE (arg01
) == LSHIFT_EXPR
11596 && integer_onep (TREE_OPERAND (arg01
, 0)))
11598 tree tem
= fold_build2_loc (loc
, RSHIFT_EXPR
, TREE_TYPE (arg01
),
11599 arg00
, TREE_OPERAND (arg01
, 1));
11600 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
), tem
,
11601 build_int_cst (TREE_TYPE (arg0
), 1));
11602 return fold_build2_loc (loc
, code
, type
,
11603 fold_convert_loc (loc
, TREE_TYPE (arg1
), tem
),
11608 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11609 C1 is a valid shift constant, and C2 is a power of two, i.e.
11611 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11612 && integer_pow2p (TREE_OPERAND (arg0
, 1))
11613 && integer_zerop (arg1
))
11615 tree arg00
= TREE_OPERAND (arg0
, 0);
11616 STRIP_NOPS (arg00
);
11617 if (TREE_CODE (arg00
) == RSHIFT_EXPR
11618 && TREE_CODE (TREE_OPERAND (arg00
, 1)) == INTEGER_CST
)
11620 tree itype
= TREE_TYPE (arg00
);
11621 tree arg001
= TREE_OPERAND (arg00
, 1);
11622 prec
= TYPE_PRECISION (itype
);
11624 /* Check for a valid shift count. */
11625 if (wi::ltu_p (wi::to_wide (arg001
), prec
))
11627 tree arg01
= TREE_OPERAND (arg0
, 1);
11628 tree arg000
= TREE_OPERAND (arg00
, 0);
11629 unsigned HOST_WIDE_INT log2
= tree_log2 (arg01
);
11630 /* If (C2 << C1) doesn't overflow, then
11631 ((X >> C1) & C2) != 0 can be rewritten as
11632 (X & (C2 << C1)) != 0. */
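		  /* e.g. ((x >> 4) & 2) != 0 becomes (x & 32) != 0, since
		     2 << 4 == 32 fits in the precision of the type.  */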
11633 if ((log2
+ TREE_INT_CST_LOW (arg001
)) < prec
)
11635 tem
= fold_build2_loc (loc
, LSHIFT_EXPR
, itype
,
11637 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
11639 return fold_build2_loc (loc
, code
, type
, tem
,
11640 fold_convert_loc (loc
, itype
, arg1
));
11642 /* Otherwise, for signed (arithmetic) shifts,
11643 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11644 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11645 else if (!TYPE_UNSIGNED (itype
))
11646 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
11649 build_int_cst (itype
, 0));
		/* Otherwise, for unsigned (logical) shifts,
11651 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11652 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
11654 return omit_one_operand_loc (loc
, type
,
11655 code
== EQ_EXPR
? integer_one_node
11656 : integer_zero_node
,
11662 /* If this is a comparison of a field, we may be able to simplify it. */
11663 if ((TREE_CODE (arg0
) == COMPONENT_REF
11664 || TREE_CODE (arg0
) == BIT_FIELD_REF
)
11665 /* Handle the constant case even without -O
11666 to make sure the warnings are given. */
11667 && (optimize
|| TREE_CODE (arg1
) == INTEGER_CST
))
11669 t1
= optimize_bit_field_compare (loc
, code
, type
, arg0
, arg1
);
11674 /* Optimize comparisons of strlen vs zero to a compare of the
11675 first character of the string vs zero. To wit,
11676 strlen(ptr) == 0 => *ptr == 0
11677 strlen(ptr) != 0 => *ptr != 0
11678 Other cases should reduce to one of these two (or a constant)
11679 due to the return value of strlen being unsigned. */
11680 if (TREE_CODE (arg0
) == CALL_EXPR
&& integer_zerop (arg1
))
11682 tree fndecl
= get_callee_fndecl (arg0
);
11685 && fndecl_built_in_p (fndecl
, BUILT_IN_STRLEN
)
11686 && call_expr_nargs (arg0
) == 1
11687 && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0
, 0)))
11691 = build_pointer_type (build_qualified_type (char_type_node
,
11693 tree ptr
= fold_convert_loc (loc
, ptrtype
,
11694 CALL_EXPR_ARG (arg0
, 0));
11695 tree iref
= build_fold_indirect_ref_loc (loc
, ptr
);
11696 return fold_build2_loc (loc
, code
, type
, iref
,
11697 build_int_cst (TREE_TYPE (iref
), 0));
11701 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
11702 of X. Similarly fold (X >> C) == 0 into X >= 0. */
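      /* e.g. for a 32-bit signed X, (X >> 31) != 0 is exactly X < 0:
	 the arithmetic shift leaves only copies of the sign bit.  */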
11703 if (TREE_CODE (arg0
) == RSHIFT_EXPR
11704 && integer_zerop (arg1
)
11705 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11707 tree arg00
= TREE_OPERAND (arg0
, 0);
11708 tree arg01
= TREE_OPERAND (arg0
, 1);
11709 tree itype
= TREE_TYPE (arg00
);
11710 if (wi::to_wide (arg01
) == element_precision (itype
) - 1)
11712 if (TYPE_UNSIGNED (itype
))
11714 itype
= signed_type_for (itype
);
11715 arg00
= fold_convert_loc (loc
, itype
, arg00
);
11717 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
,
11718 type
, arg00
, build_zero_cst (itype
));
11722 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11723 (X & C) == 0 when C is a single bit. */
11724 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11725 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_NOT_EXPR
11726 && integer_zerop (arg1
)
11727 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
11729 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
11730 TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0),
11731 TREE_OPERAND (arg0
, 1));
11732 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
,
11734 fold_convert_loc (loc
, TREE_TYPE (arg0
),
11738 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11739 constant C is a power of two, i.e. a single bit. */
11740 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11741 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
11742 && integer_zerop (arg1
)
11743 && integer_pow2p (TREE_OPERAND (arg0
, 1))
11744 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
11745 TREE_OPERAND (arg0
, 1), OEP_ONLY_CONST
))
11747 tree arg00
= TREE_OPERAND (arg0
, 0);
11748 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
11749 arg00
, build_int_cst (TREE_TYPE (arg00
), 0));
11752 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
	 when C is a power of two, i.e. a single bit.  */
11754 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11755 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_XOR_EXPR
11756 && integer_zerop (arg1
)
11757 && integer_pow2p (TREE_OPERAND (arg0
, 1))
11758 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
11759 TREE_OPERAND (arg0
, 1), OEP_ONLY_CONST
))
11761 tree arg000
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
11762 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg000
),
11763 arg000
, TREE_OPERAND (arg0
, 1));
11764 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
11765 tem
, build_int_cst (TREE_TYPE (tem
), 0));
11768 if (integer_zerop (arg1
)
11769 && tree_expr_nonzero_p (arg0
))
11771 tree res
= constant_boolean_node (code
==NE_EXPR
, type
);
11772 return omit_one_operand_loc (loc
, type
, res
, arg0
);
11775 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11776 && TREE_CODE (arg1
) == BIT_XOR_EXPR
)
11778 tree arg00
= TREE_OPERAND (arg0
, 0);
11779 tree arg01
= TREE_OPERAND (arg0
, 1);
11780 tree arg10
= TREE_OPERAND (arg1
, 0);
11781 tree arg11
= TREE_OPERAND (arg1
, 1);
11782 tree itype
= TREE_TYPE (arg0
);
11784 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11785 operand_equal_p guarantees no side-effects so we don't need
11786 to use omit_one_operand on Z. */
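	  /* XOR with the same Z is a bijection, so (x ^ z) == (y ^ z)
	     holds exactly when x == y; the same reasoning applies to the
	     other operand pairings below.  */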
11787 if (operand_equal_p (arg01
, arg11
, 0))
11788 return fold_build2_loc (loc
, code
, type
, arg00
,
11789 fold_convert_loc (loc
, TREE_TYPE (arg00
),
11791 if (operand_equal_p (arg01
, arg10
, 0))
11792 return fold_build2_loc (loc
, code
, type
, arg00
,
11793 fold_convert_loc (loc
, TREE_TYPE (arg00
),
11795 if (operand_equal_p (arg00
, arg11
, 0))
11796 return fold_build2_loc (loc
, code
, type
, arg01
,
11797 fold_convert_loc (loc
, TREE_TYPE (arg01
),
11799 if (operand_equal_p (arg00
, arg10
, 0))
11800 return fold_build2_loc (loc
, code
, type
, arg01
,
11801 fold_convert_loc (loc
, TREE_TYPE (arg01
),
11804 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11805 if (TREE_CODE (arg01
) == INTEGER_CST
11806 && TREE_CODE (arg11
) == INTEGER_CST
)
11808 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg01
,
11809 fold_convert_loc (loc
, itype
, arg11
));
11810 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg00
, tem
);
11811 return fold_build2_loc (loc
, code
, type
, tem
,
11812 fold_convert_loc (loc
, itype
, arg10
));
11816 /* Attempt to simplify equality/inequality comparisons of complex
11817 values. Only lower the comparison if the result is known or
11818 can be simplified to a single scalar comparison. */
11819 if ((TREE_CODE (arg0
) == COMPLEX_EXPR
11820 || TREE_CODE (arg0
) == COMPLEX_CST
)
11821 && (TREE_CODE (arg1
) == COMPLEX_EXPR
11822 || TREE_CODE (arg1
) == COMPLEX_CST
))
11824 tree real0
, imag0
, real1
, imag1
;
11827 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
11829 real0
= TREE_OPERAND (arg0
, 0);
11830 imag0
= TREE_OPERAND (arg0
, 1);
11834 real0
= TREE_REALPART (arg0
);
11835 imag0
= TREE_IMAGPART (arg0
);
11838 if (TREE_CODE (arg1
) == COMPLEX_EXPR
)
11840 real1
= TREE_OPERAND (arg1
, 0);
11841 imag1
= TREE_OPERAND (arg1
, 1);
11845 real1
= TREE_REALPART (arg1
);
11846 imag1
= TREE_IMAGPART (arg1
);
11849 rcond
= fold_binary_loc (loc
, code
, type
, real0
, real1
);
11850 if (rcond
&& TREE_CODE (rcond
) == INTEGER_CST
)
11852 if (integer_zerop (rcond
))
11854 if (code
== EQ_EXPR
)
11855 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
11857 return fold_build2_loc (loc
, NE_EXPR
, type
, imag0
, imag1
);
11861 if (code
== NE_EXPR
)
11862 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
11864 return fold_build2_loc (loc
, EQ_EXPR
, type
, imag0
, imag1
);
11868 icond
= fold_binary_loc (loc
, code
, type
, imag0
, imag1
);
11869 if (icond
&& TREE_CODE (icond
) == INTEGER_CST
)
11871 if (integer_zerop (icond
))
11873 if (code
== EQ_EXPR
)
11874 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
11876 return fold_build2_loc (loc
, NE_EXPR
, type
, real0
, real1
);
11880 if (code
== NE_EXPR
)
11881 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
11883 return fold_build2_loc (loc
, EQ_EXPR
, type
, real0
, real1
);
      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;
      /* Transform comparisons of the form X +- C CMP X.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	  && !HONOR_SNANS (arg0))
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  enum tree_code code0 = TREE_CODE (arg0);
	  int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
	  /* (X - c) > X becomes false.  */
	  if (code == GT_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    return constant_boolean_node (0, type);

	  /* Likewise (X + c) < X becomes false.  */
	  if (code == LT_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    return constant_boolean_node (0, type);

	  /* Convert (X - c) <= X to true.  */
	  if (!HONOR_NANS (arg1)
	      && code == LE_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    return constant_boolean_node (1, type);

	  /* Convert (X + c) >= X to true.  */
	  if (!HONOR_NANS (arg1)
	      && code == GE_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    return constant_boolean_node (1, type);
	}
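      /* Illustrative sketch: with a REAL_CST operand such as 1.0 the rules
	 above fold e.g.

	   x - 1.0 > x    -->  false
	   x + 1.0 < x    -->  false
	   x - 1.0 <= x   -->  true   (only when NaNs need not be honored)
	   x + 1.0 >= x   -->  true   (likewise)

	 and are skipped entirely when signaling NaNs must be preserved.  */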
      /* If we are comparing an ABS_EXPR with a constant, we can
	 convert all the cases into explicit comparisons, but they may
	 well not be faster than doing the ABS and one comparison.
	 But ABS (X) <= C is a range comparison, which becomes a subtraction
	 and a comparison, and is probably faster.  */
      if (code == LE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == ABS_EXPR
	  && ! TREE_SIDE_EFFECTS (arg0)
	  && (tem = negate_expr (arg1)) != 0
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				build2 (GE_EXPR, type,
					TREE_OPERAND (arg0, 0), tem),
				build2 (LE_EXPR, type,
					TREE_OPERAND (arg0, 0), arg1));
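      /* Illustrative sketch: for integer x this turns e.g.

	   abs (x) <= 5   -->   x >= -5 && x <= 5

	 which the range-test machinery can then implement as a single
	 unsigned subtract-and-compare; 5 is only an example constant.  */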
      /* Convert ABS_EXPR<x> >= 0 to true.  */
      strict_overflow_p = false;
      if (code == GE_EXPR
	  && (integer_zerop (arg1)
	      || (! HONOR_NANS (arg0)
		  && real_zerop (arg1)))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand_loc (loc, type,
				       constant_boolean_node (true, type),
				       arg0);
	}

      /* Convert ABS_EXPR<x> < 0 to false.  */
      strict_overflow_p = false;
      if (code == LT_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand_loc (loc, type,
				       constant_boolean_node (false, type),
				       arg0);
	}
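      /* Illustrative sketch: since abs (x) is known nonnegative, these fold

	   abs (x) >= 0   -->   true
	   abs (x) < 0    -->   false

	 and emit the warning above whenever that nonnegativity relies on
	 signed overflow being undefined (abs of INT_MIN would wrap).  */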
      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
	 and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TREE_CODE (arg1) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (arg1, 0)))
	return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
			   build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
				   TREE_OPERAND (arg1, 1)),
			   build_zero_cst (TREE_TYPE (arg0)));
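      /* Illustrative sketch: for unsigned x and a variable shift count y
	 this gives

	   x < (1U << y)    -->   (x >> y) == 0
	   x >= (1U << y)   -->   (x >> y) != 0  */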
      /* Similarly for X < (cast) (1 << Y).  But cast can't be narrowing,
	 otherwise Y might be >= # of bits in X's type and thus e.g.
	 (unsigned char) (1 << Y) for Y 15 might be 0.
	 If the cast is widening, then 1 << Y should have unsigned type,
	 otherwise if Y is number of bits in the signed shift type minus 1,
	 we can't optimize this.  E.g. (unsigned long long) (1 << Y) for Y
	 31 might be 0xffffffff80000000.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && CONVERT_EXPR_P (arg1)
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
	  && (element_precision (TREE_TYPE (arg1))
	      >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
	  && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
	      || (element_precision (TREE_TYPE (arg1))
		  == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
	  && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
	{
	  tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
			TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
	  return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
			     fold_convert_loc (loc, TREE_TYPE (arg0), tem),
			     build_zero_cst (TREE_TYPE (arg0)));
	}
    case UNORDERED_EXPR:
      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
	tree targ0 = strip_float_extensions (arg0);
	tree targ1 = strip_float_extensions (arg1);
	tree newtype = TREE_TYPE (targ0);

	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	  newtype = TREE_TYPE (targ1);

	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, newtype, targ0),
				  fold_convert_loc (loc, newtype, targ1));
      }
    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
	 nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
	return NULL_TREE;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
				 : fold_convert_loc (loc, type, arg1);
      return pedantic_non_lvalue_loc (loc, tem);

    case ASSERT_EXPR:
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
      gcc_unreachable ();

    default:
      return NULL_TREE;
    } /* switch (code) */
/* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
   ((A & N) + B) & M -> (A + B) & M
   Similarly if (N & M) == 0,
   ((A | N) + B) & M -> (A + B) & M
   and for - instead of + (or unary - instead of +)
   and/or ^ instead of |.
   If B is constant and (B & M) == 0, fold into A & M.

   This function is a helper for match.pd patterns.  It returns the type
   in which the simplified operation should be performed, and NULL_TREE
   if no optimization is possible.

   ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
   then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
   Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
   the +/-.  */
12086 fold_bit_and_mask (tree type
, tree arg1
, enum tree_code code
,
12087 tree arg00
, enum tree_code code00
, tree arg000
, tree arg001
,
12088 tree arg01
, enum tree_code code01
, tree arg010
, tree arg011
,
12091 gcc_assert (TREE_CODE (arg1
) == INTEGER_CST
);
12092 gcc_assert (code
== PLUS_EXPR
|| code
== MINUS_EXPR
|| code
== NEGATE_EXPR
);
12093 wi::tree_to_wide_ref cst1
= wi::to_wide (arg1
);
12095 || (cst1
& (cst1
+ 1)) != 0
12096 || !INTEGRAL_TYPE_P (type
)
12097 || (!TYPE_OVERFLOW_WRAPS (type
)
12098 && TREE_CODE (type
) != INTEGER_TYPE
)
12099 || (wi::max_value (type
) & cst1
) != cst1
)
12102 enum tree_code codes
[2] = { code00
, code01
};
12103 tree arg0xx
[4] = { arg000
, arg001
, arg010
, arg011
};
12107 /* Now we know that arg0 is (C + D) or (C - D) or -C and
12108 arg1 (M) is == (1LL << cst) - 1.
12109 Store C into PMOP[0] and D into PMOP[1]. */
12112 which
= code
!= NEGATE_EXPR
;
12114 for (; which
>= 0; which
--)
12115 switch (codes
[which
])
12120 gcc_assert (TREE_CODE (arg0xx
[2 * which
+ 1]) == INTEGER_CST
);
12121 cst0
= wi::to_wide (arg0xx
[2 * which
+ 1]) & cst1
;
12122 if (codes
[which
] == BIT_AND_EXPR
)
12127 else if (cst0
!= 0)
12129 /* If C or D is of the form (A & N) where
12130 (N & M) == M, or of the form (A | N) or
12131 (A ^ N) where (N & M) == 0, replace it with A. */
12132 pmop
[which
] = arg0xx
[2 * which
];
12135 if (TREE_CODE (pmop
[which
]) != INTEGER_CST
)
12137 /* If C or D is a N where (N & M) == 0, it can be
12138 omitted (replaced with 0). */
12139 if ((code
== PLUS_EXPR
12140 || (code
== MINUS_EXPR
&& which
== 0))
12141 && (cst1
& wi::to_wide (pmop
[which
])) == 0)
12142 pmop
[which
] = build_int_cst (type
, 0);
12143 /* Similarly, with C - N where (-N & M) == 0. */
12144 if (code
== MINUS_EXPR
12146 && (cst1
& -wi::to_wide (pmop
[which
])) == 0)
12147 pmop
[which
] = build_int_cst (type
, 0);
12150 gcc_unreachable ();
12153 /* Only build anything new if we optimized one or both arguments above. */
12154 if (pmop
[0] == arg00
&& pmop
[1] == arg01
)
12157 if (TYPE_OVERFLOW_WRAPS (type
))
12160 return unsigned_type_for (type
);
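/* Illustrative sketch of the simplification documented above, using the
   example masks M = 0xff, N = 0x1ff (so N & M == M) and N' = 0x100
   (so N' & M == 0):

     ((a & 0x1ff) + b) & 0xff   -->   (a + b) & 0xff
     ((a | 0x100) + b) & 0xff   -->   (a + b) & 0xff

   fold_bit_and_mask itself only vets the operands and reports the type
   in which match.pd should perform the rewrite.  */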
12163 /* Used by contains_label_[p1]. */
12165 struct contains_label_data
12167 hash_set
<tree
> *pset
;
12168 bool inside_switch_p
;
12171 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
12172 a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
12173 return NULL_TREE. Do not check the subtrees of GOTO_EXPR. */
12176 contains_label_1 (tree
*tp
, int *walk_subtrees
, void *data
)
12178 contains_label_data
*d
= (contains_label_data
*) data
;
12179 switch (TREE_CODE (*tp
))
12184 case CASE_LABEL_EXPR
:
12185 if (!d
->inside_switch_p
)
12190 if (!d
->inside_switch_p
)
12192 if (walk_tree (&SWITCH_COND (*tp
), contains_label_1
, data
, d
->pset
))
12194 d
->inside_switch_p
= true;
12195 if (walk_tree (&SWITCH_BODY (*tp
), contains_label_1
, data
, d
->pset
))
12197 d
->inside_switch_p
= false;
12198 *walk_subtrees
= 0;
12203 *walk_subtrees
= 0;
12211 /* Return whether the sub-tree ST contains a label which is accessible from
12212 outside the sub-tree. */
12215 contains_label_p (tree st
)
12217 hash_set
<tree
> pset
;
12218 contains_label_data data
= { &pset
, false };
12219 return walk_tree (&st
, contains_label_1
, &data
, &pset
) != NULL_TREE
;
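/* Illustrative sketch (GNU C): a dead COND_EXPR arm cannot simply be
   dropped when it contains a label that is still reachable, e.g. the
   unused arm of

     c ? x : ({ out: y; })

   if some other statement does "goto out".  Callers such as
   fold_ternary_loc therefore check contains_label_p before discarding
   an unused operand.  */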
12222 /* Fold a ternary expression of code CODE and type TYPE with operands
12223 OP0, OP1, and OP2. Return the folded expression if folding is
12224 successful. Otherwise, return NULL_TREE. */
12227 fold_ternary_loc (location_t loc
, enum tree_code code
, tree type
,
12228 tree op0
, tree op1
, tree op2
)
12231 tree arg0
= NULL_TREE
, arg1
= NULL_TREE
, arg2
= NULL_TREE
;
12232 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
12234 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
12235 && TREE_CODE_LENGTH (code
) == 3);
12237 /* If this is a commutative operation, and OP0 is a constant, move it
12238 to OP1 to reduce the number of tests below. */
12239 if (commutative_ternary_tree_code (code
)
12240 && tree_swap_operands_p (op0
, op1
))
12241 return fold_build3_loc (loc
, code
, type
, op1
, op0
, op2
);
12243 tem
= generic_simplify (loc
, code
, type
, op0
, op1
, op2
);
12247 /* Strip any conversions that don't change the mode. This is safe
12248 for every expression, except for a comparison expression because
12249 its signedness is derived from its operands. So, in the latter
12250 case, only strip conversions that don't change the signedness.
12252 Note that this is done as an internal manipulation within the
12253 constant folder, in order to find the simplest representation of
12254 the arguments so that their form can be studied. In any cases,
12255 the appropriate type conversions should be put back in the tree
12256 that will get out of the constant folder. */
12277 case COMPONENT_REF
:
12278 if (TREE_CODE (arg0
) == CONSTRUCTOR
12279 && ! type_contains_placeholder_p (TREE_TYPE (arg0
)))
12281 unsigned HOST_WIDE_INT idx
;
12283 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0
), idx
, field
, value
)
12290 case VEC_COND_EXPR
:
12291 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12292 so all simple results must be passed through pedantic_non_lvalue. */
12293 if (TREE_CODE (arg0
) == INTEGER_CST
)
12295 tree unused_op
= integer_zerop (arg0
) ? op1
: op2
;
12296 tem
= integer_zerop (arg0
) ? op2
: op1
;
	  /* Only optimize constant conditions when the selected branch
	     has the same type as the COND_EXPR.  This avoids optimizing
	     away "c ? x : throw", where the throw has a void type.
	     Avoid throwing away that operand which contains a label.  */
12301 if ((!TREE_SIDE_EFFECTS (unused_op
)
12302 || !contains_label_p (unused_op
))
12303 && (! VOID_TYPE_P (TREE_TYPE (tem
))
12304 || VOID_TYPE_P (type
)))
12305 return pedantic_non_lvalue_loc (loc
, tem
);
12308 else if (TREE_CODE (arg0
) == VECTOR_CST
)
12310 unsigned HOST_WIDE_INT nelts
;
12311 if ((TREE_CODE (arg1
) == VECTOR_CST
12312 || TREE_CODE (arg1
) == CONSTRUCTOR
)
12313 && (TREE_CODE (arg2
) == VECTOR_CST
12314 || TREE_CODE (arg2
) == CONSTRUCTOR
)
12315 && TYPE_VECTOR_SUBPARTS (type
).is_constant (&nelts
))
12317 vec_perm_builder
sel (nelts
, nelts
, 1);
12318 for (unsigned int i
= 0; i
< nelts
; i
++)
12320 tree val
= VECTOR_CST_ELT (arg0
, i
);
12321 if (integer_all_onesp (val
))
12322 sel
.quick_push (i
);
12323 else if (integer_zerop (val
))
12324 sel
.quick_push (nelts
+ i
);
12325 else /* Currently unreachable. */
12328 vec_perm_indices
indices (sel
, 2, nelts
);
12329 tree t
= fold_vec_perm (type
, arg1
, arg2
, indices
);
12330 if (t
!= NULL_TREE
)
12335 /* If we have A op B ? A : C, we may be able to convert this to a
12336 simpler expression, depending on the operation and the values
12337 of B and C. Signed zeros prevent all of these transformations,
12338 for reasons given above each one.
12340 Also try swapping the arguments and inverting the conditional. */
12341 if (COMPARISON_CLASS_P (arg0
)
12342 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0), op1
)
12343 && !HONOR_SIGNED_ZEROS (element_mode (op1
)))
12345 tem
= fold_cond_expr_with_comparison (loc
, type
, arg0
, op1
, op2
);
12350 if (COMPARISON_CLASS_P (arg0
)
12351 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0), op2
)
12352 && !HONOR_SIGNED_ZEROS (element_mode (op2
)))
12354 location_t loc0
= expr_location_or (arg0
, loc
);
12355 tem
= fold_invert_truthvalue (loc0
, arg0
);
12356 if (tem
&& COMPARISON_CLASS_P (tem
))
12358 tem
= fold_cond_expr_with_comparison (loc
, type
, tem
, op2
, op1
);
12364 /* If the second operand is simpler than the third, swap them
12365 since that produces better jump optimization results. */
12366 if (truth_value_p (TREE_CODE (arg0
))
12367 && tree_swap_operands_p (op1
, op2
))
12369 location_t loc0
= expr_location_or (arg0
, loc
);
	  /* See if this can be inverted.  If it can't, possibly because
	     it was a floating-point inequality comparison, don't do
	     anything.  */
12373 tem
= fold_invert_truthvalue (loc0
, arg0
);
12375 return fold_build3_loc (loc
, code
, type
, tem
, op2
, op1
);
12378 /* Convert A ? 1 : 0 to simply A. */
12379 if ((code
== VEC_COND_EXPR
? integer_all_onesp (op1
)
12380 : (integer_onep (op1
)
12381 && !VECTOR_TYPE_P (type
)))
12382 && integer_zerop (op2
)
12383 /* If we try to convert OP0 to our type, the
12384 call to fold will try to move the conversion inside
12385 a COND, which will recurse. In that case, the COND_EXPR
12386 is probably the best choice, so leave it alone. */
12387 && type
== TREE_TYPE (arg0
))
12388 return pedantic_non_lvalue_loc (loc
, arg0
);
12390 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12391 over COND_EXPR in cases such as floating point comparisons. */
12392 if (integer_zerop (op1
)
12393 && code
== COND_EXPR
12394 && integer_onep (op2
)
12395 && !VECTOR_TYPE_P (type
)
12396 && truth_value_p (TREE_CODE (arg0
)))
12397 return pedantic_non_lvalue_loc (loc
,
12398 fold_convert_loc (loc
, type
,
12399 invert_truthvalue_loc (loc
,
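	/* Illustrative sketch: for a truth-valued condition the two rules
	   above give

	     (x > 0) ? 1 : 0   -->   x > 0
	     (x > 0) ? 0 : 1   -->   !(x > 0)

	   the NOT_EXPR form being preferred over a COND_EXPR for things
	   like floating-point comparisons, as noted above.  */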
12402 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12403 if (TREE_CODE (arg0
) == LT_EXPR
12404 && integer_zerop (TREE_OPERAND (arg0
, 1))
12405 && integer_zerop (op2
)
12406 && (tem
= sign_bit_p (TREE_OPERAND (arg0
, 0), arg1
)))
	  /* sign_bit_p looks through both zero and sign extensions,
	     but for this optimization only sign extensions are
	     usable.  */
12411 tree tem2
= TREE_OPERAND (arg0
, 0);
12412 while (tem
!= tem2
)
12414 if (TREE_CODE (tem2
) != NOP_EXPR
12415 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2
, 0))))
12420 tem2
= TREE_OPERAND (tem2
, 0);
12422 /* sign_bit_p only checks ARG1 bits within A's precision.
12423 If <sign bit of A> has wider type than A, bits outside
12424 of A's precision in <sign bit of A> need to be checked.
12425 If they are all 0, this optimization needs to be done
12426 in unsigned A's type, if they are all 1 in signed A's type,
12427 otherwise this can't be done. */
12429 && TYPE_PRECISION (TREE_TYPE (tem
))
12430 < TYPE_PRECISION (TREE_TYPE (arg1
))
12431 && TYPE_PRECISION (TREE_TYPE (tem
))
12432 < TYPE_PRECISION (type
))
12434 int inner_width
, outer_width
;
12437 inner_width
= TYPE_PRECISION (TREE_TYPE (tem
));
12438 outer_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
12439 if (outer_width
> TYPE_PRECISION (type
))
12440 outer_width
= TYPE_PRECISION (type
);
12442 wide_int mask
= wi::shifted_mask
12443 (inner_width
, outer_width
- inner_width
, false,
12444 TYPE_PRECISION (TREE_TYPE (arg1
)));
12446 wide_int common
= mask
& wi::to_wide (arg1
);
12447 if (common
== mask
)
12449 tem_type
= signed_type_for (TREE_TYPE (tem
));
12450 tem
= fold_convert_loc (loc
, tem_type
, tem
);
12452 else if (common
== 0)
12454 tem_type
= unsigned_type_for (TREE_TYPE (tem
));
12455 tem
= fold_convert_loc (loc
, tem_type
, tem
);
12463 fold_convert_loc (loc
, type
,
12464 fold_build2_loc (loc
, BIT_AND_EXPR
,
12465 TREE_TYPE (tem
), tem
,
12466 fold_convert_loc (loc
,
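	  /* Illustrative sketch: for a 32-bit int x the transformation
	     above folds

	       x < 0 ? INT_MIN : 0   -->   x & INT_MIN

	     (INT_MIN being the sign-bit mask), subject to the sign/zero
	     extension and precision checks performed on TEM just above.  */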
12471 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
12472 already handled above. */
12473 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12474 && integer_onep (TREE_OPERAND (arg0
, 1))
12475 && integer_zerop (op2
)
12476 && integer_pow2p (arg1
))
12478 tree tem
= TREE_OPERAND (arg0
, 0);
12480 if (TREE_CODE (tem
) == RSHIFT_EXPR
12481 && tree_fits_uhwi_p (TREE_OPERAND (tem
, 1))
12482 && (unsigned HOST_WIDE_INT
) tree_log2 (arg1
)
12483 == tree_to_uhwi (TREE_OPERAND (tem
, 1)))
12484 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
12485 fold_convert_loc (loc
, type
,
12486 TREE_OPERAND (tem
, 0)),
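	/* Illustrative sketch: with the example bit position 3 this folds

	     ((a >> 3) & 1) ? (1 << 3) : 0   -->   a & (1 << 3).  */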
12490 /* A & N ? N : 0 is simply A & N if N is a power of two. This
12491 is probably obsolete because the first operand should be a
12492 truth value (that's why we have the two cases above), but let's
12493 leave it in until we can confirm this for all front-ends. */
12494 if (integer_zerop (op2
)
12495 && TREE_CODE (arg0
) == NE_EXPR
12496 && integer_zerop (TREE_OPERAND (arg0
, 1))
12497 && integer_pow2p (arg1
)
12498 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
12499 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
12500 arg1
, OEP_ONLY_CONST
)
	  /* operand_equal_p compares just value, not precision, so e.g.
	     arg1 could be 8-bit -128 and be power of two, but BIT_AND_EXPR
	     second operand 32-bit -128, which is not a power of two (or
	     vice versa).  */
12505 && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1)))
12506 return pedantic_non_lvalue_loc (loc
,
12507 fold_convert_loc (loc
, type
,
12508 TREE_OPERAND (arg0
,
12511 /* Disable the transformations below for vectors, since
12512 fold_binary_op_with_conditional_arg may undo them immediately,
12513 yielding an infinite loop. */
12514 if (code
== VEC_COND_EXPR
)
12517 /* Convert A ? B : 0 into A && B if A and B are truth values. */
12518 if (integer_zerop (op2
)
12519 && truth_value_p (TREE_CODE (arg0
))
12520 && truth_value_p (TREE_CODE (arg1
))
12521 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
12522 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
? BIT_AND_EXPR
12523 : TRUTH_ANDIF_EXPR
,
12524 type
, fold_convert_loc (loc
, type
, arg0
), op1
);
12526 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
12527 if (code
== VEC_COND_EXPR
? integer_all_onesp (op2
) : integer_onep (op2
)
12528 && truth_value_p (TREE_CODE (arg0
))
12529 && truth_value_p (TREE_CODE (arg1
))
12530 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
12532 location_t loc0
= expr_location_or (arg0
, loc
);
12533 /* Only perform transformation if ARG0 is easily inverted. */
12534 tem
= fold_invert_truthvalue (loc0
, arg0
);
12536 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
12539 type
, fold_convert_loc (loc
, type
, tem
),
12543 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
12544 if (integer_zerop (arg1
)
12545 && truth_value_p (TREE_CODE (arg0
))
12546 && truth_value_p (TREE_CODE (op2
))
12547 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
12549 location_t loc0
= expr_location_or (arg0
, loc
);
12550 /* Only perform transformation if ARG0 is easily inverted. */
12551 tem
= fold_invert_truthvalue (loc0
, arg0
);
12553 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
12554 ? BIT_AND_EXPR
: TRUTH_ANDIF_EXPR
,
12555 type
, fold_convert_loc (loc
, type
, tem
),
12559 /* Convert A ? 1 : B into A || B if A and B are truth values. */
12560 if (code
== VEC_COND_EXPR
? integer_all_onesp (arg1
) : integer_onep (arg1
)
12561 && truth_value_p (TREE_CODE (arg0
))
12562 && truth_value_p (TREE_CODE (op2
))
12563 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
12564 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
12565 ? BIT_IOR_EXPR
: TRUTH_ORIF_EXPR
,
12566 type
, fold_convert_loc (loc
, type
, arg0
), op2
);
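	/* Illustrative sketch: for truth values a and b the four rules
	   above give

	     a ? b : 0   -->   a && b
	     a ? b : 1   -->   !a || b
	     a ? 0 : b   -->   !a && b
	     a ? 1 : b   -->   a || b

	   using BIT_AND_EXPR / BIT_IOR_EXPR instead of the short-circuit
	   forms when folding a VEC_COND_EXPR.  */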
12571 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
12572 of fold_ternary on them. */
12573 gcc_unreachable ();
12575 case BIT_FIELD_REF
:
12576 if (TREE_CODE (arg0
) == VECTOR_CST
12577 && (type
== TREE_TYPE (TREE_TYPE (arg0
))
12578 || (VECTOR_TYPE_P (type
)
12579 && TREE_TYPE (type
) == TREE_TYPE (TREE_TYPE (arg0
))))
12580 && tree_fits_uhwi_p (op1
)
12581 && tree_fits_uhwi_p (op2
))
12583 tree eltype
= TREE_TYPE (TREE_TYPE (arg0
));
12584 unsigned HOST_WIDE_INT width
12585 = (TREE_CODE (eltype
) == BOOLEAN_TYPE
12586 ? TYPE_PRECISION (eltype
) : tree_to_uhwi (TYPE_SIZE (eltype
)));
12587 unsigned HOST_WIDE_INT n
= tree_to_uhwi (arg1
);
12588 unsigned HOST_WIDE_INT idx
= tree_to_uhwi (op2
);
12591 && (idx
% width
) == 0
12592 && (n
% width
) == 0
12593 && known_le ((idx
+ n
) / width
,
12594 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
))))
12599 if (TREE_CODE (arg0
) == VECTOR_CST
)
12603 tem
= VECTOR_CST_ELT (arg0
, idx
);
12604 if (VECTOR_TYPE_P (type
))
12605 tem
= fold_build1 (VIEW_CONVERT_EXPR
, type
, tem
);
12609 tree_vector_builder
vals (type
, n
, 1);
12610 for (unsigned i
= 0; i
< n
; ++i
)
12611 vals
.quick_push (VECTOR_CST_ELT (arg0
, idx
+ i
));
12612 return vals
.build ();
12617 /* On constants we can use native encode/interpret to constant
12618 fold (nearly) all BIT_FIELD_REFs. */
12619 if (CONSTANT_CLASS_P (arg0
)
12620 && can_native_interpret_type_p (type
)
12621 && BITS_PER_UNIT
== 8
12622 && tree_fits_uhwi_p (op1
)
12623 && tree_fits_uhwi_p (op2
))
12625 unsigned HOST_WIDE_INT bitpos
= tree_to_uhwi (op2
);
12626 unsigned HOST_WIDE_INT bitsize
= tree_to_uhwi (op1
);
12627 /* Limit us to a reasonable amount of work. To relax the
12628 other limitations we need bit-shifting of the buffer
12629 and rounding up the size. */
12630 if (bitpos
% BITS_PER_UNIT
== 0
12631 && bitsize
% BITS_PER_UNIT
== 0
12632 && bitsize
<= MAX_BITSIZE_MODE_ANY_MODE
)
12634 unsigned char b
[MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
];
12635 unsigned HOST_WIDE_INT len
12636 = native_encode_expr (arg0
, b
, bitsize
/ BITS_PER_UNIT
,
12637 bitpos
/ BITS_PER_UNIT
);
12639 && len
* BITS_PER_UNIT
>= bitsize
)
12641 tree v
= native_interpret_expr (type
, b
,
12642 bitsize
/ BITS_PER_UNIT
);
12651 case VEC_PERM_EXPR
:
12652 /* Perform constant folding of BIT_INSERT_EXPR. */
12653 if (TREE_CODE (arg2
) == VECTOR_CST
12654 && TREE_CODE (op0
) == VECTOR_CST
12655 && TREE_CODE (op1
) == VECTOR_CST
)
12657 /* Build a vector of integers from the tree mask. */
12658 vec_perm_builder builder
;
12659 if (!tree_to_vec_perm_builder (&builder
, arg2
))
12662 /* Create a vec_perm_indices for the integer vector. */
12663 poly_uint64 nelts
= TYPE_VECTOR_SUBPARTS (type
);
12664 bool single_arg
= (op0
== op1
);
12665 vec_perm_indices
sel (builder
, single_arg
? 1 : 2, nelts
);
12666 return fold_vec_perm (type
, op0
, op1
, sel
);
12670 case BIT_INSERT_EXPR
:
12671 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
12672 if (TREE_CODE (arg0
) == INTEGER_CST
12673 && TREE_CODE (arg1
) == INTEGER_CST
)
12675 unsigned HOST_WIDE_INT bitpos
= tree_to_uhwi (op2
);
12676 unsigned bitsize
= TYPE_PRECISION (TREE_TYPE (arg1
));
12677 wide_int tem
= (wi::to_wide (arg0
)
12678 & wi::shifted_mask (bitpos
, bitsize
, true,
12679 TYPE_PRECISION (type
)));
12681 = wi::lshift (wi::zext (wi::to_wide (arg1
, TYPE_PRECISION (type
)),
12683 return wide_int_to_tree (type
, wi::bit_or (tem
, tem2
));
12685 else if (TREE_CODE (arg0
) == VECTOR_CST
12686 && CONSTANT_CLASS_P (arg1
)
12687 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0
)),
12690 unsigned HOST_WIDE_INT bitpos
= tree_to_uhwi (op2
);
12691 unsigned HOST_WIDE_INT elsize
12692 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1
)));
12693 if (bitpos
% elsize
== 0)
12695 unsigned k
= bitpos
/ elsize
;
12696 unsigned HOST_WIDE_INT nelts
;
12697 if (operand_equal_p (VECTOR_CST_ELT (arg0
, k
), arg1
, 0))
12699 else if (VECTOR_CST_NELTS (arg0
).is_constant (&nelts
))
12701 tree_vector_builder
elts (type
, nelts
, 1);
12702 elts
.quick_grow (nelts
);
12703 for (unsigned HOST_WIDE_INT i
= 0; i
< nelts
; ++i
)
12704 elts
[i
] = (i
== k
? arg1
: VECTOR_CST_ELT (arg0
, i
));
12705 return elts
.build ();
12713 } /* switch (code) */
12716 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
12717 of an array (or vector). *CTOR_IDX if non-NULL is updated with the
12718 constructor element index of the value returned. If the element is
12719 not found NULL_TREE is returned and *CTOR_IDX is updated to
12720 the index of the element after the ACCESS_INDEX position (which
12721 may be outside of the CTOR array). */
12724 get_array_ctor_element_at_index (tree ctor
, offset_int access_index
,
12725 unsigned *ctor_idx
)
12727 tree index_type
= NULL_TREE
;
12728 signop index_sgn
= UNSIGNED
;
12729 offset_int low_bound
= 0;
12731 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
)
12733 tree domain_type
= TYPE_DOMAIN (TREE_TYPE (ctor
));
12734 if (domain_type
&& TYPE_MIN_VALUE (domain_type
))
12736 /* Static constructors for variably sized objects makes no sense. */
12737 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type
)) == INTEGER_CST
);
12738 index_type
= TREE_TYPE (TYPE_MIN_VALUE (domain_type
));
12739 /* ??? When it is obvious that the range is signed, treat it so. */
12740 if (TYPE_UNSIGNED (index_type
)
12741 && TYPE_MAX_VALUE (domain_type
)
12742 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type
),
12743 TYPE_MIN_VALUE (domain_type
)))
12745 index_sgn
= SIGNED
;
12747 = offset_int::from (wi::to_wide (TYPE_MIN_VALUE (domain_type
)),
12752 index_sgn
= TYPE_SIGN (index_type
);
12753 low_bound
= wi::to_offset (TYPE_MIN_VALUE (domain_type
));
12759 access_index
= wi::ext (access_index
, TYPE_PRECISION (index_type
),
12762 offset_int index
= low_bound
;
12764 index
= wi::ext (index
, TYPE_PRECISION (index_type
), index_sgn
);
12766 offset_int max_index
= index
;
12769 bool first_p
= true;
12771 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
, cval
)
      /* Array constructor might explicitly set index, or specify a range,
	 or leave index NULL meaning that it is next index after previous
	 one.  */
12778 if (TREE_CODE (cfield
) == INTEGER_CST
)
12780 = offset_int::from (wi::to_wide (cfield
), index_sgn
);
12783 gcc_assert (TREE_CODE (cfield
) == RANGE_EXPR
);
12784 index
= offset_int::from (wi::to_wide (TREE_OPERAND (cfield
, 0)),
12787 = offset_int::from (wi::to_wide (TREE_OPERAND (cfield
, 1)),
12789 gcc_checking_assert (wi::le_p (index
, max_index
, index_sgn
));
12794 index
= max_index
+ 1;
12796 index
= wi::ext (index
, TYPE_PRECISION (index_type
), index_sgn
);
12797 gcc_checking_assert (wi::gt_p (index
, max_index
, index_sgn
));
12803 /* Do we have match? */
12804 if (wi::cmp (access_index
, index
, index_sgn
) >= 0)
12806 if (wi::cmp (access_index
, max_index
, index_sgn
) <= 0)
      else if (in_gimple_form)
	/* We're past the element we search for.  Note during parsing
	   the elements might not be sorted.
	   ???  We should use a binary search and a flag on the
	   CONSTRUCTOR as to whether elements are sorted in declaration
	   order.  */
	return NULL_TREE;
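      /* Illustrative sketch (GNU C): for a constructor such as

	   int a[8] = { [2 ... 4] = 7 };

	 the elements are keyed by an INTEGER_CST or a RANGE_EXPR index,
	 and an ACCESS_INDEX of 3 is found inside the [2 ... 4] RANGE_EXPR
	 element.  */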
12826 /* Perform constant folding and related simplification of EXPR.
12827 The related simplifications include x*1 => x, x*0 => 0, etc.,
12828 and application of the associative law.
12829 NOP_EXPR conversions may be removed freely (as long as we
12830 are careful not to change the type of the overall expression).
12831 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
12832 but we can constant-fold them if they have constant operands. */
12834 #ifdef ENABLE_FOLD_CHECKING
12835 # define fold(x) fold_1 (x)
12836 static tree
fold_1 (tree
);
12842 const tree t
= expr
;
12843 enum tree_code code
= TREE_CODE (t
);
12844 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
12846 location_t loc
= EXPR_LOCATION (expr
);
12848 /* Return right away if a constant. */
12849 if (kind
== tcc_constant
)
12852 /* CALL_EXPR-like objects with variable numbers of operands are
12853 treated specially. */
12854 if (kind
== tcc_vl_exp
)
12856 if (code
== CALL_EXPR
)
12858 tem
= fold_call_expr (loc
, expr
, false);
12859 return tem
? tem
: expr
;
12864 if (IS_EXPR_CODE_CLASS (kind
))
12866 tree type
= TREE_TYPE (t
);
12867 tree op0
, op1
, op2
;
12869 switch (TREE_CODE_LENGTH (code
))
12872 op0
= TREE_OPERAND (t
, 0);
12873 tem
= fold_unary_loc (loc
, code
, type
, op0
);
12874 return tem
? tem
: expr
;
12876 op0
= TREE_OPERAND (t
, 0);
12877 op1
= TREE_OPERAND (t
, 1);
12878 tem
= fold_binary_loc (loc
, code
, type
, op0
, op1
);
12879 return tem
? tem
: expr
;
12881 op0
= TREE_OPERAND (t
, 0);
12882 op1
= TREE_OPERAND (t
, 1);
12883 op2
= TREE_OPERAND (t
, 2);
12884 tem
= fold_ternary_loc (loc
, code
, type
, op0
, op1
, op2
);
12885 return tem
? tem
: expr
;
12895 tree op0
= TREE_OPERAND (t
, 0);
12896 tree op1
= TREE_OPERAND (t
, 1);
12898 if (TREE_CODE (op1
) == INTEGER_CST
12899 && TREE_CODE (op0
) == CONSTRUCTOR
12900 && ! type_contains_placeholder_p (TREE_TYPE (op0
)))
12902 tree val
= get_array_ctor_element_at_index (op0
,
12903 wi::to_offset (op1
));
12911 /* Return a VECTOR_CST if possible. */
12914 tree type
= TREE_TYPE (t
);
12915 if (TREE_CODE (type
) != VECTOR_TYPE
)
12920 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t
), i
, val
)
12921 if (! CONSTANT_CLASS_P (val
))
12924 return build_vector_from_ctor (type
, CONSTRUCTOR_ELTS (t
));
12928 return fold (DECL_INITIAL (t
));
12932 } /* switch (code) */
12935 #ifdef ENABLE_FOLD_CHECKING
12938 static void fold_checksum_tree (const_tree
, struct md5_ctx
*,
12939 hash_table
<nofree_ptr_hash
<const tree_node
> > *);
12940 static void fold_check_failed (const_tree
, const_tree
);
12941 void print_fold_checksum (const_tree
);
/* When --enable-checking=fold, compute a digest of expr before
   and after the actual fold call, to verify that fold did not
   accidentally change the original expr.  */
12951 struct md5_ctx ctx
;
12952 unsigned char checksum_before
[16], checksum_after
[16];
12953 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
12955 md5_init_ctx (&ctx
);
12956 fold_checksum_tree (expr
, &ctx
, &ht
);
12957 md5_finish_ctx (&ctx
, checksum_before
);
12960 ret
= fold_1 (expr
);
12962 md5_init_ctx (&ctx
);
12963 fold_checksum_tree (expr
, &ctx
, &ht
);
12964 md5_finish_ctx (&ctx
, checksum_after
);
12966 if (memcmp (checksum_before
, checksum_after
, 16))
12967 fold_check_failed (expr
, ret
);
12973 print_fold_checksum (const_tree expr
)
12975 struct md5_ctx ctx
;
12976 unsigned char checksum
[16], cnt
;
12977 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
12979 md5_init_ctx (&ctx
);
12980 fold_checksum_tree (expr
, &ctx
, &ht
);
12981 md5_finish_ctx (&ctx
, checksum
);
12982 for (cnt
= 0; cnt
< 16; ++cnt
)
12983 fprintf (stderr
, "%02x", checksum
[cnt
]);
12984 putc ('\n', stderr
);
12988 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED
, const_tree ret ATTRIBUTE_UNUSED
)
12990 internal_error ("fold check: original tree changed by fold");
12994 fold_checksum_tree (const_tree expr
, struct md5_ctx
*ctx
,
12995 hash_table
<nofree_ptr_hash
<const tree_node
> > *ht
)
12997 const tree_node
**slot
;
12998 enum tree_code code
;
12999 union tree_node
*buf
;
13005 slot
= ht
->find_slot (expr
, INSERT
);
13009 code
= TREE_CODE (expr
);
13010 if (TREE_CODE_CLASS (code
) == tcc_declaration
13011 && HAS_DECL_ASSEMBLER_NAME_P (expr
))
13013 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
13014 size_t sz
= tree_size (expr
);
13015 buf
= XALLOCAVAR (union tree_node
, sz
);
13016 memcpy ((char *) buf
, expr
, sz
);
13017 SET_DECL_ASSEMBLER_NAME ((tree
) buf
, NULL
);
13018 buf
->decl_with_vis
.symtab_node
= NULL
;
13019 buf
->base
.nowarning_flag
= 0;
13022 else if (TREE_CODE_CLASS (code
) == tcc_type
13023 && (TYPE_POINTER_TO (expr
)
13024 || TYPE_REFERENCE_TO (expr
)
13025 || TYPE_CACHED_VALUES_P (expr
)
13026 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr
)
13027 || TYPE_NEXT_VARIANT (expr
)
13028 || TYPE_ALIAS_SET_KNOWN_P (expr
)))
13030 /* Allow these fields to be modified. */
13032 size_t sz
= tree_size (expr
);
13033 buf
= XALLOCAVAR (union tree_node
, sz
);
13034 memcpy ((char *) buf
, expr
, sz
);
13035 expr
= tmp
= (tree
) buf
;
13036 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp
) = 0;
13037 TYPE_POINTER_TO (tmp
) = NULL
;
13038 TYPE_REFERENCE_TO (tmp
) = NULL
;
13039 TYPE_NEXT_VARIANT (tmp
) = NULL
;
13040 TYPE_ALIAS_SET (tmp
) = -1;
13041 if (TYPE_CACHED_VALUES_P (tmp
))
13043 TYPE_CACHED_VALUES_P (tmp
) = 0;
13044 TYPE_CACHED_VALUES (tmp
) = NULL
;
13047 else if (TREE_NO_WARNING (expr
) && (DECL_P (expr
) || EXPR_P (expr
)))
13049 /* Allow TREE_NO_WARNING to be set. Perhaps we shouldn't allow that
13050 and change builtins.c etc. instead - see PR89543. */
13051 size_t sz
= tree_size (expr
);
13052 buf
= XALLOCAVAR (union tree_node
, sz
);
13053 memcpy ((char *) buf
, expr
, sz
);
13054 buf
->base
.nowarning_flag
= 0;
13057 md5_process_bytes (expr
, tree_size (expr
), ctx
);
13058 if (CODE_CONTAINS_STRUCT (code
, TS_TYPED
))
13059 fold_checksum_tree (TREE_TYPE (expr
), ctx
, ht
);
13060 if (TREE_CODE_CLASS (code
) != tcc_type
13061 && TREE_CODE_CLASS (code
) != tcc_declaration
13062 && code
!= TREE_LIST
13063 && code
!= SSA_NAME
13064 && CODE_CONTAINS_STRUCT (code
, TS_COMMON
))
13065 fold_checksum_tree (TREE_CHAIN (expr
), ctx
, ht
);
13066 switch (TREE_CODE_CLASS (code
))
13072 md5_process_bytes (TREE_STRING_POINTER (expr
),
13073 TREE_STRING_LENGTH (expr
), ctx
);
13076 fold_checksum_tree (TREE_REALPART (expr
), ctx
, ht
);
13077 fold_checksum_tree (TREE_IMAGPART (expr
), ctx
, ht
);
13080 len
= vector_cst_encoded_nelts (expr
);
13081 for (i
= 0; i
< len
; ++i
)
13082 fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr
, i
), ctx
, ht
);
13088 case tcc_exceptional
:
13092 fold_checksum_tree (TREE_PURPOSE (expr
), ctx
, ht
);
13093 fold_checksum_tree (TREE_VALUE (expr
), ctx
, ht
);
13094 expr
= TREE_CHAIN (expr
);
13095 goto recursive_label
;
13098 for (i
= 0; i
< TREE_VEC_LENGTH (expr
); ++i
)
13099 fold_checksum_tree (TREE_VEC_ELT (expr
, i
), ctx
, ht
);
13105 case tcc_expression
:
13106 case tcc_reference
:
13107 case tcc_comparison
:
13110 case tcc_statement
:
13112 len
= TREE_OPERAND_LENGTH (expr
);
13113 for (i
= 0; i
< len
; ++i
)
13114 fold_checksum_tree (TREE_OPERAND (expr
, i
), ctx
, ht
);
13116 case tcc_declaration
:
13117 fold_checksum_tree (DECL_NAME (expr
), ctx
, ht
);
13118 fold_checksum_tree (DECL_CONTEXT (expr
), ctx
, ht
);
13119 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_COMMON
))
13121 fold_checksum_tree (DECL_SIZE (expr
), ctx
, ht
);
13122 fold_checksum_tree (DECL_SIZE_UNIT (expr
), ctx
, ht
);
13123 fold_checksum_tree (DECL_INITIAL (expr
), ctx
, ht
);
13124 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr
), ctx
, ht
);
13125 fold_checksum_tree (DECL_ATTRIBUTES (expr
), ctx
, ht
);
13128 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_NON_COMMON
))
13130 if (TREE_CODE (expr
) == FUNCTION_DECL
)
13132 fold_checksum_tree (DECL_VINDEX (expr
), ctx
, ht
);
13133 fold_checksum_tree (DECL_ARGUMENTS (expr
), ctx
, ht
);
13135 fold_checksum_tree (DECL_RESULT_FLD (expr
), ctx
, ht
);
13139 if (TREE_CODE (expr
) == ENUMERAL_TYPE
)
13140 fold_checksum_tree (TYPE_VALUES (expr
), ctx
, ht
);
13141 fold_checksum_tree (TYPE_SIZE (expr
), ctx
, ht
);
13142 fold_checksum_tree (TYPE_SIZE_UNIT (expr
), ctx
, ht
);
13143 fold_checksum_tree (TYPE_ATTRIBUTES (expr
), ctx
, ht
);
13144 fold_checksum_tree (TYPE_NAME (expr
), ctx
, ht
);
13145 if (INTEGRAL_TYPE_P (expr
)
13146 || SCALAR_FLOAT_TYPE_P (expr
))
13148 fold_checksum_tree (TYPE_MIN_VALUE (expr
), ctx
, ht
);
13149 fold_checksum_tree (TYPE_MAX_VALUE (expr
), ctx
, ht
);
13151 fold_checksum_tree (TYPE_MAIN_VARIANT (expr
), ctx
, ht
);
13152 if (TREE_CODE (expr
) == RECORD_TYPE
13153 || TREE_CODE (expr
) == UNION_TYPE
13154 || TREE_CODE (expr
) == QUAL_UNION_TYPE
)
13155 fold_checksum_tree (TYPE_BINFO (expr
), ctx
, ht
);
13156 fold_checksum_tree (TYPE_CONTEXT (expr
), ctx
, ht
);
/* Helper function for outputting the checksum of a tree T.  When
   debugging with gdb, you can "define mynext" to be "next" followed
   by "call debug_fold_checksum (op0)", then just trace down till the
   outputs differ.  */
13168 DEBUG_FUNCTION
void
13169 debug_fold_checksum (const_tree t
)
13172 unsigned char checksum
[16];
13173 struct md5_ctx ctx
;
13174 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
13176 md5_init_ctx (&ctx
);
13177 fold_checksum_tree (t
, &ctx
, &ht
);
13178 md5_finish_ctx (&ctx
, checksum
);
13181 for (i
= 0; i
< 16; i
++)
13182 fprintf (stderr
, "%d ", checksum
[i
]);
13184 fprintf (stderr
, "\n");
13189 /* Fold a unary tree expression with code CODE of type TYPE with an
13190 operand OP0. LOC is the location of the resulting expression.
13191 Return a folded expression if successful. Otherwise, return a tree
13192 expression with code CODE of type TYPE with an operand OP0. */
13195 fold_build1_loc (location_t loc
,
13196 enum tree_code code
, tree type
, tree op0 MEM_STAT_DECL
)
13199 #ifdef ENABLE_FOLD_CHECKING
13200 unsigned char checksum_before
[16], checksum_after
[16];
13201 struct md5_ctx ctx
;
13202 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
13204 md5_init_ctx (&ctx
);
13205 fold_checksum_tree (op0
, &ctx
, &ht
);
13206 md5_finish_ctx (&ctx
, checksum_before
);
13210 tem
= fold_unary_loc (loc
, code
, type
, op0
);
13212 tem
= build1_loc (loc
, code
, type
, op0 PASS_MEM_STAT
);
13214 #ifdef ENABLE_FOLD_CHECKING
13215 md5_init_ctx (&ctx
);
13216 fold_checksum_tree (op0
, &ctx
, &ht
);
13217 md5_finish_ctx (&ctx
, checksum_after
);
13219 if (memcmp (checksum_before
, checksum_after
, 16))
13220 fold_check_failed (op0
, tem
);
13225 /* Fold a binary tree expression with code CODE of type TYPE with
13226 operands OP0 and OP1. LOC is the location of the resulting
13227 expression. Return a folded expression if successful. Otherwise,
13228 return a tree expression with code CODE of type TYPE with operands
13232 fold_build2_loc (location_t loc
,
13233 enum tree_code code
, tree type
, tree op0
, tree op1
13237 #ifdef ENABLE_FOLD_CHECKING
13238 unsigned char checksum_before_op0
[16],
13239 checksum_before_op1
[16],
13240 checksum_after_op0
[16],
13241 checksum_after_op1
[16];
13242 struct md5_ctx ctx
;
13243 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
13245 md5_init_ctx (&ctx
);
13246 fold_checksum_tree (op0
, &ctx
, &ht
);
13247 md5_finish_ctx (&ctx
, checksum_before_op0
);
13250 md5_init_ctx (&ctx
);
13251 fold_checksum_tree (op1
, &ctx
, &ht
);
13252 md5_finish_ctx (&ctx
, checksum_before_op1
);
13256 tem
= fold_binary_loc (loc
, code
, type
, op0
, op1
);
13258 tem
= build2_loc (loc
, code
, type
, op0
, op1 PASS_MEM_STAT
);
13260 #ifdef ENABLE_FOLD_CHECKING
13261 md5_init_ctx (&ctx
);
13262 fold_checksum_tree (op0
, &ctx
, &ht
);
13263 md5_finish_ctx (&ctx
, checksum_after_op0
);
13266 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
13267 fold_check_failed (op0
, tem
);
13269 md5_init_ctx (&ctx
);
13270 fold_checksum_tree (op1
, &ctx
, &ht
);
13271 md5_finish_ctx (&ctx
, checksum_after_op1
);
13273 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
13274 fold_check_failed (op1
, tem
);
13279 /* Fold a ternary tree expression with code CODE of type TYPE with
13280 operands OP0, OP1, and OP2. Return a folded expression if
13281 successful. Otherwise, return a tree expression with code CODE of
13282 type TYPE with operands OP0, OP1, and OP2. */
13285 fold_build3_loc (location_t loc
, enum tree_code code
, tree type
,
13286 tree op0
, tree op1
, tree op2 MEM_STAT_DECL
)
13289 #ifdef ENABLE_FOLD_CHECKING
13290 unsigned char checksum_before_op0
[16],
13291 checksum_before_op1
[16],
13292 checksum_before_op2
[16],
13293 checksum_after_op0
[16],
13294 checksum_after_op1
[16],
13295 checksum_after_op2
[16];
13296 struct md5_ctx ctx
;
13297 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
13299 md5_init_ctx (&ctx
);
13300 fold_checksum_tree (op0
, &ctx
, &ht
);
13301 md5_finish_ctx (&ctx
, checksum_before_op0
);
13304 md5_init_ctx (&ctx
);
13305 fold_checksum_tree (op1
, &ctx
, &ht
);
13306 md5_finish_ctx (&ctx
, checksum_before_op1
);
13309 md5_init_ctx (&ctx
);
13310 fold_checksum_tree (op2
, &ctx
, &ht
);
13311 md5_finish_ctx (&ctx
, checksum_before_op2
);
13315 gcc_assert (TREE_CODE_CLASS (code
) != tcc_vl_exp
);
13316 tem
= fold_ternary_loc (loc
, code
, type
, op0
, op1
, op2
);
13318 tem
= build3_loc (loc
, code
, type
, op0
, op1
, op2 PASS_MEM_STAT
);
13320 #ifdef ENABLE_FOLD_CHECKING
13321 md5_init_ctx (&ctx
);
13322 fold_checksum_tree (op0
, &ctx
, &ht
);
13323 md5_finish_ctx (&ctx
, checksum_after_op0
);
13326 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
13327 fold_check_failed (op0
, tem
);
13329 md5_init_ctx (&ctx
);
13330 fold_checksum_tree (op1
, &ctx
, &ht
);
13331 md5_finish_ctx (&ctx
, checksum_after_op1
);
13334 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
13335 fold_check_failed (op1
, tem
);
13337 md5_init_ctx (&ctx
);
13338 fold_checksum_tree (op2
, &ctx
, &ht
);
13339 md5_finish_ctx (&ctx
, checksum_after_op2
);
13341 if (memcmp (checksum_before_op2
, checksum_after_op2
, 16))
13342 fold_check_failed (op2
, tem
);
13347 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
13348 arguments in ARGARRAY, and a null static chain.
13349 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13350 of type TYPE from the given operands as constructed by build_call_array. */
13353 fold_build_call_array_loc (location_t loc
, tree type
, tree fn
,
13354 int nargs
, tree
*argarray
)
13357 #ifdef ENABLE_FOLD_CHECKING
13358 unsigned char checksum_before_fn
[16],
13359 checksum_before_arglist
[16],
13360 checksum_after_fn
[16],
13361 checksum_after_arglist
[16];
13362 struct md5_ctx ctx
;
13363 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
13366 md5_init_ctx (&ctx
);
13367 fold_checksum_tree (fn
, &ctx
, &ht
);
13368 md5_finish_ctx (&ctx
, checksum_before_fn
);
13371 md5_init_ctx (&ctx
);
13372 for (i
= 0; i
< nargs
; i
++)
13373 fold_checksum_tree (argarray
[i
], &ctx
, &ht
);
13374 md5_finish_ctx (&ctx
, checksum_before_arglist
);
13378 tem
= fold_builtin_call_array (loc
, type
, fn
, nargs
, argarray
);
13380 tem
= build_call_array_loc (loc
, type
, fn
, nargs
, argarray
);
13382 #ifdef ENABLE_FOLD_CHECKING
13383 md5_init_ctx (&ctx
);
13384 fold_checksum_tree (fn
, &ctx
, &ht
);
13385 md5_finish_ctx (&ctx
, checksum_after_fn
);
13388 if (memcmp (checksum_before_fn
, checksum_after_fn
, 16))
13389 fold_check_failed (fn
, tem
);
13391 md5_init_ctx (&ctx
);
13392 for (i
= 0; i
< nargs
; i
++)
13393 fold_checksum_tree (argarray
[i
], &ctx
, &ht
);
13394 md5_finish_ctx (&ctx
, checksum_after_arglist
);
13396 if (memcmp (checksum_before_arglist
, checksum_after_arglist
, 16))
13397 fold_check_failed (NULL_TREE
, tem
);
13402 /* Perform constant folding and related simplification of initializer
13403 expression EXPR. These behave identically to "fold_buildN" but ignore
13404 potential run-time traps and exceptions that fold must preserve. */
13406 #define START_FOLD_INIT \
13407 int saved_signaling_nans = flag_signaling_nans;\
13408 int saved_trapping_math = flag_trapping_math;\
13409 int saved_rounding_math = flag_rounding_math;\
13410 int saved_trapv = flag_trapv;\
13411 int saved_folding_initializer = folding_initializer;\
13412 flag_signaling_nans = 0;\
13413 flag_trapping_math = 0;\
13414 flag_rounding_math = 0;\
13416 folding_initializer = 1;
13418 #define END_FOLD_INIT \
13419 flag_signaling_nans = saved_signaling_nans;\
13420 flag_trapping_math = saved_trapping_math;\
13421 flag_rounding_math = saved_rounding_math;\
13422 flag_trapv = saved_trapv;\
13423 folding_initializer = saved_folding_initializer;
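/* Illustrative sketch: an initializer such as

     static double d = 1.0 / 3.0;

   must still be folded at compile time under -frounding-math or
   -ftrapping-math, which is why the flags above are saved, cleared and
   restored around the fold_build*_initializer_loc bodies.  */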
13426 fold_build1_initializer_loc (location_t loc
, enum tree_code code
,
13427 tree type
, tree op
)
13432 result
= fold_build1_loc (loc
, code
, type
, op
);
13439 fold_build2_initializer_loc (location_t loc
, enum tree_code code
,
13440 tree type
, tree op0
, tree op1
)
13445 result
= fold_build2_loc (loc
, code
, type
, op0
, op1
);
13452 fold_build_call_array_initializer_loc (location_t loc
, tree type
, tree fn
,
13453 int nargs
, tree
*argarray
)
13458 result
= fold_build_call_array_loc (loc
, type
, fn
, nargs
, argarray
);
13464 #undef START_FOLD_INIT
13465 #undef END_FOLD_INIT
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determine it to be.
13470 An example of the sort of thing we care about (at this point; this routine
13471 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13472 fold cases do now) is discovering that
     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13482 This code also handles discovering that
13484 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13486 is a multiple of 8 so we don't have to worry about dealing with a
13487 possible remainder.
13489 Note that we *look* inside a SAVE_EXPR only to determine how it was
13490 calculated; it is not safe for fold to do much of anything else with the
13491 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13492 at run time. For example, the latter example above *cannot* be implemented
13493 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13494 evaluation time of the original SAVE_EXPR is not necessarily the same at
13495 the time the new expression is evaluated. The only optimization of this
13496 sort that would be valid is changing
13498 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13502 SAVE_EXPR (I) * SAVE_EXPR (J)
13504 (where the same SAVE_EXPR (J) is used in the original and the
13505 transformed version). */
13508 multiple_of_p (tree type
, const_tree top
, const_tree bottom
)
13513 if (operand_equal_p (top
, bottom
, 0))
13516 if (TREE_CODE (type
) != INTEGER_TYPE
)
13519 switch (TREE_CODE (top
))
13522 /* Bitwise and provides a power of two multiple. If the mask is
13523 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
13524 if (!integer_pow2p (bottom
))
13526 return (multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
)
13527 || multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
));
13530 if (TREE_CODE (bottom
) == INTEGER_CST
)
13532 op1
= TREE_OPERAND (top
, 0);
13533 op2
= TREE_OPERAND (top
, 1);
13534 if (TREE_CODE (op1
) == INTEGER_CST
)
13535 std::swap (op1
, op2
);
13536 if (TREE_CODE (op2
) == INTEGER_CST
)
13538 if (multiple_of_p (type
, op2
, bottom
))
13540 /* Handle multiple_of_p ((x * 2 + 2) * 4, 8). */
13541 if (multiple_of_p (type
, bottom
, op2
))
13543 widest_int w
= wi::sdiv_trunc (wi::to_widest (bottom
),
13544 wi::to_widest (op2
));
13545 if (wi::fits_to_tree_p (w
, TREE_TYPE (bottom
)))
13547 op2
= wide_int_to_tree (TREE_TYPE (bottom
), w
);
13548 return multiple_of_p (type
, op1
, op2
);
13551 return multiple_of_p (type
, op1
, bottom
);
13554 return (multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
)
13555 || multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
));
13558 /* It is impossible to prove if op0 - op1 is multiple of bottom
13559 precisely, so be conservative here checking if both op0 and op1
13560 are multiple of bottom. Note we check the second operand first
13561 since it's usually simpler. */
13562 return (multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
)
13563 && multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
));
13566 /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
13567 as op0 - 3 if the expression has unsigned type. For example,
13568 (X / 3) + 0xfffffffd is multiple of 3, but 0xfffffffd is not. */
13569 op1
= TREE_OPERAND (top
, 1);
13570 if (TYPE_UNSIGNED (type
)
13571 && TREE_CODE (op1
) == INTEGER_CST
&& tree_int_cst_sign_bit (op1
))
13572 op1
= fold_build1 (NEGATE_EXPR
, type
, op1
);
13573 return (multiple_of_p (type
, op1
, bottom
)
13574 && multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
));
13577 if (TREE_CODE (TREE_OPERAND (top
, 1)) == INTEGER_CST
)
13579 op1
= TREE_OPERAND (top
, 1);
13580 /* const_binop may not detect overflow correctly,
13581 so check for it explicitly here. */
13582 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node
)),
13584 && (t1
= fold_convert (type
,
13585 const_binop (LSHIFT_EXPR
, size_one_node
,
13587 && !TREE_OVERFLOW (t1
))
13588 return multiple_of_p (type
, t1
, bottom
);
13593 /* Can't handle conversions from non-integral or wider integral type. */
13594 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top
, 0))) != INTEGER_TYPE
)
13595 || (TYPE_PRECISION (type
)
13596 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top
, 0)))))
13602 return multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
);
13605 return (multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
)
13606 && multiple_of_p (type
, TREE_OPERAND (top
, 2), bottom
));
13609 if (TREE_CODE (bottom
) != INTEGER_CST
13610 || integer_zerop (bottom
)
13611 || (TYPE_UNSIGNED (type
)
13612 && (tree_int_cst_sgn (top
) < 0
13613 || tree_int_cst_sgn (bottom
) < 0)))
13615 return wi::multiple_of_p (wi::to_widest (top
), wi::to_widest (bottom
),
13619 if (TREE_CODE (bottom
) == INTEGER_CST
13620 && (stmt
= SSA_NAME_DEF_STMT (top
)) != NULL
13621 && gimple_code (stmt
) == GIMPLE_ASSIGN
)
13623 enum tree_code code
= gimple_assign_rhs_code (stmt
);
	  /* Check for special cases to see if top is defined as multiple
	     of bottom:

	       top = (X & ~(bottom - 1)) ; bottom is power of 2

	     or

	       Y = X % bottom
	       top = X - Y.  */
13634 if (code
== BIT_AND_EXPR
13635 && (op2
= gimple_assign_rhs2 (stmt
)) != NULL_TREE
13636 && TREE_CODE (op2
) == INTEGER_CST
13637 && integer_pow2p (bottom
)
13638 && wi::multiple_of_p (wi::to_widest (op2
),
13639 wi::to_widest (bottom
), UNSIGNED
))
13642 op1
= gimple_assign_rhs1 (stmt
);
13643 if (code
== MINUS_EXPR
13644 && (op2
= gimple_assign_rhs2 (stmt
)) != NULL_TREE
13645 && TREE_CODE (op2
) == SSA_NAME
13646 && (stmt
= SSA_NAME_DEF_STMT (op2
)) != NULL
13647 && gimple_code (stmt
) == GIMPLE_ASSIGN
13648 && (code
= gimple_assign_rhs_code (stmt
)) == TRUNC_MOD_EXPR
13649 && operand_equal_p (op1
, gimple_assign_rhs1 (stmt
), 0)
13650 && operand_equal_p (bottom
, gimple_assign_rhs2 (stmt
), 0))
13657 if (POLY_INT_CST_P (top
) && poly_int_tree_p (bottom
))
13658 return multiple_p (wi::to_poly_widest (top
),
13659 wi::to_poly_widest (bottom
));
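/* Illustrative sketch: given trees for N * 8 + 16 and for the constant 8,
   multiple_of_p returns nonzero (both addends are multiples of 8), while
   it returns 0 -- meaning "not provably a multiple" rather than a
   definite "no" -- whenever the property cannot easily be established.  */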
13665 #define tree_expr_nonnegative_warnv_p(X, Y) \
13666 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13668 #define RECURSE(X) \
13669 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
13671 /* Return true if CODE or TYPE is known to be non-negative. */
13674 tree_simple_nonnegative_warnv_p (enum tree_code code
, tree type
)
13676 if ((TYPE_PRECISION (type
) != 1 || TYPE_UNSIGNED (type
))
13677 && truth_value_p (code
))
13678 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
13679 have a signed:1 type (where the value is -1 and 0). */
13684 /* Return true if (CODE OP0) is known to be non-negative. If the return
13685 value is based on the assumption that signed overflow is undefined,
13686 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13687 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13690 tree_unary_nonnegative_warnv_p (enum tree_code code
, tree type
, tree op0
,
13691 bool *strict_overflow_p
, int depth
)
13693 if (TYPE_UNSIGNED (type
))
13699 /* We can't return 1 if flag_wrapv is set because
13700 ABS_EXPR<INT_MIN> = INT_MIN. */
13701 if (!ANY_INTEGRAL_TYPE_P (type
))
13703 if (TYPE_OVERFLOW_UNDEFINED (type
))
13705 *strict_overflow_p
= true;
13710 case NON_LVALUE_EXPR
:
13712 case FIX_TRUNC_EXPR
:
13713 return RECURSE (op0
);
13717 tree inner_type
= TREE_TYPE (op0
);
13718 tree outer_type
= type
;
13720 if (TREE_CODE (outer_type
) == REAL_TYPE
)
13722 if (TREE_CODE (inner_type
) == REAL_TYPE
)
13723 return RECURSE (op0
);
13724 if (INTEGRAL_TYPE_P (inner_type
))
13726 if (TYPE_UNSIGNED (inner_type
))
13728 return RECURSE (op0
);
13731 else if (INTEGRAL_TYPE_P (outer_type
))
13733 if (TREE_CODE (inner_type
) == REAL_TYPE
)
13734 return RECURSE (op0
);
13735 if (INTEGRAL_TYPE_P (inner_type
))
13736 return TYPE_PRECISION (inner_type
) < TYPE_PRECISION (outer_type
)
13737 && TYPE_UNSIGNED (inner_type
);
13743 return tree_simple_nonnegative_warnv_p (code
, type
);
13746 /* We don't know sign of `t', so be conservative and return false. */
13750 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
13751 value is based on the assumption that signed overflow is undefined,
13752 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13753 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13756 tree_binary_nonnegative_warnv_p (enum tree_code code
, tree type
, tree op0
,
13757 tree op1
, bool *strict_overflow_p
,
13760 if (TYPE_UNSIGNED (type
))
13765 case POINTER_PLUS_EXPR
:
13767 if (FLOAT_TYPE_P (type
))
13768 return RECURSE (op0
) && RECURSE (op1
);
13770 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13771 both unsigned and at least 2 bits shorter than the result. */
13772 if (TREE_CODE (type
) == INTEGER_TYPE
13773 && TREE_CODE (op0
) == NOP_EXPR
13774 && TREE_CODE (op1
) == NOP_EXPR
)
13776 tree inner1
= TREE_TYPE (TREE_OPERAND (op0
, 0));
13777 tree inner2
= TREE_TYPE (TREE_OPERAND (op1
, 0));
13778 if (TREE_CODE (inner1
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner1
)
13779 && TREE_CODE (inner2
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner2
))
13781 unsigned int prec
= MAX (TYPE_PRECISION (inner1
),
13782 TYPE_PRECISION (inner2
)) + 1;
13783 return prec
< TYPE_PRECISION (type
);
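      /* Illustrative sketch: with 16-bit unsigned short and 32-bit int,

	   (int) (unsigned short) a + (int) (unsigned short) b

	 is nonnegative: each zero-extended operand is below 2**16, so the
	 sum is below 2**17 and fits in the 31 value bits of int, which is
	 what the "at least 2 bits shorter" test above checks.  */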
13789 if (FLOAT_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
13791 /* x * x is always non-negative for floating point x
13792 or without overflow. */
13793 if (operand_equal_p (op0
, op1
, 0)
13794 || (RECURSE (op0
) && RECURSE (op1
)))
13796 if (ANY_INTEGRAL_TYPE_P (type
)
13797 && TYPE_OVERFLOW_UNDEFINED (type
))
13798 *strict_overflow_p
= true;
13803 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
13804 both unsigned and their total bits is shorter than the result. */
13805 if (TREE_CODE (type
) == INTEGER_TYPE
13806 && (TREE_CODE (op0
) == NOP_EXPR
|| TREE_CODE (op0
) == INTEGER_CST
)
13807 && (TREE_CODE (op1
) == NOP_EXPR
|| TREE_CODE (op1
) == INTEGER_CST
))
13809 tree inner0
= (TREE_CODE (op0
) == NOP_EXPR
)
13810 ? TREE_TYPE (TREE_OPERAND (op0
, 0))
13812 tree inner1
= (TREE_CODE (op1
) == NOP_EXPR
)
13813 ? TREE_TYPE (TREE_OPERAND (op1
, 0))
13816 bool unsigned0
= TYPE_UNSIGNED (inner0
);
13817 bool unsigned1
= TYPE_UNSIGNED (inner1
);
13819 if (TREE_CODE (op0
) == INTEGER_CST
)
13820 unsigned0
= unsigned0
|| tree_int_cst_sgn (op0
) >= 0;
13822 if (TREE_CODE (op1
) == INTEGER_CST
)
13823 unsigned1
= unsigned1
|| tree_int_cst_sgn (op1
) >= 0;
13825 if (TREE_CODE (inner0
) == INTEGER_TYPE
&& unsigned0
13826 && TREE_CODE (inner1
) == INTEGER_TYPE
&& unsigned1
)
13828 unsigned int precision0
= (TREE_CODE (op0
) == INTEGER_CST
)
13829 ? tree_int_cst_min_precision (op0
, UNSIGNED
)
13830 : TYPE_PRECISION (inner0
);
13832 unsigned int precision1
= (TREE_CODE (op1
) == INTEGER_CST
)
13833 ? tree_int_cst_min_precision (op1
, UNSIGNED
)
13834 : TYPE_PRECISION (inner1
);
13836 return precision0
+ precision1
< TYPE_PRECISION (type
);
      return RECURSE (op0) || RECURSE (op1);

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return RECURSE (op0) && RECURSE (op1);

    case TRUNC_MOD_EXPR:
      return RECURSE (op0);

    case FLOOR_MOD_EXPR:
      return RECURSE (op1);

    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}

/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
bool
tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
{
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case FIXED_CST:
      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));

    case COND_EXPR:
      return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));

    case SSA_NAME:
      /* Limit the depth of recursion to avoid quadratic behavior.
	 This is expected to catch almost all occurrences in practice.
	 If this code misses important cases that unbounded recursion
	 would not, passes that need this information could be revised
	 to provide it through dataflow propagation.  */
      return (!name_registered_for_update_p (t)
	      && depth < param_max_ssa_name_query_depth
	      && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
						  strict_overflow_p, depth));

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
    }
}

/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
bool
tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
			       bool *strict_overflow_p, int depth)
{
  switch (fn)
    {
13940 case CFN_BUILT_IN_BSWAP16
:
13941 case CFN_BUILT_IN_BSWAP32
:
13942 case CFN_BUILT_IN_BSWAP64
:
13943 case CFN_BUILT_IN_BSWAP128
:
13949 /* sqrt(-0.0) is -0.0. */
13950 if (!HONOR_SIGNED_ZEROS (element_mode (type
)))
13952 return RECURSE (arg0
);
13980 CASE_CFN_NEARBYINT
:
13981 CASE_CFN_NEARBYINT_FN
:
13986 CASE_CFN_ROUNDEVEN
:
13987 CASE_CFN_ROUNDEVEN_FN
:
13992 CASE_CFN_SIGNIFICAND
:
13997 /* True if the 1st argument is nonnegative. */
13998 return RECURSE (arg0
);
14002 /* True if the 1st OR 2nd arguments are nonnegative. */
14003 return RECURSE (arg0
) || RECURSE (arg1
);
14007 /* True if the 1st AND 2nd arguments are nonnegative. */
14008 return RECURSE (arg0
) && RECURSE (arg1
);
14011 CASE_CFN_COPYSIGN_FN
:
14012 /* True if the 2nd argument is nonnegative. */
14013 return RECURSE (arg1
);
14016 /* True if the 1st argument is nonnegative or the second
14017 argument is an even integer. */
14018 if (TREE_CODE (arg1
) == INTEGER_CST
14019 && (TREE_INT_CST_LOW (arg1
) & 1) == 0)
14021 return RECURSE (arg0
);
14024 /* True if the 1st argument is nonnegative or the second
14025 argument is an even integer valued real. */
14026 if (TREE_CODE (arg1
) == REAL_CST
)
14031 c
= TREE_REAL_CST (arg1
);
14032 n
= real_to_integer (&c
);
14035 REAL_VALUE_TYPE cint
;
14036 real_from_integer (&cint
, VOIDmode
, n
, SIGNED
);
14037 if (real_identical (&c
, &cint
))
14041 return RECURSE (arg0
);
14046 return tree_simple_nonnegative_warnv_p (CALL_EXPR
, type
);
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
14055 tree_invalid_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
, int depth
)
14057 enum tree_code code
= TREE_CODE (t
);
14058 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
14065 tree temp
= TARGET_EXPR_SLOT (t
);
14066 t
= TARGET_EXPR_INITIAL (t
);
14068 /* If the initializer is non-void, then it's a normal expression
14069 that will be assigned to the slot. */
14070 if (!VOID_TYPE_P (t
))
14071 return RECURSE (t
);
14073 /* Otherwise, the initializer sets the slot in some way. One common
14074 way is an assignment statement at the end of the initializer. */
14077 if (TREE_CODE (t
) == BIND_EXPR
)
14078 t
= expr_last (BIND_EXPR_BODY (t
));
14079 else if (TREE_CODE (t
) == TRY_FINALLY_EXPR
14080 || TREE_CODE (t
) == TRY_CATCH_EXPR
)
14081 t
= expr_last (TREE_OPERAND (t
, 0));
14082 else if (TREE_CODE (t
) == STATEMENT_LIST
)
14087 if (TREE_CODE (t
) == MODIFY_EXPR
14088 && TREE_OPERAND (t
, 0) == temp
)
14089 return RECURSE (TREE_OPERAND (t
, 1));
14096 tree arg0
= call_expr_nargs (t
) > 0 ? CALL_EXPR_ARG (t
, 0) : NULL_TREE
;
14097 tree arg1
= call_expr_nargs (t
) > 1 ? CALL_EXPR_ARG (t
, 1) : NULL_TREE
;
14099 return tree_call_nonnegative_warnv_p (TREE_TYPE (t
),
14100 get_call_combined_fn (t
),
14103 strict_overflow_p
, depth
);
14105 case COMPOUND_EXPR
:
14107 return RECURSE (TREE_OPERAND (t
, 1));
14110 return RECURSE (expr_last (TREE_OPERAND (t
, 1)));
14113 return RECURSE (TREE_OPERAND (t
, 0));
14116 return tree_simple_nonnegative_warnv_p (TREE_CODE (t
), TREE_TYPE (t
));
#undef tree_expr_nonnegative_warnv_p

/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
14129 tree_expr_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
, int depth
)
14131 enum tree_code code
;
14132 if (t
== error_mark_node
)
14135 code
= TREE_CODE (t
);
14136 switch (TREE_CODE_CLASS (code
))
14139 case tcc_comparison
:
14140 return tree_binary_nonnegative_warnv_p (TREE_CODE (t
),
14142 TREE_OPERAND (t
, 0),
14143 TREE_OPERAND (t
, 1),
14144 strict_overflow_p
, depth
);
14147 return tree_unary_nonnegative_warnv_p (TREE_CODE (t
),
14149 TREE_OPERAND (t
, 0),
14150 strict_overflow_p
, depth
);
14153 case tcc_declaration
:
14154 case tcc_reference
:
14155 return tree_single_nonnegative_warnv_p (t
, strict_overflow_p
, depth
);
14163 case TRUTH_AND_EXPR
:
14164 case TRUTH_OR_EXPR
:
14165 case TRUTH_XOR_EXPR
:
14166 return tree_binary_nonnegative_warnv_p (TREE_CODE (t
),
14168 TREE_OPERAND (t
, 0),
14169 TREE_OPERAND (t
, 1),
14170 strict_overflow_p
, depth
);
14171 case TRUTH_NOT_EXPR
:
14172 return tree_unary_nonnegative_warnv_p (TREE_CODE (t
),
14174 TREE_OPERAND (t
, 0),
14175 strict_overflow_p
, depth
);
14182 case WITH_SIZE_EXPR
:
14184 return tree_single_nonnegative_warnv_p (t
, strict_overflow_p
, depth
);
14187 return tree_invalid_nonnegative_warnv_p (t
, strict_overflow_p
, depth
);
/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */

bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-negative"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}

/* Return true when (CODE OP0) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */
14219 tree_unary_nonzero_warnv_p (enum tree_code code
, tree type
, tree op0
,
14220 bool *strict_overflow_p
)
14225 return tree_expr_nonzero_warnv_p (op0
,
14226 strict_overflow_p
);
14230 tree inner_type
= TREE_TYPE (op0
);
14231 tree outer_type
= type
;
14233 return (TYPE_PRECISION (outer_type
) >= TYPE_PRECISION (inner_type
)
14234 && tree_expr_nonzero_warnv_p (op0
,
14235 strict_overflow_p
));
14239 case NON_LVALUE_EXPR
:
14240 return tree_expr_nonzero_warnv_p (op0
,
14241 strict_overflow_p
);
/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */
14259 tree_binary_nonzero_warnv_p (enum tree_code code
,
14262 tree op1
, bool *strict_overflow_p
)
14264 bool sub_strict_overflow_p
;
14267 case POINTER_PLUS_EXPR
:
14269 if (ANY_INTEGRAL_TYPE_P (type
) && TYPE_OVERFLOW_UNDEFINED (type
))
14271 /* With the presence of negative values it is hard
14272 to say something. */
14273 sub_strict_overflow_p
= false;
14274 if (!tree_expr_nonnegative_warnv_p (op0
,
14275 &sub_strict_overflow_p
)
14276 || !tree_expr_nonnegative_warnv_p (op1
,
14277 &sub_strict_overflow_p
))
14279 /* One of operands must be positive and the other non-negative. */
14280 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14281 overflows, on a twos-complement machine the sum of two
14282 nonnegative numbers can never be zero. */
14283 return (tree_expr_nonzero_warnv_p (op0
,
14285 || tree_expr_nonzero_warnv_p (op1
,
14286 strict_overflow_p
));
14291 if (TYPE_OVERFLOW_UNDEFINED (type
))
14293 if (tree_expr_nonzero_warnv_p (op0
,
14295 && tree_expr_nonzero_warnv_p (op1
,
14296 strict_overflow_p
))
14298 *strict_overflow_p
= true;
14305 sub_strict_overflow_p
= false;
14306 if (tree_expr_nonzero_warnv_p (op0
,
14307 &sub_strict_overflow_p
)
14308 && tree_expr_nonzero_warnv_p (op1
,
14309 &sub_strict_overflow_p
))
14311 if (sub_strict_overflow_p
)
14312 *strict_overflow_p
= true;
14317 sub_strict_overflow_p
= false;
14318 if (tree_expr_nonzero_warnv_p (op0
,
14319 &sub_strict_overflow_p
))
14321 if (sub_strict_overflow_p
)
14322 *strict_overflow_p
= true;
14324 /* When both operands are nonzero, then MAX must be too. */
14325 if (tree_expr_nonzero_warnv_p (op1
,
14326 strict_overflow_p
))
14329 /* MAX where operand 0 is positive is positive. */
14330 return tree_expr_nonnegative_warnv_p (op0
,
14331 strict_overflow_p
);
14333 /* MAX where operand 1 is positive is positive. */
14334 else if (tree_expr_nonzero_warnv_p (op1
,
14335 &sub_strict_overflow_p
)
14336 && tree_expr_nonnegative_warnv_p (op1
,
14337 &sub_strict_overflow_p
))
14339 if (sub_strict_overflow_p
)
14340 *strict_overflow_p
= true;
14346 return (tree_expr_nonzero_warnv_p (op1
,
14348 || tree_expr_nonzero_warnv_p (op0
,
14349 strict_overflow_p
));
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */
14367 tree_single_nonzero_warnv_p (tree t
, bool *strict_overflow_p
)
14369 bool sub_strict_overflow_p
;
14370 switch (TREE_CODE (t
))
14373 return !integer_zerop (t
);
14377 tree base
= TREE_OPERAND (t
, 0);
14379 if (!DECL_P (base
))
14380 base
= get_base_address (base
);
14382 if (base
&& TREE_CODE (base
) == TARGET_EXPR
)
14383 base
= TARGET_EXPR_SLOT (base
);
14388 /* For objects in symbol table check if we know they are non-zero.
14389 Don't do anything for variables and functions before symtab is built;
14390 it is quite possible that they will be declared weak later. */
14391 int nonzero_addr
= maybe_nonzero_address (base
);
14392 if (nonzero_addr
>= 0)
14393 return nonzero_addr
;
14395 /* Constants are never weak. */
14396 if (CONSTANT_CLASS_P (base
))
14403 sub_strict_overflow_p
= false;
14404 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 1),
14405 &sub_strict_overflow_p
)
14406 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 2),
14407 &sub_strict_overflow_p
))
14409 if (sub_strict_overflow_p
)
14410 *strict_overflow_p
= true;
14416 if (!INTEGRAL_TYPE_P (TREE_TYPE (t
)))
14418 return expr_not_equal_to (t
, wi::zero (TYPE_PRECISION (TREE_TYPE (t
))));
#define integer_valued_real_p(X) \
  _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0

#define RECURSE(X) \
  ((integer_valued_real_p) (X, depth + 1))

/* Return true if the floating point result of (CODE OP0) has an
   integer value.  We also allow +Inf, -Inf and NaN to be considered
   integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */
14439 integer_valued_real_unary_p (tree_code code
, tree op0
, int depth
)
14447 return RECURSE (op0
);
14451 tree type
= TREE_TYPE (op0
);
14452 if (TREE_CODE (type
) == INTEGER_TYPE
)
14454 if (TREE_CODE (type
) == REAL_TYPE
)
14455 return RECURSE (op0
);
/* Return true if the floating point result of (CODE OP0 OP1) has an
   integer value.  We also allow +Inf, -Inf and NaN to be considered
   integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */
bool
integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
{
  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      return RECURSE (op0) && RECURSE (op1);

    default:
      break;
    }
  return false;
}

/* Return true if the floating point result of calling FNDECL with arguments
   ARG0 and ARG1 has an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.  If FNDECL
   takes fewer than 2 arguments, the remaining ARGn are null.

   DEPTH is the current nesting depth of the query.  */
14497 integer_valued_real_call_p (combined_fn fn
, tree arg0
, tree arg1
, int depth
)
14505 CASE_CFN_NEARBYINT
:
14506 CASE_CFN_NEARBYINT_FN
:
14511 CASE_CFN_ROUNDEVEN
:
14512 CASE_CFN_ROUNDEVEN_FN
:
14521 return RECURSE (arg0
) && RECURSE (arg1
);
/* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
   has an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */
14536 integer_valued_real_single_p (tree t
, int depth
)
14538 switch (TREE_CODE (t
))
14541 return real_isinteger (TREE_REAL_CST_PTR (t
), TYPE_MODE (TREE_TYPE (t
)));
14544 return RECURSE (TREE_OPERAND (t
, 1)) && RECURSE (TREE_OPERAND (t
, 2));
14547 /* Limit the depth of recursion to avoid quadratic behavior.
14548 This is expected to catch almost all occurrences in practice.
14549 If this code misses important cases that unbounded recursion
14550 would not, passes that need this information could be revised
14551 to provide it through dataflow propagation. */
14552 return (!name_registered_for_update_p (t
)
14553 && depth
< param_max_ssa_name_query_depth
14554 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t
),
/* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
   has an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */
14570 integer_valued_real_invalid_p (tree t
, int depth
)
14572 switch (TREE_CODE (t
))
14574 case COMPOUND_EXPR
:
14577 return RECURSE (TREE_OPERAND (t
, 1));
14580 return RECURSE (TREE_OPERAND (t
, 0));
#undef integer_valued_real_p

/* Return true if the floating point expression T has an integer value.
   We also allow +Inf, -Inf and NaN to be considered integer values.
   Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */
14598 integer_valued_real_p (tree t
, int depth
)
14600 if (t
== error_mark_node
)
14603 STRIP_ANY_LOCATION_WRAPPER (t
);
14605 tree_code code
= TREE_CODE (t
);
14606 switch (TREE_CODE_CLASS (code
))
14609 case tcc_comparison
:
14610 return integer_valued_real_binary_p (code
, TREE_OPERAND (t
, 0),
14611 TREE_OPERAND (t
, 1), depth
);
14614 return integer_valued_real_unary_p (code
, TREE_OPERAND (t
, 0), depth
);
14617 case tcc_declaration
:
14618 case tcc_reference
:
14619 return integer_valued_real_single_p (t
, depth
);
14629 return integer_valued_real_single_p (t
, depth
);
14633 tree arg0
= (call_expr_nargs (t
) > 0
14634 ? CALL_EXPR_ARG (t
, 0)
14636 tree arg1
= (call_expr_nargs (t
) > 1
14637 ? CALL_EXPR_ARG (t
, 1)
14639 return integer_valued_real_call_p (get_call_combined_fn (t
),
14640 arg0
, arg1
, depth
);
14644 return integer_valued_real_invalid_p (t
, depth
);
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */
tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */
tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */
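/* Illustrative example, not part of the original source: given
     static const char msg[] = "hi";
   an access like msg[1] (an ARRAY_REF into a STRING_CST) can be folded
   here to the character constant 'i', and *(msg + 1) (an INDIRECT_REF
   on constant pointer arithmetic) folds the same way.  */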
14683 fold_read_from_constant_string (tree exp
)
14685 if ((TREE_CODE (exp
) == INDIRECT_REF
14686 || TREE_CODE (exp
) == ARRAY_REF
)
14687 && TREE_CODE (TREE_TYPE (exp
)) == INTEGER_TYPE
)
14689 tree exp1
= TREE_OPERAND (exp
, 0);
14692 location_t loc
= EXPR_LOCATION (exp
);
14694 if (TREE_CODE (exp
) == INDIRECT_REF
)
14695 string
= string_constant (exp1
, &index
, NULL
, NULL
);
14698 tree low_bound
= array_ref_low_bound (exp
);
14699 index
= fold_convert_loc (loc
, sizetype
, TREE_OPERAND (exp
, 1));
      /* Optimize the special-case of a zero lower bound.

	 We convert the low_bound to sizetype to avoid some problems
	 with constant folding.  (E.g. suppose the lower bound is 1,
	 and its mode is QI.  Without the conversion, (ARRAY
	 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	 +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
14708 if (! integer_zerop (low_bound
))
14709 index
= size_diffop_loc (loc
, index
,
14710 fold_convert_loc (loc
, sizetype
, low_bound
));
14715 scalar_int_mode char_mode
;
14717 && TYPE_MODE (TREE_TYPE (exp
)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string
)))
14718 && TREE_CODE (string
) == STRING_CST
14719 && TREE_CODE (index
) == INTEGER_CST
14720 && compare_tree_int (index
, TREE_STRING_LENGTH (string
)) < 0
14721 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string
))),
14723 && GET_MODE_SIZE (char_mode
) == 1)
14724 return build_int_cst_type (TREE_TYPE (exp
),
14725 (TREE_STRING_POINTER (string
)
14726 [TREE_INT_CST_LOW (index
)]));
/* Folds a read from vector element at IDX of vector ARG.  */
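/* Illustrative example, not part of the original source: for a
   VECTOR_CST such as { 10, 20, 30, 40 } and IDX == 2, this returns the
   INTEGER_CST 30; for a CONSTRUCTOR with fewer initialized elements
   than the vector has lanes, a read past the last initializer yields
   the zero constant of the element type.  */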
tree
fold_read_from_vector (tree arg, poly_uint64 idx)
{
  unsigned HOST_WIDE_INT i;
  if (known_lt (idx, TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)))
      && known_ge (idx, 0u)
      && idx.is_constant (&i))
    {
      if (TREE_CODE (arg) == VECTOR_CST)
	return VECTOR_CST_ELT (arg, i);
      else if (TREE_CODE (arg) == CONSTRUCTOR)
	{
	  if (i >= CONSTRUCTOR_NELTS (arg))
	    return build_zero_cst (TREE_TYPE (TREE_TYPE (arg)));
	  return CONSTRUCTOR_ELT (arg, i)->value;
	}
    }
  return NULL_TREE;
}

/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */
14759 fold_negate_const (tree arg0
, tree type
)
14761 tree t
= NULL_TREE
;
14763 switch (TREE_CODE (arg0
))
14766 t
= build_real (type
, real_value_negate (&TREE_REAL_CST (arg0
)));
14771 FIXED_VALUE_TYPE f
;
14772 bool overflow_p
= fixed_arithmetic (&f
, NEGATE_EXPR
,
14773 &(TREE_FIXED_CST (arg0
)), NULL
,
14774 TYPE_SATURATING (type
));
14775 t
= build_fixed (type
, f
);
14776 /* Propagate overflow flags. */
14777 if (overflow_p
| TREE_OVERFLOW (arg0
))
14778 TREE_OVERFLOW (t
) = 1;
14783 if (poly_int_tree_p (arg0
))
14785 wi::overflow_type overflow
;
14786 poly_wide_int res
= wi::neg (wi::to_poly_wide (arg0
), &overflow
);
14787 t
= force_fit_type (type
, res
, 1,
14788 (overflow
&& ! TYPE_UNSIGNED (type
))
14789 || TREE_OVERFLOW (arg0
));
14793 gcc_unreachable ();
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */
14805 fold_abs_const (tree arg0
, tree type
)
14807 tree t
= NULL_TREE
;
14809 switch (TREE_CODE (arg0
))
14813 /* If the value is unsigned or non-negative, then the absolute value
14814 is the same as the ordinary value. */
14815 wide_int val
= wi::to_wide (arg0
);
14816 wi::overflow_type overflow
= wi::OVF_NONE
;
14817 if (!wi::neg_p (val
, TYPE_SIGN (TREE_TYPE (arg0
))))
14820 /* If the value is negative, then the absolute value is
14823 val
= wi::neg (val
, &overflow
);
14825 /* Force to the destination type, set TREE_OVERFLOW for signed
14827 t
= force_fit_type (type
, val
, 1, overflow
| TREE_OVERFLOW (arg0
));
14832 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0
)))
14833 t
= build_real (type
, real_value_negate (&TREE_REAL_CST (arg0
)));
14839 gcc_unreachable ();
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */
static tree
fold_not_const (const_tree arg0, tree type)
{
  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
}

/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */
14862 fold_relational_const (enum tree_code code
, tree type
, tree op0
, tree op1
)
14864 int result
, invert
;
14866 /* From here on, the only cases we handle are when the result is
14867 known to be a constant. */
14869 if (TREE_CODE (op0
) == REAL_CST
&& TREE_CODE (op1
) == REAL_CST
)
14871 const REAL_VALUE_TYPE
*c0
= TREE_REAL_CST_PTR (op0
);
14872 const REAL_VALUE_TYPE
*c1
= TREE_REAL_CST_PTR (op1
);
14874 /* Handle the cases where either operand is a NaN. */
14875 if (real_isnan (c0
) || real_isnan (c1
))
14885 case UNORDERED_EXPR
:
14899 if (flag_trapping_math
)
14905 gcc_unreachable ();
14908 return constant_boolean_node (result
, type
);
14911 return constant_boolean_node (real_compare (code
, c0
, c1
), type
);
14914 if (TREE_CODE (op0
) == FIXED_CST
&& TREE_CODE (op1
) == FIXED_CST
)
14916 const FIXED_VALUE_TYPE
*c0
= TREE_FIXED_CST_PTR (op0
);
14917 const FIXED_VALUE_TYPE
*c1
= TREE_FIXED_CST_PTR (op1
);
14918 return constant_boolean_node (fixed_compare (code
, c0
, c1
), type
);
14921 /* Handle equality/inequality of complex constants. */
14922 if (TREE_CODE (op0
) == COMPLEX_CST
&& TREE_CODE (op1
) == COMPLEX_CST
)
14924 tree rcond
= fold_relational_const (code
, type
,
14925 TREE_REALPART (op0
),
14926 TREE_REALPART (op1
));
14927 tree icond
= fold_relational_const (code
, type
,
14928 TREE_IMAGPART (op0
),
14929 TREE_IMAGPART (op1
));
14930 if (code
== EQ_EXPR
)
14931 return fold_build2 (TRUTH_ANDIF_EXPR
, type
, rcond
, icond
);
14932 else if (code
== NE_EXPR
)
14933 return fold_build2 (TRUTH_ORIF_EXPR
, type
, rcond
, icond
);
14938 if (TREE_CODE (op0
) == VECTOR_CST
&& TREE_CODE (op1
) == VECTOR_CST
)
14940 if (!VECTOR_TYPE_P (type
))
14942 /* Have vector comparison with scalar boolean result. */
14943 gcc_assert ((code
== EQ_EXPR
|| code
== NE_EXPR
)
14944 && known_eq (VECTOR_CST_NELTS (op0
),
14945 VECTOR_CST_NELTS (op1
)));
14946 unsigned HOST_WIDE_INT nunits
;
14947 if (!VECTOR_CST_NELTS (op0
).is_constant (&nunits
))
14949 for (unsigned i
= 0; i
< nunits
; i
++)
14951 tree elem0
= VECTOR_CST_ELT (op0
, i
);
14952 tree elem1
= VECTOR_CST_ELT (op1
, i
);
14953 tree tmp
= fold_relational_const (EQ_EXPR
, type
, elem0
, elem1
);
14954 if (tmp
== NULL_TREE
)
14956 if (integer_zerop (tmp
))
14957 return constant_boolean_node (code
== NE_EXPR
, type
);
14959 return constant_boolean_node (code
== EQ_EXPR
, type
);
14961 tree_vector_builder elts
;
14962 if (!elts
.new_binary_operation (type
, op0
, op1
, false))
14964 unsigned int count
= elts
.encoded_nelts ();
14965 for (unsigned i
= 0; i
< count
; i
++)
14967 tree elem_type
= TREE_TYPE (type
);
14968 tree elem0
= VECTOR_CST_ELT (op0
, i
);
14969 tree elem1
= VECTOR_CST_ELT (op1
, i
);
14971 tree tem
= fold_relational_const (code
, elem_type
,
14974 if (tem
== NULL_TREE
)
14977 elts
.quick_push (build_int_cst (elem_type
,
14978 integer_zerop (tem
) ? 0 : -1));
14981 return elts
.build ();
  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */
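  /* Illustrative example, not part of the original source: folding
     7 >= 3 proceeds as GE -> LT plus inversion, i.e. it evaluates
     7 < 3 (false) and inverts it, yielding true; folding 3 > 7 first
     swaps the operands and then evaluates 7 < 3, yielding false.  */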
14993 if (code
== LE_EXPR
|| code
== GT_EXPR
)
14995 std::swap (op0
, op1
);
14996 code
= swap_tree_comparison (code
);
14999 /* Note that it is safe to invert for real values here because we
15000 have already handled the one case that it matters. */
15003 if (code
== NE_EXPR
|| code
== GE_EXPR
)
15006 code
= invert_tree_comparison (code
, false);
15009 /* Compute a result for LT or EQ if args permit;
15010 Otherwise return T. */
15011 if (TREE_CODE (op0
) == INTEGER_CST
&& TREE_CODE (op1
) == INTEGER_CST
)
15013 if (code
== EQ_EXPR
)
15014 result
= tree_int_cst_equal (op0
, op1
);
15016 result
= tree_int_cst_lt (op0
, op1
);
15023 return constant_boolean_node (result
, type
);
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */
tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check to see if the expression inside the
     return has no side effects or the right hand side of the modify expression
     inside the return. If either don't have side effects set we don't need to
     wrap the expression in a cleanup point expression.  Note we don't check the
     left hand side of the modify because it should always be a return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
	return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
	return expr;
    }

  return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
}

/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */
15061 fold_indirect_ref_1 (location_t loc
, tree type
, tree op0
)
15065 poly_uint64 const_op01
;
15068 subtype
= TREE_TYPE (sub
);
15069 if (!POINTER_TYPE_P (subtype
)
15070 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0
)))
15073 if (TREE_CODE (sub
) == ADDR_EXPR
)
15075 tree op
= TREE_OPERAND (sub
, 0);
15076 tree optype
= TREE_TYPE (op
);
15078 /* *&CONST_DECL -> to the value of the const decl. */
15079 if (TREE_CODE (op
) == CONST_DECL
)
15080 return DECL_INITIAL (op
);
15081 /* *&p => p; make sure to handle *&"str"[cst] here. */
15082 if (type
== optype
)
15084 tree fop
= fold_read_from_constant_string (op
);
15090 /* *(foo *)&fooarray => fooarray[0] */
15091 else if (TREE_CODE (optype
) == ARRAY_TYPE
15092 && type
== TREE_TYPE (optype
)
15093 && (!in_gimple_form
15094 || TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
))
15096 tree type_domain
= TYPE_DOMAIN (optype
);
15097 tree min_val
= size_zero_node
;
15098 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
15099 min_val
= TYPE_MIN_VALUE (type_domain
);
15101 && TREE_CODE (min_val
) != INTEGER_CST
)
15103 return build4_loc (loc
, ARRAY_REF
, type
, op
, min_val
,
15104 NULL_TREE
, NULL_TREE
);
15106 /* *(foo *)&complexfoo => __real__ complexfoo */
15107 else if (TREE_CODE (optype
) == COMPLEX_TYPE
15108 && type
== TREE_TYPE (optype
))
15109 return fold_build1_loc (loc
, REALPART_EXPR
, type
, op
);
15110 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15111 else if (VECTOR_TYPE_P (optype
)
15112 && type
== TREE_TYPE (optype
))
15114 tree part_width
= TYPE_SIZE (type
);
15115 tree index
= bitsize_int (0);
15116 return fold_build3_loc (loc
, BIT_FIELD_REF
, type
, op
, part_width
,
15121 if (TREE_CODE (sub
) == POINTER_PLUS_EXPR
15122 && poly_int_tree_p (TREE_OPERAND (sub
, 1), &const_op01
))
15124 tree op00
= TREE_OPERAND (sub
, 0);
15125 tree op01
= TREE_OPERAND (sub
, 1);
15128 if (TREE_CODE (op00
) == ADDR_EXPR
)
15131 op00
= TREE_OPERAND (op00
, 0);
15132 op00type
= TREE_TYPE (op00
);
15134 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15135 if (VECTOR_TYPE_P (op00type
)
15136 && type
== TREE_TYPE (op00type
)
15137 /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
15138 but we want to treat offsets with MSB set as negative.
15139 For the code below negative offsets are invalid and
15140 TYPE_SIZE of the element is something unsigned, so
15141 check whether op01 fits into poly_int64, which implies
15142 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
15143 then just use poly_uint64 because we want to treat the
15144 value as unsigned. */
15145 && tree_fits_poly_int64_p (op01
))
15147 tree part_width
= TYPE_SIZE (type
);
15148 poly_uint64 max_offset
15149 = (tree_to_uhwi (part_width
) / BITS_PER_UNIT
15150 * TYPE_VECTOR_SUBPARTS (op00type
));
15151 if (known_lt (const_op01
, max_offset
))
15153 tree index
= bitsize_int (const_op01
* BITS_PER_UNIT
);
15154 return fold_build3_loc (loc
,
15155 BIT_FIELD_REF
, type
, op00
,
15156 part_width
, index
);
15159 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15160 else if (TREE_CODE (op00type
) == COMPLEX_TYPE
15161 && type
== TREE_TYPE (op00type
))
15163 if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type
)),
15165 return fold_build1_loc (loc
, IMAGPART_EXPR
, type
, op00
);
15167 /* ((foo *)&fooarray)[1] => fooarray[1] */
15168 else if (TREE_CODE (op00type
) == ARRAY_TYPE
15169 && type
== TREE_TYPE (op00type
))
15171 tree type_domain
= TYPE_DOMAIN (op00type
);
15172 tree min_val
= size_zero_node
;
15173 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
15174 min_val
= TYPE_MIN_VALUE (type_domain
);
15175 poly_uint64 type_size
, index
;
15176 if (poly_int_tree_p (min_val
)
15177 && poly_int_tree_p (TYPE_SIZE_UNIT (type
), &type_size
)
15178 && multiple_p (const_op01
, type_size
, &index
))
15180 poly_offset_int off
= index
+ wi::to_poly_offset (min_val
);
15181 op01
= wide_int_to_tree (sizetype
, off
);
15182 return build4_loc (loc
, ARRAY_REF
, type
, op00
, op01
,
15183 NULL_TREE
, NULL_TREE
);
15189 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15190 if (TREE_CODE (TREE_TYPE (subtype
)) == ARRAY_TYPE
15191 && type
== TREE_TYPE (TREE_TYPE (subtype
))
15192 && (!in_gimple_form
15193 || TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
))
15196 tree min_val
= size_zero_node
;
15197 sub
= build_fold_indirect_ref_loc (loc
, sub
);
15198 type_domain
= TYPE_DOMAIN (TREE_TYPE (sub
));
15199 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
15200 min_val
= TYPE_MIN_VALUE (type_domain
);
15202 && TREE_CODE (min_val
) != INTEGER_CST
)
15204 return build4_loc (loc
, ARRAY_REF
, type
, sub
, min_val
, NULL_TREE
,
/* Builds an expression for an indirection through T, simplifying some
   cases.  */
tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}

/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */
15244 fold_ignored_result (tree t
)
15246 if (!TREE_SIDE_EFFECTS (t
))
15247 return integer_zero_node
;
15250 switch (TREE_CODE_CLASS (TREE_CODE (t
)))
15253 t
= TREE_OPERAND (t
, 0);
15257 case tcc_comparison
:
15258 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
15259 t
= TREE_OPERAND (t
, 0);
15260 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 0)))
15261 t
= TREE_OPERAND (t
, 1);
15266 case tcc_expression
:
15267 switch (TREE_CODE (t
))
15269 case COMPOUND_EXPR
:
15270 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
15272 t
= TREE_OPERAND (t
, 0);
15276 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1))
15277 || TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 2)))
15279 t
= TREE_OPERAND (t
, 0);
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.  */

tree
round_up_loc (location_t loc, tree value, unsigned int divisor)
{
  tree div = NULL_TREE;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
) != INTEGER_CST
)
15308 div
= build_int_cst (TREE_TYPE (value
), divisor
);
15310 if (multiple_of_p (TREE_TYPE (value
), value
, div
))
  /* If divisor is a power of two, simplify this to bit manipulation.  */
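  /* Illustrative sketch, not from the original source: to round 13 up to
     a multiple of 8, compute (13 + 7) & -8 = 20 & ~7 = 16; a value that
     is already a multiple, such as 16, stays put because adding 7 never
     carries past the low three bits that the mask clears.  */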
  if (pow2_or_zerop (divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
	{
	  wide_int val = wi::to_wide (value);
	  bool overflow_p;

	  if ((val & (divisor - 1)) == 0)
	    return value;

	  overflow_p = TREE_OVERFLOW (value);
	  val += divisor - 1;
	  val &= (int) -divisor;

	  return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
	}
      else
	{
	  tree t;

	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
	  t = build_int_cst (TREE_TYPE (value), - (int) divisor);
	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
	}
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}

/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
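  /* Illustrative sketch, not from the original source: rounding 13 down
     to a multiple of 8 is just 13 & -8 = 8, since -8 has every bit set
     except the low three, so the mask simply discards the remainder.  */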
  if (pow2_or_zerop (divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}

/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */
15401 split_address_to_core_and_offset (tree exp
,
15402 poly_int64_pod
*pbitpos
, tree
*poffset
)
15406 int unsignedp
, reversep
, volatilep
;
15407 poly_int64 bitsize
;
15408 location_t loc
= EXPR_LOCATION (exp
);
15410 if (TREE_CODE (exp
) == ADDR_EXPR
)
15412 core
= get_inner_reference (TREE_OPERAND (exp
, 0), &bitsize
, pbitpos
,
15413 poffset
, &mode
, &unsignedp
, &reversep
,
15415 core
= build_fold_addr_expr_loc (loc
, core
);
15417 else if (TREE_CODE (exp
) == POINTER_PLUS_EXPR
)
15419 core
= TREE_OPERAND (exp
, 0);
15422 *poffset
= TREE_OPERAND (exp
, 1);
15423 if (poly_int_tree_p (*poffset
))
15425 poly_offset_int tem
15426 = wi::sext (wi::to_poly_offset (*poffset
),
15427 TYPE_PRECISION (TREE_TYPE (*poffset
)));
15428 tem
<<= LOG2_BITS_PER_UNIT
;
15429 if (tem
.to_shwi (pbitpos
))
15430 *poffset
= NULL_TREE
;
15437 *poffset
= NULL_TREE
;
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */
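/* Illustrative example, not part of the original source: for
     int a[10];
   the addresses &a[7] and &a[3] share the core &a and differ only by
   constant byte offsets, so this returns true and, on a target where
   int is 4 bytes, stores (7 - 3) * 4 = 16 in *DIFF.  */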
15447 ptr_difference_const (tree e1
, tree e2
, poly_int64_pod
*diff
)
15450 poly_int64 bitpos1
, bitpos2
;
15451 tree toffset1
, toffset2
, tdiff
, type
;
15453 core1
= split_address_to_core_and_offset (e1
, &bitpos1
, &toffset1
);
15454 core2
= split_address_to_core_and_offset (e2
, &bitpos2
, &toffset2
);
15456 poly_int64 bytepos1
, bytepos2
;
15457 if (!multiple_p (bitpos1
, BITS_PER_UNIT
, &bytepos1
)
15458 || !multiple_p (bitpos2
, BITS_PER_UNIT
, &bytepos2
)
15459 || !operand_equal_p (core1
, core2
, 0))
15462 if (toffset1
&& toffset2
)
15464 type
= TREE_TYPE (toffset1
);
15465 if (type
!= TREE_TYPE (toffset2
))
15466 toffset2
= fold_convert (type
, toffset2
);
15468 tdiff
= fold_build2 (MINUS_EXPR
, type
, toffset1
, toffset2
);
15469 if (!cst_and_fits_in_hwi (tdiff
))
15472 *diff
= int_cst_value (tdiff
);
15474 else if (toffset1
|| toffset2
)
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += bytepos1 - bytepos2;
  return true;
}

/* Return OFF converted to a pointer offset type suitable as offset for
   POINTER_PLUS_EXPR.  Use location LOC for this conversion.  */
tree
convert_to_ptrofftype_loc (location_t loc, tree off)
{
  if (ptrofftype_p (TREE_TYPE (off)))
    return off;
  return fold_convert_loc (loc, sizetype, off);
}

/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */

tree
fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
{
  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
			  ptr, convert_to_ptrofftype_loc (loc, off));
}

/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */

tree
fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
{
  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
			  ptr, size_int (off));
}

/* Return a pointer to a NUL-terminated string containing the sequence
   of bytes corresponding to the representation of the object referred to
   by SRC (or a subsequence of such bytes within it if SRC is a reference
   to an initialized constant array plus some constant offset).

   Set *STRSIZE the number of bytes in the constant sequence including
   the terminating NUL byte.  *STRSIZE is equal to sizeof(A) - OFFSET
   where A is the array that stores the constant sequence that SRC points
   to and OFFSET is the byte offset of SRC from the beginning of A.  SRC
   need not point to a string or even an array of characters but may point
   to an object of any type.  */
15525 getbyterep (tree src
, unsigned HOST_WIDE_INT
*strsize
)
15527 /* The offset into the array A storing the string, and A's byte size. */
15535 src
= byte_representation (src
, &offset_node
, &mem_size
, NULL
);
15537 src
= string_constant (src
, &offset_node
, &mem_size
, NULL
);
15541 unsigned HOST_WIDE_INT offset
= 0;
15542 if (offset_node
!= NULL_TREE
)
15544 if (!tree_fits_uhwi_p (offset_node
))
15547 offset
= tree_to_uhwi (offset_node
);
15550 if (!tree_fits_uhwi_p (mem_size
))
  /* ARRAY_SIZE is the byte size of the array the constant sequence
     is stored in and equal to sizeof A.  INIT_BYTES is the number
     of bytes in the constant sequence used to initialize the array,
     including any embedded NULs as well as the terminating NUL (for
     strings), but not including any trailing zeros/NULs past
     the terminating one appended implicitly to a string literal to
     zero out the remainder of the array it's stored in.  For example,
     given:
       const char a[7] = "abc\0d";
       n = strlen (a + 1);
     ARRAY_SIZE is 7, INIT_BYTES is 6, and OFFSET is 1.  For a valid
     (i.e., nul-terminated) string with no embedded nuls, INIT_BYTES
     is equal to strlen (A) + 1.  */
  const unsigned HOST_WIDE_INT array_size = tree_to_uhwi (mem_size);
  unsigned HOST_WIDE_INT init_bytes = TREE_STRING_LENGTH (src);
15569 /* Ideally this would turn into a gcc_checking_assert over time. */
15570 if (init_bytes
> array_size
)
15571 init_bytes
= array_size
;
15573 const char *string
= TREE_STRING_POINTER (src
);
15575 /* Ideally this would turn into a gcc_checking_assert over time. */
15576 if (init_bytes
> array_size
)
15577 init_bytes
= array_size
;
15579 if (init_bytes
== 0 || offset
>= array_size
)
15584 /* Compute and store the number of characters from the beginning
15585 of the substring at OFFSET to the end, including the terminating
15586 nul. Offsets past the initial length refer to null strings. */
15587 if (offset
< init_bytes
)
15588 *strsize
= init_bytes
- offset
;
15594 tree eltype
= TREE_TYPE (TREE_TYPE (src
));
15595 /* Support only properly NUL-terminated single byte strings. */
15596 if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype
)) != 1)
15598 if (string
[init_bytes
- 1] != '\0')
15602 return offset
< init_bytes
? string
+ offset
: "";
/* Return a pointer to a NUL-terminated string corresponding to
   the expression STR referencing a constant string, possibly
   involving a constant offset.  Return null if STR either doesn't
   reference a constant string or if it involves a nonconstant
   offset.  */

const char *
c_getstr (tree str)
{
  return getbyterep (str, NULL);
}

/* Given a tree T, compute which bits in T may be nonzero.  */
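/* Illustrative example, not part of the original source: for the
   expression (x & 0xf0) | 3 the possibly-nonzero bits are
   0xf0 | 0x03 = 0xf3, so a later test of bit 2 (mask 0x04) can be
   folded to zero no matter what x is.  */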
15620 tree_nonzero_bits (const_tree t
)
15622 switch (TREE_CODE (t
))
15625 return wi::to_wide (t
);
15627 return get_nonzero_bits (t
);
15628 case NON_LVALUE_EXPR
:
15630 return tree_nonzero_bits (TREE_OPERAND (t
, 0));
15632 return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t
, 0)),
15633 tree_nonzero_bits (TREE_OPERAND (t
, 1)));
15636 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t
, 0)),
15637 tree_nonzero_bits (TREE_OPERAND (t
, 1)));
15639 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t
, 1)),
15640 tree_nonzero_bits (TREE_OPERAND (t
, 2)));
15642 return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t
, 0)),
15643 TYPE_PRECISION (TREE_TYPE (t
)),
15644 TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t
, 0))));
15646 if (INTEGRAL_TYPE_P (TREE_TYPE (t
)))
15648 wide_int nzbits1
= tree_nonzero_bits (TREE_OPERAND (t
, 0));
15649 wide_int nzbits2
= tree_nonzero_bits (TREE_OPERAND (t
, 1));
15650 if (wi::bit_and (nzbits1
, nzbits2
) == 0)
15651 return wi::bit_or (nzbits1
, nzbits2
);
15655 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
15657 tree type
= TREE_TYPE (t
);
15658 wide_int nzbits
= tree_nonzero_bits (TREE_OPERAND (t
, 0));
15659 wide_int arg1
= wi::to_wide (TREE_OPERAND (t
, 1),
15660 TYPE_PRECISION (type
));
15661 return wi::neg_p (arg1
)
15662 ? wi::rshift (nzbits
, -arg1
, TYPE_SIGN (type
))
15663 : wi::lshift (nzbits
, arg1
);
15667 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
15669 tree type
= TREE_TYPE (t
);
15670 wide_int nzbits
= tree_nonzero_bits (TREE_OPERAND (t
, 0));
15671 wide_int arg1
= wi::to_wide (TREE_OPERAND (t
, 1),
15672 TYPE_PRECISION (type
));
15673 return wi::neg_p (arg1
)
15674 ? wi::lshift (nzbits
, -arg1
)
15675 : wi::rshift (nzbits
, arg1
, TYPE_SIGN (type
));
15682 return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t
)));
#if CHECKING_P

namespace selftest {

/* Helper functions for writing tests of folding trees.  */

/* Verify that the binary op (LHS CODE RHS) folds to CONSTANT.  */

static void
assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
			     tree constant)
{
  ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
}

/* Verify that the binary op (LHS CODE RHS) folds to an NON_LVALUE_EXPR
   wrapping WRAPPED_EXPR.  */

static void
assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
				 tree wrapped_expr)
{
  tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
  ASSERT_NE (wrapped_expr, result);
  ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
  ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
}

/* Verify that various arithmetic binary operations are folded
   correctly.  */

static void
test_arithmetic_folding ()
{
  tree type = integer_type_node;
  tree x = create_tmp_var_raw (type, "x");
  tree zero = build_zero_cst (type);
  tree one = build_int_cst (type, 1);

  /* Addition.  */
  /* 1 <-- (0 + 1) */
  assert_binop_folds_to_const (zero, PLUS_EXPR, one,
			       one);
  assert_binop_folds_to_const (one, PLUS_EXPR, zero,
			       one);

  /* (nonlvalue)x <-- (x + 0) */
  assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
				   x);

  /* Subtraction.  */
  /* 0 <-- (x - x) */
  assert_binop_folds_to_const (x, MINUS_EXPR, x,
			       zero);
  assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
				   x);

  /* Multiplication.  */
  /* 0 <-- (x * 0) */
  assert_binop_folds_to_const (x, MULT_EXPR, zero,
			       zero);

  /* (nonlvalue)x <-- (x * 1) */
  assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
				   x);
}

/* Verify that various binary operations on vectors are folded
   correctly.  */

static void
test_vector_folding ()
{
  tree inner_type = integer_type_node;
  tree type = build_vector_type (inner_type, 4);
  tree zero = build_zero_cst (type);
  tree one = build_one_cst (type);
  tree index = build_index_vector (type, 0, 1);

  /* Verify equality tests that return a scalar boolean result.  */
  tree res_type = boolean_type_node;
  ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, index, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
					       index, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type,
					       index, index)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
					      index, index)));
}

/* Verify folding of VEC_DUPLICATE_EXPRs.  */

static void
test_vec_duplicate_folding ()
{
  scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
  machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
  /* This will be 1 if VEC_MODE isn't a vector mode.  */
  poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);

  tree type = build_vector_type (ssizetype, nunits);
  tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
  tree dup5_cst = build_vector_from_val (type, ssize_int (5));
  ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
}

/* Run all of the selftests within this file.  */

void
fold_const_c_tests ()
{
  test_arithmetic_folding ();
  test_vector_folding ();
  test_vec_duplicate_folding ();
}

} // namespace selftest

#endif /* CHECKING_P */