/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

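/* Illustrative sketch, not part of the original file: how a caller
   might use the entry points described above.  The function name and
   the assumption that SIZE_EXPR has sizetype are hypothetical.  */
#if 0
static tree
example_entry_points (tree size_expr)
{
  /* size_int wraps a host integer in a sizetype INTEGER_CST and
     size_binop folds the arithmetic, so TWELVE is the sizetype
     constant 12.  */
  tree twelve = size_binop (MULT_EXPR, size_int (3), size_int (4));

  /* fold simplifies a generic tree; if SIZE_EXPR is constant the
     whole sum collapses to a single INTEGER_CST node.  */
  return fold (build2 (PLUS_EXPR, sizetype, size_expr, twelve));
}
#endif
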
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree-ssa-operands.h"
#include "optabs-query.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "langhooks.h"
#include "generic-match.h"
#include "gimple-fold.h"
#include "tree-into-ssa.h"
#include "case-cfn-macros.h"

#ifndef LOAD_EXTEND_OP
#define LOAD_EXTEND_OP(M) UNKNOWN
#endif

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */

int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

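/* Illustrative sketch, not part of the original file: each primitive
   outcome gets its own bit -- LT = 1, EQ = 2, GT = 4, UNORD = 8 -- so
   combining two comparisons of the same operands reduces to bitwise
   arithmetic on the codes.  For example, "a <= b && a >= b" becomes
   COMPCODE_LE & COMPCODE_GE, i.e. (LT|EQ) & (GT|EQ) == EQ.  The helper
   name below is hypothetical.  */
#if 0
static enum comparison_code
example_and_compcodes (enum comparison_code lhs, enum comparison_code rhs)
{
  /* ANDing two comparison conditions intersects the sets of outcomes
     for which they hold.  */
  return (enum comparison_code) (lhs & rhs);
}
#endif
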
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
				HOST_WIDE_INT, HOST_WIDE_INT, int, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
				    HOST_WIDE_INT *,
				    machine_mode *, int *, int *, int *,
				    tree *, tree *);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
					tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static bool vec_cst_ctor_to_array (tree, tree *);

/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify x in place,
   if location can and needs to be set, unshare it.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}

/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

static tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}

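/* Illustrative sketch, not part of the original file: the helper above
   only folds exact divisions.  The function name and arguments are
   hypothetical.  */
#if 0
static tree
example_exact_division (tree twelve, tree four)
{
  /* With INTEGER_CSTs 12 and 4 this returns the constant 3; with 13
     and 4 it returns NULL_TREE because the remainder is nonzero.  */
  return div_if_zero_remainder (twelve, four);
}
#endif
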
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

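/* Illustrative sketch, not part of the original file: the expected
   pairing of the deferral routines above.  A pass that folds
   speculatively brackets the calls, then decides afterwards whether
   the deferred -Wstrict-overflow warning should really be issued.
   The function name is hypothetical.  */
#if 0
static tree
example_deferred_fold (tree expr, bool result_was_used)
{
  fold_defer_overflow_warnings ();
  tree folded = fold (expr);
  /* Issue the pending warning only if the folded result was used;
     passing a NULL stmt attributes the warning to input_location.  */
  fold_undefer_overflow_warnings (result_was_used, NULL, 0);
  return folded;
}
#endif
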
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    CASE_CFN_ASIN:
    CASE_CFN_ASINH:
    CASE_CFN_ATAN:
    CASE_CFN_ATANH:
    CASE_CFN_CASIN:
    CASE_CFN_CASINH:
    CASE_CFN_CATAN:
    CASE_CFN_CATANH:
    CASE_CFN_CBRT:
    CASE_CFN_CPROJ:
    CASE_CFN_CSIN:
    CASE_CFN_CSINH:
    CASE_CFN_CTAN:
    CASE_CFN_CTANH:
    CASE_CFN_ERF:
    CASE_CFN_LLROUND:
    CASE_CFN_LROUND:
    CASE_CFN_ROUND:
    CASE_CFN_SIN:
    CASE_CFN_SINH:
    CASE_CFN_TAN:
    CASE_CFN_TANH:
    CASE_CFN_TRUNC:
      return true;

    CASE_CFN_LLRINT:
    CASE_CFN_LRINT:
    CASE_CFN_NEARBYINT:
    CASE_CFN_RINT:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	int count = TYPE_VECTOR_SUBPARTS (type), i;

	for (i = 0; i < count; i++)
	  if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  || HONOR_SIGNED_ZEROS (element_mode (type))
	  || (INTEGRAL_TYPE_P (type)
	      && ! TYPE_OVERFLOW_WRAPS (type)))
	break;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	     && !HONOR_SIGNED_ZEROS (element_mode (type))
	     && (! INTEGRAL_TYPE_P (type)
		 || TYPE_OVERFLOW_WRAPS (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* INT_MIN/n * n doesn't overflow while negating one operand it does
	 if n is a power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		 && ! integer_pow2p (TREE_OPERAND (t, 0)))
		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
		    && ! integer_pow2p (TREE_OPERAND (t, 1)))))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      if (negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if (! INTEGRAL_TYPE_P (TREE_TYPE (t))
	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	      && ! integer_onep (TREE_OPERAND (t, 1))))
	return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}

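/* Illustrative sketch, not part of the original file: what the
   predicate above reports for two shapes of tree, assuming a signed
   int type without -fwrapv.  The function name is hypothetical.  */
#if 0
static void
example_negate_expr_p (tree a, tree b)
{
  /* A - B can be cheaply negated as B - A (integral type, no signed
     zeros to worry about).  */
  bool minus_ok = negate_expr_p (build2 (MINUS_EXPR, integer_type_node,
					 a, b));
  /* An INTEGER_CST holding INT_MIN cannot be negated without
     overflow, so the predicate returns false for it.  */
  bool int_min_ok = negate_expr_p (TYPE_MIN_VALUE (integer_type_node));
  (void) minus_ok, (void) int_min_ok;
}
#endif
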
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	if (rpart && ipart)
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	int count = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts = XALLOCAVEC (tree, count);

	for (i = 0; i < count; i++)
	  {
	    elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      if (negate_expr_p (TREE_OPERAND (t, 0)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (TREE_OPERAND (t, 0)),
				TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if ((! INTEGRAL_TYPE_P (TREE_TYPE (t))
	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	       && ! integer_onep (TREE_OPERAND (t, 1))))
	  && negate_expr_p (TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0),
				negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}

/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}

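/* Illustrative sketch, not part of the original file: decomposing
   "x + 4" with CODE == PLUS_EXPR.  The function name and argument are
   hypothetical.  */
#if 0
static tree
example_split_tree (tree sum_of_x_and_4)
{
  tree con, lit, minus_lit;
  tree var = split_tree (sum_of_x_and_4, PLUS_EXPR,
			 &con, &lit, &minus_lit, /*negate_p=*/0);
  /* Here the literal 4 lands in LIT, there is no
     TREE_CONSTANT-but-not-constant part (CON == NULL), MINUS_LIT is
     NULL, and x is returned as the variable part.  */
  return var;
}
#endif
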
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
		   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
				  TYPE_SIGN (TREE_TYPE (parg2)));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RSHIFT_EXPR)
	    code = LSHIFT_EXPR;
	  else
	    code = RSHIFT_EXPR;
	}

      if (code == RSHIFT_EXPR)
	/* It's unclear from the C standard whether shifts can overflow.
	   The following code ignores overflow; perhaps a C standard
	   interpretation ruling is needed.  */
	res = wi::rshift (arg1, arg2, sign);
      else
	res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, arg2);
      else
	res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
		      (((sign == SIGNED || overflowable == -1)
			&& overflow)
		       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}

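/* Illustrative sketch, not part of the original file: folding two
   INTEGER_CSTs through the worker above.  The function name is
   hypothetical.  */
#if 0
static tree
example_int_const_binop (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  /* Yields the INTEGER_CST 5; a code the worker does not handle
     would yield NULL_TREE instead.  */
  return int_const_binop (PLUS_EXPR, two, three);
}
#endif
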
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      if (code == POINTER_PLUS_EXPR)
	return int_const_binop (PLUS_EXPR,
				arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && real_equal (&d2, &dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  if (TREE_CODE (arg2) != FIXED_CST)
	    return NULL_TREE;
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    if (TREE_CODE (arg2) != INTEGER_CST)
	      return NULL_TREE;
	    wide_int w2 = arg2;
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.elt (0);
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru ... */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to minimize
		 overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  elts[i] = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and return NULL_TREE */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);

	  elts[i] = const_binop (code, elem1, arg2);

	  /* It is possible that const_binop cannot handle the given
	     code and return NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }
  return NULL_TREE;
}

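/* Illustrative sketch, not part of the original file: const_binop
   dispatches on the kind of constant, so the same call folds integer,
   real, fixed-point, complex and vector constants.  The function name
   and arguments are hypothetical.  */
#if 0
static tree
example_const_binop (tree real_cst_a, tree real_cst_b)
{
  /* For two REAL_CSTs this goes through real_arithmetic, and returns
     NULL_TREE when folding is not allowed (e.g. signaling NaNs or a
     rounding-mode-dependent result under -frounding-math).  */
  return const_binop (MULT_EXPR, real_cst_a, real_cst_b);
}
#endif
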
/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument put those cases that need it here.  */
  switch (code)
    {
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
	   && TREE_CODE (arg2) == REAL_CST)
	  || (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg2) == INTEGER_CST))
	return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
	if (TREE_CODE (arg1) != VECTOR_CST
	    || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg1, elts)
	    || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
	  return NULL_TREE;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
					  ? NOP_EXPR : FIX_TRUNC_EXPR,
					  TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
	unsigned int out, ofs, scale;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 4);
	if (!vec_cst_ctor_to_array (arg1, elts)
	    || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
	  return NULL_TREE;

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	for (out = 0; out < nelts; out++)
	  {
	    unsigned int in1 = (out << scale) + ofs;
	    unsigned int in2 = in1 + nelts * 2;
	    tree t1, t2;

	    t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
	    t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    elts[out] = const_binop (MULT_EXPR, t1, t2);
	    if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
		       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}

/* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
   Return zero if computing the constants is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      /* If the source address is 0, and the source address space
	 cannot have a valid object at 0, fold to dest type null.  */
      if (integer_zerop (arg0)
	  && !(targetm.addr_space.zero_address_valid
	       (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
	return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
	/* Can't call fold_negate_const directly here as that doesn't
	   handle all cases and we might not be able to negate some
	   constants.  */
	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
	if (tem && CONSTANT_CLASS_P (tem))
	  return tem;
	break;
      }

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
					  TREE_TYPE (type));
	  return build_complex (type, TREE_REALPART (arg0), ipart);
	}
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree *elements;
	  tree elem;
	  unsigned count = VECTOR_CST_NELTS (arg0), i;

	  elements = XALLOCAVEC (tree, count);
	  for (i = 0; i < count; i++)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
		break;
	      elements[i] = elem;
	    }
	  if (i == count)
	    return build_vector (type, elements);
	}
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;
	enum tree_code subcode;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 2);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR))
	  elts += nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else
	  subcode = FLOAT_EXPR;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
	unsigned int nelts, i;
	tree *elts;
	enum tree_code subcode;

	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;
	nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	switch (code)
	  {
	  case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
	  case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
	  case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
	  default: gcc_unreachable ();
	  }

	for (i = 1; i < nelts; i++)
	  {
	    elts[0] = const_binop (subcode, elts[0], elts[i]);
	    if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
	      return NULL_TREE;
	  }

	return elts[0];
      }

    default:
      break;
    }

  return NULL_TREE;
}

/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}

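/* Illustrative sketch, not part of the original file: size_binop
   passes overflowable == -1 to the worker, so an overflowing sizetype
   computation comes back with TREE_OVERFLOW set even though sizetype
   is unsigned.  The function name and argument are hypothetical.  */
#if 0
static bool
example_size_overflow (tree huge_size)
{
  tree prod = size_binop (MULT_EXPR, huge_size, size_int (2));
  /* True if the multiplication wrapped around sizetype.  */
  return TREE_OVERFLOW (prod);
}
#endif
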
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}

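/* Illustrative sketch, not part of the original file: because the
   operands are unsigned sizetype but the result type is the signed
   counterpart, size_diffop can represent a negative difference.  The
   function name is hypothetical.  */
#if 0
static tree
example_size_diffop (void)
{
  /* 4 - 8 in sizetype would wrap; as an ssizetype result it folds
     to -4.  */
  return size_diffop_loc (UNKNOWN_LOCATION, size_int (4), size_int (8));
}
#endif
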
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
			 !POINTER_TYPE_P (TREE_TYPE (arg1)),
			 TREE_OVERFLOW (arg1));
}

/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (real_less (&r, &l))
	{
	  overflow = true;
	  val = lt;
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (real_less (&u, &r))
	    {
	      overflow = true;
	      val = ut;
	    }
	}
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  machine_mode mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
			  HOST_BITS_PER_DOUBLE_INT,
			  SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
				HOST_BITS_PER_DOUBLE_INT,
				SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     We do so by checking if the fractional bits are not zero, and if
     so adding 1 to temp.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type (type, temp, -1,
		      (temp.is_negative ()
		       && (TYPE_UNSIGNED (type)
			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
		      | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	   && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
	   && REAL_VALUE_ISINF (value)
	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
			      TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;
  double_int di;

  gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);

  di.low = TREE_INT_CST_ELT (arg1, 0);
  if (TREE_INT_CST_NUNITS (arg1) == 1)
    di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
  else
    di.high = TREE_INT_CST_ELT (arg1, 1);

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
				       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
					&TREE_REAL_CST (arg1),
					TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_fixed_from_real (type, arg1);
    }
  else if (TREE_CODE (type) == VECTOR_TYPE)
    {
      if (TREE_CODE (arg1) == VECTOR_CST
	  && TYPE_VECTOR_SUBPARTS (type) == VECTOR_CST_NELTS (arg1))
	{
	  int len = TYPE_VECTOR_SUBPARTS (type);
	  tree elttype = TREE_TYPE (type);
	  tree *v = XALLOCAVEC (tree, len);
	  for (int i = 0; i < len; ++i)
	    {
	      tree elt = VECTOR_CST_ELT (arg1, i);
	      tree cvt = fold_convert_const (code, elttype, elt);
	      if (cvt == NULL_TREE)
		return NULL_TREE;
	      v[i] = cvt;
	    }
	  return build_vector (type, v);
	}
    }
  return NULL_TREE;
}

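/* Illustrative sketch, not part of the original file: constant
   conversions route through the helpers above based on the target
   type and the kind of constant.  The function name and argument are
   hypothetical.  */
#if 0
static tree
example_fold_convert_const (tree double_cst)
{
  /* A REAL_CST converted to an integer type uses
     fold_convert_const_int_from_real; NaNs fold to zero and
     out-of-range values saturate, with TREE_OVERFLOW set, per the
     semantics described there.  */
  return fold_convert_const (FIX_TRUNC_EXPR, integer_type_node, double_cst);
}
#endif
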
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}

/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
	      && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}

2173 /* Convert expression ARG to type TYPE. Used by the middle-end for
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
	  && (TYPE_ADDR_SPACE (TREE_TYPE (type))
	      != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
	return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, REALPART_EXPR,
						  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == FIXED_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

	case REAL_TYPE:
	  return fold_build1_loc (loc, NOP_EXPR, type, arg);

	case FIXED_POINT_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
	  || TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    goto fold_convert_exit;
	}

      switch (TREE_CODE (orig))
	{
	case FIXED_POINT_TYPE:
	case INTEGER_TYPE:
	case ENUMERAL_TYPE:
	case BOOLEAN_TYPE:
	case REAL_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	case REAL_TYPE:
	case FIXED_POINT_TYPE:
	  return fold_build2_loc (loc, COMPLEX_EXPR, type,
				  fold_convert_loc (loc, TREE_TYPE (type), arg),
				  fold_convert_loc (loc, TREE_TYPE (type),
						    integer_zero_node));
	case COMPLEX_TYPE:
	  {
	    tree rpart, ipart;

	    if (TREE_CODE (arg) == COMPLEX_EXPR)
	      {
		rpart = fold_convert_loc (loc, TREE_TYPE (type),
					  TREE_OPERAND (arg, 0));
		ipart = fold_convert_loc (loc, TREE_TYPE (type),
					  TREE_OPERAND (arg, 1));
		return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	      }

	    arg = save_expr (arg);
	    rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	    ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
	    rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
	    ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
	    return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	  }

	default:
	  gcc_unreachable ();
	}

    case VECTOR_TYPE:
      if (integer_zerop (arg))
	return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
		  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
	return fold_build1_loc (loc, NOP_EXPR, type, arg);
      gcc_unreachable ();
    }
 fold_convert_exit:
  protected_set_expr_location_unshare (tem, loc);
  return tem;
}
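
/* Usage sketch (illustrative, not from the sources): with a location LOC
   and an integer-typed tree N,

     tree d = fold_convert_loc (loc, double_type_node, n);

   yields a REAL_CST immediately when N is an INTEGER_CST (via
   fold_convert_const) and a FLOAT_EXPR wrapping N otherwise.  */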
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (const_tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
  {
  case VAR_DECL:
  case PARM_DECL:
  case RESULT_DECL:
  case LABEL_DECL:
  case FUNCTION_DECL:
  case SSA_NAME:

  case COMPONENT_REF:
  case MEM_REF:
  case INDIRECT_REF:
  case ARRAY_REF:
  case ARRAY_RANGE_REF:
  case BIT_FIELD_REF:
  case OBJ_TYPE_REF:

  case REALPART_EXPR:
  case IMAGPART_EXPR:
  case PREINCREMENT_EXPR:
  case PREDECREMENT_EXPR:
  case SAVE_EXPR:
  case TRY_CATCH_EXPR:
  case WITH_CLEANUP_EXPR:
  case COMPOUND_EXPR:
  case MODIFY_EXPR:
  case TARGET_EXPR:
  case COND_EXPR:
  case BIND_EXPR:
    break;

  default:
    /* Assume the worst for front-end tree codes.  */
    if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
      break;
    return false;
  }

  return true;
}
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue_loc (location_t loc, tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;
  return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
}
/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue_loc (location_t loc, tree x)
{
  return protected_set_expr_location_unshare (x, loc);
}
/* Given a tree comparison code, return the code that is the logical inverse.
   It is generally not safe to do this for floating-point comparisons, except
   for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
   ERROR_MARK in this case.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
      && code != ORDERED_EXPR && code != UNORDERED_EXPR)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
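
/* For example: with NaNs honored, the inverse of x < y is not x >= y
   (both are false when an operand is NaN) but the unordered variant
   x unge y, hence LT_EXPR maps to UNGE_EXPR above; and under
   -ftrapping-math even that rewrite is refused (ERROR_MARK), since the
   unordered comparison no longer traps where the ordered one did.  */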
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case LTGT_EXPR:
    case UNEQ_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    case UNGT_EXPR:
      return UNLT_EXPR;
    case UNGE_EXPR:
      return UNLE_EXPR;
    case UNLT_EXPR:
      return UNGT_EXPR;
    case UNLE_EXPR:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
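
/* A worked example of the encoding (sketch; bit values as given by enum
   comparison_code): each possible outcome of a comparison owns one bit,
   "less" = 1, "equal" = 2, "greater" = 4, "unordered" = 8.  Combining
   two comparisons of the same operands is then plain bit arithmetic:

     COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE  (x < y || x == y -> x <= y)
     COMPCODE_LE & COMPCODE_GE == COMPCODE_EQ  (x <= y && x >= y -> x == y)

   which is what combine_comparisons below relies on.  */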
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

tree
combine_comparisons (location_t loc,
		     enum tree_code code, enum tree_code lcode,
		     enum tree_code rcode, tree truth_type,
		     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (ll_arg);
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  enum comparison_code compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
	 which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
	compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
	compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
	 under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
		   && (lcompcode != COMPCODE_EQ)
		   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
		   && (rcompcode != COMPCODE_EQ)
		   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
		  && (compcode != COMPCODE_EQ)
		  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
	 such that the RHS, if evaluated, will never trap.  For
	 example, in ORD (x, y) && (x < y), we evaluate the RHS only
	 if neither x nor y is NaN.  (This is a mixed blessing: for
	 example, the expression above will never trap, hence
	 optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
	  || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
	rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
	 trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
	  && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
	return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
	return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    {
      enum tree_code tcode;

      tcode = compcode_to_comparison ((enum comparison_code) compcode);
      return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
    }
}
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.

   If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
   not values of expressions.

   Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
   any operand with side effect.  This is unnecessarily conservative in the
   case we know that arg0 and arg1 are in disjoint code paths (such as in
   ?: operator).  In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
   addresses with TREE_CONSTANT flag set so we know that &var == &var
   even if var is volatile.  */

int
operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
      || TREE_TYPE (arg0) == error_mark_node
      || TREE_TYPE (arg1) == error_mark_node)
    return 0;

  /* Similar, if either does not have a type (like a released SSA name),
     they aren't equal.  */
  if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
    return 0;

  /* We cannot consider pointers to different address space equal.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && POINTER_TYPE_P (TREE_TYPE (arg1))
      && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
	  != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
    return 0;

  /* Check equality of integer constants before bailing out due to
     precision differences.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* Address of INTEGER_CST is not defined; check that we did not forget
	 to drop the OEP_ADDRESS_OF flags.  */
      gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
      return tree_int_cst_equal (arg0, arg1);
    }

  if (!(flags & OEP_ADDRESS_OF))
    {
      /* If both types don't have the same signedness, then we can't consider
	 them equal.  We must check this before the STRIP_NOPS calls
	 because they may change the signedness of the arguments.  As pointers
	 strictly don't have a signedness, require either two pointers or
	 two non-pointers as well.  */
      if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
	  || POINTER_TYPE_P (TREE_TYPE (arg0))
	     != POINTER_TYPE_P (TREE_TYPE (arg1)))
	return 0;

      /* If both types don't have the same precision, then it is not safe
	 to strip NOPs.  */
      if (element_precision (TREE_TYPE (arg0))
	  != element_precision (TREE_TYPE (arg1)))
	return 0;

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
    }
#if 0
  /* FIXME: Fortran FE currently produce ADDR_EXPR of NOP_EXPR.  Enable the
     sanity check once the issue is solved.  */
  else
    /* Addresses of conversions and SSA_NAMEs (and many other things)
       are not defined.  Check that we did not forget to drop the
       OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags.  */
    gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
			 && TREE_CODE (arg0) != SSA_NAME);
#endif

  /* In case both args are comparisons but with different comparison
     code, try to swap the comparison operands of one arg to produce
     a match and compare that variant.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      && COMPARISON_CLASS_P (arg0)
      && COMPARISON_CLASS_P (arg1))
    {
      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));

      if (TREE_CODE (arg0) == swap_code)
	return operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 1), flags)
	       && operand_equal_p (TREE_OPERAND (arg0, 1),
				   TREE_OPERAND (arg1, 0), flags);
    }

  if (TREE_CODE (arg0) != TREE_CODE (arg1))
    {
      /* NOP_EXPR and CONVERT_EXPR are considered equal.  */
      if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
	;
      else if (flags & OEP_ADDRESS_OF)
	{
	  /* If we are interested in comparing addresses ignore
	     MEM_REF wrappings of the base that can appear just for
	     TBAA reasons.  */
	  if (TREE_CODE (arg0) == MEM_REF
	      && DECL_P (arg1)
	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
	      && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
	      && integer_zerop (TREE_OPERAND (arg0, 1)))
	    return 1;
	  else if (TREE_CODE (arg1) == MEM_REF
		   && DECL_P (arg0)
		   && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
		   && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
		   && integer_zerop (TREE_OPERAND (arg1, 1)))
	    return 1;
	  return 0;
	}
      else
	return 0;
    }

  /* When not checking addresses, this is needed for conversions and for
     COMPONENT_REF.  Might as well play it safe and always test this.  */
  if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
	  && !(flags & OEP_ADDRESS_OF)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.
     If we are taking an invariant address of two identical objects
     they are necessarily equal as well.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
	  || (flags & OEP_MATCH_SIDE_EFFECTS)
	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
	return tree_int_cst_equal (arg0, arg1);

      case FIXED_CST:
	return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
				       TREE_FIXED_CST (arg1));

      case REAL_CST:
	if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
	  return 1;

	if (!HONOR_SIGNED_ZEROS (arg0))
	  {
	    /* If we do not distinguish between signed and unsigned zero,
	       consider them equal.  */
	    if (real_zerop (arg0) && real_zerop (arg1))
	      return 1;
	  }
	return 0;

      case VECTOR_CST:
	{
	  unsigned i;

	  if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
	    return 0;

	  for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
	    {
	      if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
				    VECTOR_CST_ELT (arg1, i), flags))
		return 0;
	    }
	  return 1;
	}

      case COMPLEX_CST:
	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
				 flags)
		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
				    flags));

      case STRING_CST:
	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
		&& ! memcmp (TREE_STRING_POINTER (arg0),
			     TREE_STRING_POINTER (arg1),
			     TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
	gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
				flags | OEP_ADDRESS_OF
				| OEP_MATCH_SIDE_EFFECTS);
      case CONSTRUCTOR:
	/* In GIMPLE empty constructors are allowed in initializers of
	   aggregates.  */
	return (!vec_safe_length (CONSTRUCTOR_ELTS (arg0))
		&& !vec_safe_length (CONSTRUCTOR_ELTS (arg1)));
      default:
	break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;

/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, they both
   must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),	\
				    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N)				\
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
	{
	CASE_CONVERT:
	case FIX_TRUNC_EXPR:
	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
	    return 0;
	  break;
	default:
	  break;
	}

      return OP_SAME (0);

    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
	return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 1), flags)
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
	 dereferencing contain a side effect, these cannot be equal,
	 but their addresses can be.  */
      if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
	  && (TREE_SIDE_EFFECTS (arg0)
	      || TREE_SIDE_EFFECTS (arg1)))
	return 0;

      switch (TREE_CODE (arg0))
	{
	case INDIRECT_REF:
	  if (!(flags & OEP_ADDRESS_OF)
	      && (TYPE_ALIGN (TREE_TYPE (arg0))
		  != TYPE_ALIGN (TREE_TYPE (arg1))))
	    return 0;
	  flags &= ~OEP_ADDRESS_OF;
	  return OP_SAME (0);

	case REALPART_EXPR:
	case IMAGPART_EXPR:
	case VIEW_CONVERT_EXPR:
	  return OP_SAME (0);

	case TARGET_MEM_REF:
	case MEM_REF:
	  if (!(flags & OEP_ADDRESS_OF))
	    {
	      /* Require equal access sizes */
	      if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
		  && (!TYPE_SIZE (TREE_TYPE (arg0))
		      || !TYPE_SIZE (TREE_TYPE (arg1))
		      || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
					   TYPE_SIZE (TREE_TYPE (arg1)),
					   flags)))
		return 0;
	      /* Verify that accesses are TBAA compatible.  */
	      if (flag_strict_aliasing
		  && (!alias_ptr_types_compatible_p
		        (TREE_TYPE (TREE_OPERAND (arg0, 1)),
		         TREE_TYPE (TREE_OPERAND (arg1, 1)))
		      || (MR_DEPENDENCE_CLIQUE (arg0)
			  != MR_DEPENDENCE_CLIQUE (arg1))
		      || (MR_DEPENDENCE_BASE (arg0)
			  != MR_DEPENDENCE_BASE (arg1))))
		return 0;
	      /* Verify that alignment is compatible.  */
	      if (TYPE_ALIGN (TREE_TYPE (arg0))
		  != TYPE_ALIGN (TREE_TYPE (arg1)))
		return 0;
	    }
	  flags &= ~OEP_ADDRESS_OF;
	  return (OP_SAME (0) && OP_SAME (1)
		  /* TARGET_MEM_REF require equal extra operands.  */
		  && (TREE_CODE (arg0) != TARGET_MEM_REF
		      || (OP_SAME_WITH_NULL (2)
			  && OP_SAME_WITH_NULL (3)
			  && OP_SAME_WITH_NULL (4))));

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  /* Operands 2 and 3 may be null.
	     Compare the array index by value if it is constant first as we
	     may have different types but same value here.  */
	  if (!OP_SAME (0))
	    return 0;
	  flags &= ~OEP_ADDRESS_OF;
	  return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
				       TREE_OPERAND (arg1, 1))
		   || OP_SAME (1))
		  && OP_SAME_WITH_NULL (2)
		  && OP_SAME_WITH_NULL (3));

	case COMPONENT_REF:
	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
	     may be NULL when we're called to compare MEM_EXPRs.  */
	  if (!OP_SAME_WITH_NULL (0)
	      || !OP_SAME (1))
	    return 0;
	  flags &= ~OEP_ADDRESS_OF;
	  return OP_SAME_WITH_NULL (2);

	case BIT_FIELD_REF:
	  if (!OP_SAME (0))
	    return 0;
	  flags &= ~OEP_ADDRESS_OF;
	  return OP_SAME (1) && OP_SAME (2);

	default:
	  return 0;
	}

    case tcc_expression:
      switch (TREE_CODE (arg0))
	{
	case ADDR_EXPR:
	  /* Be sure we pass right ADDRESS_OF flag.  */
	  gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
	  return operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0),
				  flags | OEP_ADDRESS_OF);

	case TRUTH_NOT_EXPR:
	  return OP_SAME (0);

	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	  return OP_SAME (0) && OP_SAME (1);

	case FMA_EXPR:
	case WIDEN_MULT_PLUS_EXPR:
	case WIDEN_MULT_MINUS_EXPR:
	  if (!OP_SAME (2))
	    return 0;
	  /* The multiplication operands are commutative.  */
	  /* FALLTHRU */

	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  if (OP_SAME (0) && OP_SAME (1))
	    return 1;

	  /* Otherwise take into account this is a commutative operation.  */
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 1), flags)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 0), flags));

	case COND_EXPR:
	case VEC_COND_EXPR:
	case DOT_PROD_EXPR:
	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

	default:
	  return 0;
	}

    case tcc_vl_exp:
      switch (TREE_CODE (arg0))
	{
	case CALL_EXPR:
	  if ((CALL_EXPR_FN (arg0) == NULL_TREE)
	      != (CALL_EXPR_FN (arg1) == NULL_TREE))
	    /* If the CALL_EXPRs are not both internal or both normal
	       function calls, then they are not equal.  */
	    return 0;
	  else if (CALL_EXPR_FN (arg0) == NULL_TREE)
	    {
	      /* If the CALL_EXPRs call different internal functions, then they
		 are not equal.  */
	      if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
		return 0;
	    }
	  else
	    {
	      /* If the CALL_EXPRs call different functions, then they are not
		 equal.  */
	      if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
				     flags))
		return 0;
	    }

	  /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS.  */
	  {
	    unsigned int cef = call_expr_flags (arg0);
	    if (flags & OEP_PURE_SAME)
	      cef &= ECF_CONST | ECF_PURE;
	    else
	      cef &= ECF_CONST;
	    if (!cef)
	      return 0;
	  }

	  /* Now see if all the arguments are the same.  */
	  {
	    const_call_expr_arg_iterator iter0, iter1;
	    const_tree a0, a1;
	    for (a0 = first_const_call_expr_arg (arg0, &iter0),
		   a1 = first_const_call_expr_arg (arg1, &iter1);
		 a0 && a1;
		 a0 = next_const_call_expr_arg (&iter0),
		   a1 = next_const_call_expr_arg (&iter1))
	      if (! operand_equal_p (a0, a1, flags))
		return 0;

	    /* If we get here and both argument lists are exhausted
	       then the CALL_EXPRs are equal.  */
	    return ! (a0 || a1);
	  }
	default:
	  return 0;
	}

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
	      && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
	      && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    case tcc_exceptional:
      if (TREE_CODE (arg0) == CONSTRUCTOR)
	{
	  /* In GIMPLE constructors are used only to build vectors from
	     elements.  Individual elements in the constructor must be
	     indexed in increasing order and form an initial sequence.

	     We make no effort to compare constructors in generic.
	     (see sem_variable::equals in ipa-icf which can do so for
	      constants).  */
	  if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
	      || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
	    return 0;

	  /* Be sure that vectors constructed have the same representation.
	     We only tested element precision and modes to match.
	     Vectors may be BLKmode and thus also check that the number of
	     parts match.  */
	  if (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))
	      != TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)))
	    return 0;

	  vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
	  vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
	  unsigned int len = vec_safe_length (v0);

	  if (len != vec_safe_length (v1))
	    return 0;

	  for (unsigned int i = 0; i < len; i++)
	    {
	      constructor_elt *c0 = &(*v0)[i];
	      constructor_elt *c1 = &(*v1)[i];

	      if (!operand_equal_p (c0->value, c1->value, flags)
		  /* In GIMPLE the indexes can be either NULL or matching i.
		     Double check this so we won't get false
		     positives for GENERIC.  */
		  || (c0->index
		      && (TREE_CODE (c0->index) != INTEGER_CST
			  || compare_tree_int (c0->index, i)))
		  || (c1->index
		      && (TREE_CODE (c1->index) != INTEGER_CST
			  || compare_tree_int (c1->index, i))))
		return 0;
	    }
	  return 1;
	}
      return 0;

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}
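
/* Illustrative notes (not from the sources): operand_equal_p
   distinguishes the REAL_CSTs -0.0 and 0.0 when signed zeros are
   honored even though -0.0 == 0.0 at run time; and comparing &v with
   &v for a volatile V succeeds only because the ADDR_EXPR case above
   recurses with OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS set.  */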
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
	 to match the longer operand.  */
      primarg1 = fold_convert (signed_or_unsigned_type_for
			       (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
	return 1;
    }

  return 0;
}
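
/* Worked example (illustrative): for signed chars C1 and C2, the
   front-end comparison (int) c1 < (int) c2 may have been shortened by
   shorten_compare to c1 < c2.  With ARG0 = c1, ARG1 = (int) c1 and
   OTHER = (int) c2, both ARG1 and OTHER narrow to signed char with the
   same signedness, so ARG0 is recognized as the shortened ARG1.  */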
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
	       || code == COMPOUND_EXPR))
    tclass = tcc_binary;

  else if (tclass == tcc_expression && code == SAVE_EXPR
	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
	 too complex to handle.  */
      if (*cval1 || *cval2)
	return 0;

      tclass = tcc_unary;
      *save_p = 1;
    }

  switch (tclass)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
				      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
				     cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
					cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
					cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
	 the second operand, we know *CVAL1 can't be zero.  It must be that
	 one side of the comparison is each of the values; test for the
	 case where this isn't true by failing if the two operands
	 are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
			   TREE_OPERAND (arg, 1), 0))
	return 0;

      if (*cval1 == 0)
	*cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
	;
      else
	return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
	;
      else
	return 0;

      return 1;

    default:
      return 0;
    }
}
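
/* E.g. (sketch): for (a < b) | (a == b) every comparison mentions only
   A and B, so the walk sets *CVAL1 = a, *CVAL2 = b and returns 1,
   whereas (a < b) | (c == d) fails because four distinct comparison
   operands appear.  */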
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (location_t loc, tree arg, tree old0, tree new0,
	    tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    tclass = tcc_binary;

  switch (tclass)
    {
    case tcc_unary:
      return fold_build1_loc (loc, code, type,
			      eval_subst (loc, TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2_loc (loc, code, type,
			      eval_subst (loc, TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1),
			      eval_subst (loc, TREE_OPERAND (arg, 1),
					  old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
	{
	case SAVE_EXPR:
	  return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
			     old1, new1);

	case COMPOUND_EXPR:
	  return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
			     old1, new1);

	case COND_EXPR:
	  return fold_build3_loc (loc, code, type,
				  eval_subst (loc, TREE_OPERAND (arg, 0),
					      old0, new0, old1, new1),
				  eval_subst (loc, TREE_OPERAND (arg, 1),
					      old0, new0, old1, new1),
				  eval_subst (loc, TREE_OPERAND (arg, 2),
					      old0, new0, old1, new1));
	default:
	  break;
	}
      /* Fall through - ???  */

    case tcc_comparison:
      {
	tree arg0 = TREE_OPERAND (arg, 0);
	tree arg1 = TREE_OPERAND (arg, 1);

	/* We need to check both for exact equality and tree equality.  The
	   former will be true if the operand has a side-effect.  In that
	   case, we know the operand occurred exactly once.  */

	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
	  arg0 = new0;
	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
	  arg0 = new1;

	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
	  arg1 = new0;
	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
	  arg1 = new1;

	return fold_build2_loc (loc, code, type, arg0, arg1);
      }

    default:
      return arg;
    }
}
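
/* Continuing the example above (sketch): once twoval_comparison_p has
   found CVAL1 = a and CVAL2 = b in (a < b) | (a == b), the caller can
   evaluate the whole expression for each possible ordering of A and B
   by substituting constant pairs, e.g.

     eval_subst (loc, arg, a, integer_zero_node, b, integer_one_node)

   for the a < b ordering, and likewise for a == b and a > b.  */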
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement casted to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1_loc (loc, NOP_EXPR, void_type_node,
		       fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2_loc (loc, COMPOUND_EXPR, type,
		       fold_ignored_result (omitted), t);

  return non_lvalue_loc (loc, t);
}
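
/* For instance (illustrative): folding f () * 0 to 0 must still call
   f, so the result is built as

     omit_one_operand_loc (loc, type, integer_zero_node, call)

   which yields the COMPOUND_EXPR (f (), 0), keeping the side effect
   and discarding the value.  */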
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands_loc (location_t loc, tree type, tree result,
		       tree omitted1, tree omitted2)
{
  tree t = fold_convert_loc (loc, type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
  if (TREE_SIDE_EFFECTS (omitted1))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
}
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

static tree
fold_truth_not_expr (location_t loc, tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  location_t loc1, loc2;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
	  && flag_trapping_math
	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
	  && code != NE_EXPR && code != EQ_EXPR)
	return NULL_TREE;

      code = invert_tree_comparison (code, HONOR_NANS (op_type));
      if (code == ERROR_MARK)
	return NULL_TREE;

      return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
			 TREE_OPERAND (arg, 1));
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_OR_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_AND_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
	 unless the second operand is a TRUTH_NOT_EXPR in which case our
	 result is the XOR of the first operand with the inside of the
	 negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
	return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
			   TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
	return build2_loc (loc, TRUTH_XOR_EXPR, type,
			   invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
			   TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ORIF_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
	tree arg1 = TREE_OPERAND (arg, 1);
	tree arg2 = TREE_OPERAND (arg, 2);

	loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
	loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);

	/* A COND_EXPR may have a throw as one operand, which
	   then has void type.  Just leave void operands
	   as they are.  */
	return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
			   VOID_TYPE_P (TREE_TYPE (arg1))
			   ? arg1 : invert_truthvalue_loc (loc1, arg1),
			   VOID_TYPE_P (TREE_TYPE (arg2))
			   ? arg2 : invert_truthvalue_loc (loc2, arg2));
      }

    case COMPOUND_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, COMPOUND_EXPR, type,
			 TREE_OPERAND (arg, 0),
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));

    CASE_CONVERT:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
	return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

      /* ... fall through ...  */

    case FLOAT_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, TREE_CODE (arg), type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
	return NULL_TREE;
      return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));

    case SAVE_EXPR:
      return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, CLEANUP_POINT_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    default:
      return NULL_TREE;
    }
}
/* Fold the truth-negation of ARG.  This never alters ARG itself.  We
   assume that ARG is an operation that returns a truth value (0 or 1
   for scalars, 0 or -1 for vectors).  Return the folded expression if
   folding is successful.  Otherwise, return NULL_TREE.  */

static tree
fold_invert_truthvalue (location_t loc, tree arg)
{
  tree type = TREE_TYPE (arg);
  return fold_unary_loc (loc, VECTOR_TYPE_P (type)
			      ? BIT_NOT_EXPR
			      : TRUTH_NOT_EXPR,
			 type, arg);
}
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1 for scalars, 0 or -1 for vectors).  */

tree
invert_truthvalue_loc (location_t loc, tree arg)
{
  if (TREE_CODE (arg) == ERROR_MARK)
    return arg;

  tree type = TREE_TYPE (arg);
  return fold_build1_loc (loc, VECTOR_TYPE_P (type)
			       ? BIT_NOT_EXPR
			       : TRUTH_NOT_EXPR,
			  type, arg);
}
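
/* Examples (sketch): invert_truthvalue_loc rewrites !(a && b) into
   !a || !b through the TRUTH_ANDIF/TRUTH_AND cases of
   fold_truth_not_expr, and rewrites !(x < y) into x >= y -- or
   x unge y when NaNs are honored -- via invert_tree_comparison.  */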
/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
   with code CODE.  This optimization is unsafe.  */

static tree
distribute_real_division (location_t loc, enum tree_code code, tree type,
			  tree arg0, tree arg1)
{
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
			  TREE_OPERAND (arg1, 1), 0))
    return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
			    fold_build2_loc (loc, code, type,
					     TREE_OPERAND (arg0, 0),
					     TREE_OPERAND (arg1, 0)),
			    TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
		       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      if (!mul0)
	real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
	real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2_loc (loc, MULT_EXPR, type,
			      TREE_OPERAND (arg0, 0),
			      build_real (type, r0));
    }

  return NULL_TREE;
}
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero
   and uses reverse storage order if REVERSEP is nonzero.  */

static tree
make_bit_field_ref (location_t loc, tree inner, tree type,
		    HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
		    int unsignedp, int reversep)
{
  tree result, bftype;

  if (bitpos == 0 && !reversep)
    {
      tree size = TYPE_SIZE (TREE_TYPE (inner));
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
	   || POINTER_TYPE_P (TREE_TYPE (inner)))
	  && tree_fits_shwi_p (size)
	  && tree_to_shwi (size) == bitsize)
	return fold_convert_loc (loc, type, inner);
    }

  bftype = type;
  if (TYPE_PRECISION (bftype) != bitsize
      || TYPE_UNSIGNED (bftype) == !unsignedp)
    bftype = build_nonstandard_integer_type (bitsize, 0);

  result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
		       size_int (bitsize), bitsize_int (bitpos));
  REF_REVERSE_STORAGE_ORDER (result) = reversep;

  if (bftype != type)
    result = fold_convert_loc (loc, type, result);

  return result;
}
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (location_t loc, enum tree_code code,
			    tree compare_type, tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lreversep, rreversep;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
				&lunsignedp, &lreversep, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
    return 0;

  if (const_p)
    rreversep = lreversep;
  else
    {
      /* If this is not a constant, we can only do something if bit positions,
	 sizes, signedness and storage order are the same.  */
      rinner
	= get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
			       &runsignedp, &rreversep, &rvolatilep, false);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
	  || lunsignedp != runsignedp || lreversep != rreversep || offset != 0
	  || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
	return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
			 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
			 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
				TYPE_ALIGN (TREE_TYPE (rinner))),
			 word_mode, false);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_cst_type (unsigned_type, -1);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
  mask = const_binop (RSHIFT_EXPR, mask,
		      size_int (nbitsize - lbitsize - lbitpos));

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return fold_build2_loc (loc, code, compare_type,
			    fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
					     make_bit_field_ref (loc, linner,
								 unsigned_type,
								 nbitsize,
								 nbitpos, 1,
								 lreversep),
					     mask),
			    fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
					     make_bit_field_ref (loc, rinner,
								 unsigned_type,
								 nbitsize,
								 nbitpos, 1,
								 rreversep),
					     mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (wi::lrshift (rhs, lbitsize) != 0)
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }
  else
    {
      wide_int tem = wi::arshift (rhs, lbitsize - 1);
      if (tem != 0 && tem != -1)
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = build_int_cst (type, 0);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1,
			    lreversep);

  rhs = const_binop (BIT_AND_EXPR,
		     const_binop (LSHIFT_EXPR,
				  fold_convert_loc (loc, unsigned_type, rhs),
				  size_int (lbitpos)),
		     mask);

  lhs = build2_loc (loc, code, compare_type,
		    build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
  return lhs;
}
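
/* Worked example (illustrative, little-endian): for
   struct { unsigned f : 3; } with F placed at bit 2 of a QImode byte B,
   comparing s.f == 5 becomes (B & (7 << 2)) == (5 << 2): the mask is an
   all-ones value shifted left by nbitsize - lbitsize = 5 and then right
   by nbitsize - lbitsize - lbitpos = 3, i.e. 0x1c, so the byte is
   loaded once and no shift of the field is needed.  */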
/* Subroutine for fold_truth_andor_1: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PREVERSEP is set to the storage order of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
			HOST_WIDE_INT *pbitpos, machine_mode *pmode,
			int *punsignedp, int *preversep, int *pvolatilep,
			tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (CONVERT_EXPR_P (exp)
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
	return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
			       punsignedp, preversep, pvolatilep, false);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst_type (unsigned_type, -1);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
			    fold_convert_loc (loc, unsigned_type, and_mask),
			    mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
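
/* Sketch of a decode (illustrative): for EXP = s.f & 3 where s.f is an
   unsigned 8-bit field starting at bit 8, the function returns the
   underlying record, sets *PBITSIZE = 8 and *PBITPOS = 8, and merges
   the field's all-ones mask 0xff with the explicit AND mask, leaving
   *PMASK = 3 and *PAND_MASK = 3.  */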
/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bit positions and MASK is SIGNED.  */

static int
all_ones_mask_p (const_tree mask, unsigned int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);

  /* If this function returns true when the type of the mask is
     UNSIGNED, then there will be errors.  In particular see
     gcc.c-torture/execute/990326-1.c.  There does not appear to be
     any documentation paper trail as to why this is so.  But the pre
     wide-int worked with that restriction and it has been preserved
     here.  */
  if (size > precision || TYPE_SIGN (type) == UNSIGNED)
    return false;

  return wi::mask (size, false, precision) == mask;
}
/* Subroutine for fold: determine if VAL is the INTEGER_CST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, const_tree val)
{
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (wi::only_sign_bit_p (val, width))
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
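
/* E.g. (illustrative): for a 32-bit int X, sign_bit_p (x, c) with
   C = 0x80000000 returns X, which lets fold turn the test
   (x & 0x80000000) != 0 into the cheaper x < 0.  */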
/* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (const_tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  STRIP_NOPS (exp);

  return (CONSTANT_CLASS_P (exp)
	  || TREE_CODE (exp) == SSA_NAME
	  || (DECL_P (exp)
	      && ! TREE_ADDRESSABLE (exp)
	      && ! TREE_THIS_VOLATILE (exp)
	      && ! DECL_NONLOCAL (exp)
	      /* Don't regard global variables as simple.  They may be
		 allocated in ways unknown to the compiler (shared memory,
		 #pragma weak, etc).  */
	      && ! TREE_PUBLIC (exp)
	      && ! DECL_EXTERNAL (exp)
	      /* Weakrefs are not safe to be read, since they can be NULL.
		 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
		 have DECL_WEAK flag set.  */
	      && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
	      /* Loading a static variable is unduly expensive, but global
		 registers aren't expensive.  */
	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}
/* Subroutine for fold_truth_andor: determine if an operand is simple enough
   to be evaluated unconditionally.
   In addition to simple_operand_p, we assume that comparisons, conversions,
   and logic-not operations are simple, if their operands are simple, too.  */

static bool
simple_operand_p_2 (tree exp)
{
  enum tree_code code;

  if (TREE_SIDE_EFFECTS (exp)
      || tree_could_trap_p (exp))
    return false;

  while (CONVERT_EXPR_P (exp))
    exp = TREE_OPERAND (exp, 0);

  code = TREE_CODE (exp);

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return (simple_operand_p (TREE_OPERAND (exp, 0))
	    && simple_operand_p (TREE_OPERAND (exp, 1)));

  if (code == TRUTH_NOT_EXPR)
    return simple_operand_p_2 (TREE_OPERAND (exp, 0));

  return simple_operand_p (exp);
}
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
	X == 2 || X == 3 || X == 4 || X == 5
   and
	X >= 2 && X <= 5
   are converted to
	(unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
	     tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
			 arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);

  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}
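
/* For instance (sketch): range_binop (LE_EXPR, type, NULL, 0, c, 1)
   compares a missing lower bound (SGN0 = -1, "below every value") with
   a finite upper bound C (SGN1 = 0) and yields true, consistent with
   an omitted bound standing for the extreme value of the type.  */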
4215 /* Helper routine for make_range. Perform one step for it, return
4216 new expression if the loop should continue or NULL_TREE if it should
4220 make_range_step (location_t loc
, enum tree_code code
, tree arg0
, tree arg1
,
4221 tree exp_type
, tree
*p_low
, tree
*p_high
, int *p_in_p
,
4222 bool *strict_overflow_p
)
4224 tree arg0_type
= TREE_TYPE (arg0
);
4225 tree n_low
, n_high
, low
= *p_low
, high
= *p_high
;
4226 int in_p
= *p_in_p
, n_in_p
;
4230 case TRUTH_NOT_EXPR
:
4231 /* We can only do something if the range is testing for zero. */
4232 if (low
== NULL_TREE
|| high
== NULL_TREE
4233 || ! integer_zerop (low
) || ! integer_zerop (high
))
4238 case EQ_EXPR
: case NE_EXPR
:
4239 case LT_EXPR
: case LE_EXPR
: case GE_EXPR
: case GT_EXPR
:
4240 /* We can only do something if the range is testing for zero
4241 and if the second operand is an integer constant. Note that
4242 saying something is "in" the range we make is done by
4243 complementing IN_P since it will set in the initial case of
4244 being not equal to zero; "out" is leaving it alone. */
4245 if (low
== NULL_TREE
|| high
== NULL_TREE
4246 || ! integer_zerop (low
) || ! integer_zerop (high
)
4247 || TREE_CODE (arg1
) != INTEGER_CST
)
4252 case NE_EXPR
: /* - [c, c] */
4255 case EQ_EXPR
: /* + [c, c] */
4256 in_p
= ! in_p
, low
= high
= arg1
;
4258 case GT_EXPR
: /* - [-, c] */
4259 low
= 0, high
= arg1
;
4261 case GE_EXPR
: /* + [c, -] */
4262 in_p
= ! in_p
, low
= arg1
, high
= 0;
4264 case LT_EXPR
: /* - [c, -] */
4265 low
= arg1
, high
= 0;
4267 case LE_EXPR
: /* + [-, c] */
4268 in_p
= ! in_p
, low
= 0, high
= arg1
;
4274 /* If this is an unsigned comparison, we also know that EXP is
4275 greater than or equal to zero. We base the range tests we make
4276 on that fact, so we record it here so we can parse existing
4277 range tests. We test arg0_type since often the return type
4278 of, e.g. EQ_EXPR, is boolean. */
4279 if (TYPE_UNSIGNED (arg0_type
) && (low
== 0 || high
== 0))
4281 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
,
4283 build_int_cst (arg0_type
, 0),
4287 in_p
= n_in_p
, low
= n_low
, high
= n_high
;
4289 /* If the high bound is missing, but we have a nonzero low
4290 bound, reverse the range so it goes from zero to the low bound
4292 if (high
== 0 && low
&& ! integer_zerop (low
))
4295 high
= range_binop (MINUS_EXPR
, NULL_TREE
, low
, 0,
4296 build_int_cst (TREE_TYPE (low
), 1), 0);
4297 low
= build_int_cst (arg0_type
, 0);
    case NEGATE_EXPR:
      /* If flag_wrapv and ARG0_TYPE is signed, make sure
         low and high are non-NULL, then normalize will DTRT.  */
      if (!TYPE_UNSIGNED (arg0_type)
          && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
        {
          if (low == NULL_TREE)
            low = TYPE_MIN_VALUE (arg0_type);
          if (high == NULL_TREE)
            high = TYPE_MAX_VALUE (arg0_type);
        }

      /* (-x) IN [a,b] -> x in [-b, -a]  */
      n_low = range_binop (MINUS_EXPR, exp_type,
                           build_int_cst (exp_type, 0),
                           0, high, 1);
      n_high = range_binop (MINUS_EXPR, exp_type,
                            build_int_cst (exp_type, 0),
                            0, low, 0);
      if (n_high != 0 && TREE_OVERFLOW (n_high))
        return NULL_TREE;
      goto normalize;

    case BIT_NOT_EXPR:
      /* ~ X -> -X - 1  */
      return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
                         build_int_cst (exp_type, 1));
    case PLUS_EXPR:  case MINUS_EXPR:
      if (TREE_CODE (arg1) != INTEGER_CST)
        return NULL_TREE;

      /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
         move a constant to the other side.  */
      if (!TYPE_UNSIGNED (arg0_type)
          && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
        return NULL_TREE;

      /* If EXP is signed, any overflow in the computation is undefined,
         so we don't worry about it so long as our computations on
         the bounds don't overflow.  For unsigned, overflow is defined
         and this is exactly the right thing.  */
      n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                           arg0_type, low, 0, arg1, 0);
      n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                            arg0_type, high, 1, arg1, 0);
      if ((n_low != 0 && TREE_OVERFLOW (n_low))
          || (n_high != 0 && TREE_OVERFLOW (n_high)))
        return NULL_TREE;

      if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
        *strict_overflow_p = true;

      normalize:
      /* Check for an unsigned range which has wrapped around the maximum
         value thus making n_high < n_low, and normalize it.  */
      if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
        {
          low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
                             build_int_cst (TREE_TYPE (n_high), 1), 0);
          high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
                              build_int_cst (TREE_TYPE (n_low), 1), 0);

          /* If the range is of the form +/- [ x+1, x ], we won't
             be able to normalize it.  But then, it represents the
             whole range or the empty set, so make it
             +/- [ -, - ].  */
          if (tree_int_cst_equal (n_low, low)
              && tree_int_cst_equal (n_high, high))
            low = high = 0;
          else
            in_p = ! in_p;
        }
      else
        low = n_low, high = n_high;

      *p_low = low;
      *p_high = high;
      *p_in_p = in_p;
      return arg0;
    CASE_CONVERT:
    case NON_LVALUE_EXPR:
      if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
        return NULL_TREE;

      if (! INTEGRAL_TYPE_P (arg0_type)
          || (low != 0 && ! int_fits_type_p (low, arg0_type))
          || (high != 0 && ! int_fits_type_p (high, arg0_type)))
        return NULL_TREE;

      n_low = low, n_high = high;

      if (n_low != 0)
        n_low = fold_convert_loc (loc, arg0_type, n_low);

      if (n_high != 0)
        n_high = fold_convert_loc (loc, arg0_type, n_high);

      /* If we're converting arg0 from an unsigned type, to exp,
         a signed type, we will be doing the comparison as unsigned.
         The tests above have already verified that LOW and HIGH
         are both positive.

         So we have to ensure that we will handle large unsigned
         values the same way that the current signed bounds treat
         negative values.  */

      if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
        {
          tree high_positive;
          tree equiv_type;
          /* For fixed-point modes, we need to pass the saturating flag
             as the 2nd parameter.  */
          if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
            equiv_type
              = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
                                                TYPE_SATURATING (arg0_type));
          else
            equiv_type
              = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);

          /* A range without an upper bound is, naturally, unbounded.
             Since convert would have cropped a very large value, use
             the max value for the destination type.  */
          high_positive
            = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
              : TYPE_MAX_VALUE (arg0_type);

          if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
            high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
                                             fold_convert_loc (loc, arg0_type,
                                                               high_positive),
                                             build_int_cst (arg0_type, 1));

          /* If the low bound is specified, "and" the range with the
             range for which the original unsigned value will be
             positive.  */
          if (low != 0)
            {
              if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
                                  1, fold_convert_loc (loc, arg0_type,
                                                       integer_zero_node),
                                  high_positive))
                return NULL_TREE;

              in_p = (n_in_p == in_p);
            }
          else
            {
              /* Otherwise, "or" the range with the range of the input
                 that will be interpreted as negative.  */
              if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
                                  1, fold_convert_loc (loc, arg0_type,
                                                       integer_zero_node),
                                  high_positive))
                return NULL_TREE;

              in_p = (in_p != n_in_p);
            }
        }

      *p_low = n_low;
      *p_high = n_high;
      *p_in_p = in_p;
      return arg0;

    default:
      return NULL_TREE;
    }
}
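
/* Illustrative sketch only, assuming 32-bit unsigned int: the
   PLUS_EXPR arm above followed by the normalize step.  For
   "x + 10u < 30u" the bounds [0, 29] shift down by 10 and wrap to
   [0xFFFFFFF6, 19]; normalization rewrites the wrapped range as the
   complement of [20, 0xFFFFFFF5].  `demo_plus_range' is a hypothetical
   name.  */

static inline int
demo_plus_range (unsigned x)
{
  /* Equivalent to "x + 10u < 30u".  */
  return ! (x >= 20u && x <= 0xFFFFFFF5u);
}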
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same
   type as the returned expression.  If EXP is not a comparison, we
   will most likely not be returning a useful value and range.  Set
   *STRICT_OVERFLOW_P to true if the return value is only valid
   because signed overflow is undefined; otherwise, do not change
   *STRICT_OVERFLOW_P.  */

tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
            bool *strict_overflow_p)
{
  enum tree_code code;
  tree arg0, arg1 = NULL_TREE;
  tree exp_type, nexp;
  int in_p;
  tree low, high;
  location_t loc = EXPR_LOCATION (exp);

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = build_int_cst (TREE_TYPE (exp), 0);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);
      arg0 = NULL_TREE;

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
        {
          if (TREE_OPERAND_LENGTH (exp) > 0)
            arg0 = TREE_OPERAND (exp, 0);
          if (TREE_CODE_CLASS (code) == tcc_binary
              || TREE_CODE_CLASS (code) == tcc_comparison
              || (TREE_CODE_CLASS (code) == tcc_expression
                  && TREE_OPERAND_LENGTH (exp) > 1))
            arg1 = TREE_OPERAND (exp, 1);
        }
      if (arg0 == NULL_TREE)
        break;

      nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
                              &high, &in_p, strict_overflow_p);
      if (nexp == NULL_TREE)
        break;
      exp = nexp;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
                                                 exp, 0, low, 0))
                      && integer_onep (range_binop (LE_EXPR, integer_type_node,
                                                    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
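
/* Illustrative sketch only: two iterations of the peeling loop above.
   For "!(x > 10u)", GT_EXPR yields the range - [-, 10] on X, and
   TRUTH_NOT_EXPR then flips IN_P, so the final test is X in + [-, 10].
   `demo_make_range' is a hypothetical name.  */

static inline int
demo_make_range (unsigned x)
{
  /* Equivalent to "!(x > 10u)".  */
  return x <= 10u;
}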
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.  */

tree
build_range_check (location_t loc, tree type, tree exp, int in_p,
                   tree low, tree high)
{
  tree etype = TREE_TYPE (exp), value;

  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (targetm.have_canonicalize_funcptr_for_compare ()
      && TREE_CODE (etype) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
    return NULL_TREE;

  if (! in_p)
    {
      value = build_range_check (loc, type, exp, 1, low, high);
      if (value != 0)
        return invert_truthvalue_loc (loc, value);

      return 0;
    }

  if (low == 0 && high == 0)
    return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);

  if (low == 0)
    return fold_build2_loc (loc, LE_EXPR, type, exp,
                            fold_convert_loc (loc, etype, high));

  if (high == 0)
    return fold_build2_loc (loc, GE_EXPR, type, exp,
                            fold_convert_loc (loc, etype, low));

  if (operand_equal_p (low, high, 0))
    return fold_build2_loc (loc, EQ_EXPR, type, exp,
                            fold_convert_loc (loc, etype, low));

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
        {
          etype = unsigned_type_for (etype);
          high = fold_convert_loc (loc, etype, high);
          exp = fold_convert_loc (loc, etype, exp);
        }
      return build_range_check (loc, type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      int prec = TYPE_PRECISION (etype);

      if (wi::mask (prec - 1, false, prec) == high)
        {
          if (TYPE_UNSIGNED (etype))
            {
              tree signed_etype = signed_type_for (etype);
              if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
                etype
                  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
              else
                etype = signed_etype;
              exp = fold_convert_loc (loc, etype, exp);
            }
          return fold_build2_loc (loc, GT_EXPR, type, exp,
                                  build_int_cst (etype, 0));
        }
    }

  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetic for the type of the expression.
     First make sure that arithmetic in this type is valid, then make sure
     that it wraps around.  */
  if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
    etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
                                            TYPE_UNSIGNED (etype));

  if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
         for the type in question, as we rely on this here.  */
      utype = unsigned_type_for (etype);
      maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
                          build_int_cst (TREE_TYPE (maxv), 1), 1);
      minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
                                      minv, 1, maxv, 1)))
        etype = utype;
      else
        return 0;
    }

  high = fold_convert_loc (loc, etype, high);
  low = fold_convert_loc (loc, etype, low);
  exp = fold_convert_loc (loc, etype, exp);

  value = const_binop (MINUS_EXPR, high, low);

  if (POINTER_TYPE_P (etype))
    {
      if (value != 0 && !TREE_OVERFLOW (value))
        {
          low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
          return build_range_check (loc, type,
                                    fold_build_pointer_plus_loc (loc, exp, low),
                                    1, build_int_cst (etype, 0), value);
        }
      return 0;
    }

  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (loc, type,
                              fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
                              1, build_int_cst (etype, 0), value);

  return 0;
}
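
/* Illustrative sketches only, assuming the usual two's-complement
   conversions: the two headline transforms of build_range_check.
   `demo_range_check' and `demo_signed_char_check' are hypothetical
   names.  */

static inline int
demo_range_check (unsigned char c)
{
  /* (c >= '0' && c <= '9')  becomes  (c - '0') <= 9 computed in a
     wrapping unsigned type: values below '0' wrap to large numbers.  */
  return (unsigned char) (c - '0') <= 9;
}

static inline int
demo_signed_char_check (unsigned char c)
{
  /* (c >= 1 && c <= 127)  becomes  (signed char) c > 0.  */
  return (signed char) c > 0;
}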
/* Return the predecessor of VAL in its type, handling the infinite case.  */

static tree
range_predecessor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
    return 0;
  else
    return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
                        build_int_cst (TREE_TYPE (val), 1), 0);
}

/* Return the successor of VAL in its type, handling the infinite case.  */

static tree
range_successor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
    return 0;
  else
    return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
                        build_int_cst (TREE_TYPE (val), 1), 0);
}
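
/* Illustrative sketch only: the "infinite" case above, for a 32-bit
   unsigned type.  The maximum value has no representable successor, so
   it maps to a missing bound (signalled by the return value here).
   `demo_range_successor' is a hypothetical name.  */

static inline int
demo_range_successor (unsigned val, unsigned *succ)
{
  if (val == ~0u)
    return 0;   /* No successor: the bound becomes unbounded.  */
  *succ = val + 1;
  return 1;
}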
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

bool
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
              tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
                  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
                   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
                                 low0, 0, low1, 0))
      || (lowequal
          && integer_onep (range_binop (GT_EXPR, integer_type_node,
                                        high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
                                          high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
                                      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
         is a subset it is the result.  Otherwise, the range is from the start
         of the second to the end of the first.  */
      if (no_overlap)
        in_p = 0, low = high = 0;
      else if (subset)
        in_p = 1, low = low1, high = high1;
      else
        in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
         equal, the result is false.  If the second range is a subset of the
         first, and the ranges begin at the same place, we go from just after
         the end of the second range to the end of the first.  If the second
         range is not a subset of the first, or if it is a subset and both
         ranges end at the same place, the range starts at the start of the
         first range and ends just before the second range.
         Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
        in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
        in_p = 0, low = high = 0;
      else if (subset && lowequal)
        {
          low = range_successor (high1);
          high = high0;
          in_p = 1;
          if (low == 0)
            {
              /* We are in the weird situation where high0 > high1 but
                 high1 has no successor.  Punt.  */
              return 0;
            }
        }
      else if (! subset || highequal)
        {
          low = low0;
          high = range_predecessor (low1);
          in_p = 1;
          if (high == 0)
            {
              /* low0 < low1 but low1 has no predecessor.  Punt.  */
              return 0;
            }
        }
      else
        return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
         is a subset of the first, the result is false.  Otherwise,
         the range starts just after the first range and ends at the
         end of the second.  */
      if (no_overlap)
        in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
        in_p = 0, low = high = 0;
      else
        {
          low = range_successor (high0);
          high = high1;
          in_p = 1;
          if (low == 0)
            {
              /* high1 > high0 but high0 has no successor.  Punt.  */
              return 0;
            }
        }
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
         is if they don't overlap.  In that case, the only time we have a
         range is if they are adjacent.  If the second is a subset of the
         first, the result is the first.  Otherwise, the range to exclude
         starts at the beginning of the first range and ends at the end of the
         second.  */
      if (no_overlap)
        {
          if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                         range_successor (high0),
                                         1, low1, 0)))
            in_p = 0, low = low0, high = high1;
          else
            {
              /* Canonicalize - [min, x] into - [-, x].  */
              if (low0 && TREE_CODE (low0) == INTEGER_CST)
                switch (TREE_CODE (TREE_TYPE (low0)))
                  {
                  case ENUMERAL_TYPE:
                    if (TYPE_PRECISION (TREE_TYPE (low0))
                        != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
                      break;
                    /* FALLTHROUGH */
                  case INTEGER_TYPE:
                    if (tree_int_cst_equal (low0,
                                            TYPE_MIN_VALUE (TREE_TYPE (low0))))
                      low0 = 0;
                    break;
                  case POINTER_TYPE:
                    if (TYPE_UNSIGNED (TREE_TYPE (low0))
                        && integer_zerop (low0))
                      low0 = 0;
                    break;
                  default:
                    break;
                  }

              /* Canonicalize - [x, max] into - [x, -].  */
              if (high1 && TREE_CODE (high1) == INTEGER_CST)
                switch (TREE_CODE (TREE_TYPE (high1)))
                  {
                  case ENUMERAL_TYPE:
                    if (TYPE_PRECISION (TREE_TYPE (high1))
                        != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
                      break;
                    /* FALLTHROUGH */
                  case INTEGER_TYPE:
                    if (tree_int_cst_equal (high1,
                                            TYPE_MAX_VALUE (TREE_TYPE (high1))))
                      high1 = 0;
                    break;
                  case POINTER_TYPE:
                    if (TYPE_UNSIGNED (TREE_TYPE (high1))
                        && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
                                                       high1, 1,
                                                       build_int_cst (TREE_TYPE (high1), 1),
                                                       1)))
                      high1 = 0;
                    break;
                  default:
                    break;
                  }

              /* The ranges might be also adjacent between the maximum and
                 minimum values of the given type.  For
                 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
                 return + [x + 1, y - 1].  */
              if (low0 == 0 && high1 == 0)
                {
                  low = range_successor (high0);
                  high = range_predecessor (low1);
                  if (low == 0 || high == 0)
                    return 0;

                  in_p = 1;
                }
              else
                return 0;
            }
        }
      else if (subset)
        in_p = 0, low = low0, high = high0;
      else
        in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
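
/* Illustrative sketch only: the adjacency case of merge_ranges.  The
   ranges + [0, 9] and + [10, 15] touch, so their union is the single
   range + [0, 15] and two comparisons become one.  `demo_merged' is a
   hypothetical name.  */

static inline int
demo_merged (unsigned x)
{
  /* (x <= 9u) || (x >= 10u && x <= 15u)  becomes  x <= 15u.  */
  return x <= 15u;
}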
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is being used also to optimize
   A op B ? C : A, by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (location_t loc, tree type,
                                tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if (!HONOR_SIGNED_ZEROS (element_mode (type))
      && (FLOAT_TYPE_P (TREE_TYPE (arg01))
          ? real_zerop (arg01)
          : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
           && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
          /* In the case that A is of the form X-Y, '-A' (arg2) may
             have already been folded to Y-X, check for that.  */
          || (TREE_CODE (arg1) == MINUS_EXPR
              && TREE_CODE (arg2) == MINUS_EXPR
              && operand_equal_p (TREE_OPERAND (arg1, 0),
                                  TREE_OPERAND (arg2, 1), 0)
              && operand_equal_p (TREE_OPERAND (arg1, 1),
                                  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
        tem = fold_convert_loc (loc, arg1_type, arg1);
        return pedantic_non_lvalue_loc (loc,
                                        fold_convert_loc (loc, type,
                                                          negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
        return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
        if (flag_trapping_math)
          break;
        /* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
        if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
          break;
        tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
        return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
        if (flag_trapping_math)
          break;
        /* Fall through.  */
      case LE_EXPR:
      case LT_EXPR:
        if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
          break;
        tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
        return negate_expr (fold_convert_loc (loc, type, tem));
      default:
        gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
        break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (!HONOR_SIGNED_ZEROS (element_mode (type))
      && integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
        return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      else if (comp_code == EQ_EXPR)
        return build_zero_cst (type);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (!HONOR_SIGNED_ZEROS (element_mode (type))
      && operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
         as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
          || VECTOR_TYPE_P (type)
          || (! lang_GNU_CXX ()
              && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
          || ! maybe_lvalue_p (arg1)
          || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
        {
          comp_type = type;
          comp_op0 = arg1;
          comp_op1 = arg2;
        }

      switch (comp_code)
        {
        case EQ_EXPR:
          return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
        case NE_EXPR:
          return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
        case LE_EXPR:
        case LT_EXPR:
        case UNLE_EXPR:
        case UNLT_EXPR:
          /* In C++ a ?: expression can be an lvalue, so put the
             operand which will be used if they are equal first
             so that we can convert this back to the
             corresponding COND_EXPR.  */
          if (!HONOR_NANS (arg1))
            {
              comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
              comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
              tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
                    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
                    : fold_build2_loc (loc, MIN_EXPR, comp_type,
                                       comp_op1, comp_op0);
              return pedantic_non_lvalue_loc (loc,
                                              fold_convert_loc (loc, type, tem));
            }
          break;
        case GE_EXPR:
        case GT_EXPR:
        case UNGE_EXPR:
        case UNGT_EXPR:
          if (!HONOR_NANS (arg1))
            {
              comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
              comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
              tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
                    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
                    : fold_build2_loc (loc, MAX_EXPR, comp_type,
                                       comp_op1, comp_op0);
              return pedantic_non_lvalue_loc (loc,
                                              fold_convert_loc (loc, type, tem));
            }
          break;
        case UNEQ_EXPR:
          if (!HONOR_NANS (arg1))
            return pedantic_non_lvalue_loc (loc,
                                            fold_convert_loc (loc, type, arg2));
          break;
        case LTGT_EXPR:
          if (!HONOR_NANS (arg1))
            return pedantic_non_lvalue_loc (loc,
                                            fold_convert_loc (loc, type, arg1));
          break;
        default:
          gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
          break;
        }
    }

  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
        if (TREE_CODE (arg1) == INTEGER_CST)
          break;
        /* We can replace A with C1 in this case.  */
        arg1 = fold_convert_loc (loc, type, arg01);
        return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);

      case LT_EXPR:
        /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
           MIN_EXPR, to preserve the signedness of the comparison.  */
        if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (PLUS_EXPR, arg2,
                                             build_int_cst (type, 1)),
                                OEP_ONLY_CONST))
          {
            tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
                                   fold_convert_loc (loc, TREE_TYPE (arg00),
                                                     arg2));
            return pedantic_non_lvalue_loc (loc,
                                            fold_convert_loc (loc, type, tem));
          }
        break;

      case LE_EXPR:
        /* If C1 is C2 - 1, this is min(A, C2), with the same care
           as above.  */
        if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (MINUS_EXPR, arg2,
                                             build_int_cst (type, 1)),
                                OEP_ONLY_CONST))
          {
            tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
                                   fold_convert_loc (loc, TREE_TYPE (arg00),
                                                     arg2));
            return pedantic_non_lvalue_loc (loc,
                                            fold_convert_loc (loc, type, tem));
          }
        break;

      case GT_EXPR:
        /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
           MAX_EXPR, to preserve the signedness of the comparison.  */
        if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (MINUS_EXPR, arg2,
                                             build_int_cst (type, 1)),
                                OEP_ONLY_CONST))
          {
            tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
                                   fold_convert_loc (loc, TREE_TYPE (arg00),
                                                     arg2));
            return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
          }
        break;

      case GE_EXPR:
        /* If C1 is C2 + 1, this is max(A, C2), with the same care as above.  */
        if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (PLUS_EXPR, arg2,
                                             build_int_cst (type, 1)),
                                OEP_ONLY_CONST))
          {
            tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
                                   fold_convert_loc (loc, TREE_TYPE (arg00),
                                                     arg2));
            return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
          }
        break;

      case NE_EXPR:
        break;

      default:
        gcc_unreachable ();
      }

  return NULL_TREE;
}
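
/* Illustrative sketches only, for integer types where signed zeros are
   not a concern: the A op 0 ? A : -A and A op B ? A : B families above
   are exactly ABS_EXPR and MIN_EXPR/MAX_EXPR in disguise.  `demo_abs'
   and `demo_min' are hypothetical names.  */

static inline int
demo_abs (int a)
{
  /* a >= 0 ? a : -a  is folded to  ABS_EXPR <a>.  */
  return a >= 0 ? a : -a;
}

static inline int
demo_min (int a, int b)
{
  /* a <= b ? a : b  is folded to  MIN_EXPR <a, b>.  */
  return a <= b ? a : b;
}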
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
                false) >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (location_t loc, enum tree_code code, tree type,
                 tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
               || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  bool strict_overflow_p = false;
  tree lhs, rhs, tem;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
                                  "when simplifying range test");

  if (!INTEGRAL_TYPE_P (type))
    return 0;

  lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
  rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
                       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (loc, type,
                                         lhs != 0 ? lhs
                                         : rhs != 0 ? rhs : integer_zero_node,
                                         in_p, low, high))))
    {
      if (strict_overflow_p)
        fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
      return or_op ? invert_truthvalue_loc (loc, tem) : tem;
    }

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
           && lhs != 0 && rhs != 0
           && (code == TRUTH_ANDIF_EXPR
               || code == TRUTH_ORIF_EXPR)
           && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
         unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
         which cases we can't do this.  */
      if (simple_operand_p (lhs))
        return build2_loc (loc, code == TRUTH_ANDIF_EXPR
                           ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
                           type, op0, op1);

      else if (!lang_hooks.decls.global_bindings_p ()
               && !CONTAINS_PLACEHOLDER_P (lhs))
        {
          tree common = save_expr (lhs);

          if (0 != (lhs = build_range_check (loc, type, common,
                                             or_op ? ! in0_p : in0_p,
                                             low0, high0))
              && (0 != (rhs = build_range_check (loc, type, common,
                                                 or_op ? ! in1_p : in1_p,
                                                 low1, high1))))
            {
              if (strict_overflow_p)
                fold_overflow_warning (warnmsg,
                                       WARN_STRICT_OVERFLOW_COMPARISON);
              return build2_loc (loc, code == TRUTH_ANDIF_EXPR
                                 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
                                 type, lhs, rhs);
            }
        }
    }

  return 0;
}
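
/* Illustrative sketch only: the payoff of fold_range_test.  Both arms
   of the OR describe ranges of the same variable, so the combined test
   collapses to a single comparison.  `demo_not_digit' is a hypothetical
   name.  */

static inline int
demo_not_digit (unsigned char ch)
{
  /* (ch < '0') || (ch > '9'): invert both sides, merge the ranges,
     and invert back -- one subtract-and-compare remains.  */
  return (unsigned char) (ch - '0') > 9;
}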
/* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is signed-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (signed_type_for (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
                        fold_convert (TREE_TYPE (c), mask));
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
}
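
/* Illustrative sketch only, for P = 4 in a 32-bit unsigned value: the
   shift trick above builds the pattern that sign-extension of the
   4-bit field would produce, and the XOR leaves the extra bits zero
   exactly when C was already sign-extended.  `demo_unextend_4' is a
   hypothetical name.  */

static inline unsigned
demo_unextend_4 (unsigned c)
{
  /* Bit 3 is the field's sign bit; replicate it into bits 31..4.  */
  unsigned ext = ((c >> 3) & 1) ? ~0xFu : 0u;
  return c ^ ext;
}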
/* For an expression that has the form
     (A && B) || ~A
   or
     (A || B) && ~A,
   we can drop one of the inner expressions and simplify to
     A || B
   or
     A && B

   LOC is the location of the resulting expression.  OP is the inner
   logical operation; the left-hand side in the examples above, while CMPOP
   is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
   removing a condition that guards another, as in
   (A != NULL && A->...) || A == NULL
   which we must not transform.  If RHS_ONLY is true, only eliminate the
   right-most operand of the inner logical operation.  */

static tree
merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
                                 bool rhs_only)
{
  tree type = TREE_TYPE (cmpop);
  enum tree_code code = TREE_CODE (cmpop);
  enum tree_code truthop_code = TREE_CODE (op);
  tree lhs = TREE_OPERAND (op, 0);
  tree rhs = TREE_OPERAND (op, 1);
  tree orig_lhs = lhs, orig_rhs = rhs;
  enum tree_code rhs_code = TREE_CODE (rhs);
  enum tree_code lhs_code = TREE_CODE (lhs);
  enum tree_code inv_code;

  if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
    return NULL_TREE;

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return NULL_TREE;

  if (rhs_code == truthop_code)
    {
      tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
      if (newrhs != NULL_TREE)
        {
          rhs = newrhs;
          rhs_code = TREE_CODE (rhs);
        }
    }
  if (lhs_code == truthop_code && !rhs_only)
    {
      tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
      if (newlhs != NULL_TREE)
        {
          lhs = newlhs;
          lhs_code = TREE_CODE (lhs);
        }
    }

  inv_code = invert_tree_comparison (code, HONOR_NANS (type));
  if (inv_code == rhs_code
      && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return lhs;
  if (!rhs_only && inv_code == lhs_code
      && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return rhs;
  if (rhs != orig_rhs || lhs != orig_lhs)
    return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
                            lhs, rhs);

  return NULL_TREE;
}
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
                    tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
        (a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
  machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  HOST_WIDE_INT first_bit, end_bit;
  int volatilep;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
                    build_int_cst (TREE_TYPE (lhs), 0));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
                    build_int_cst (TREE_TYPE (rhs), 0));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      if (operand_equal_p (ll_arg, rl_arg, 0)
          && operand_equal_p (lr_arg, rr_arg, 0))
        {
          result = combine_comparisons (loc, code, lcode, rcode,
                                        truth_type, ll_arg, lr_arg);
          if (result)
            return result;
        }
      else if (operand_equal_p (ll_arg, rr_arg, 0)
               && operand_equal_p (lr_arg, rl_arg, 0))
        {
          result = combine_comparisons (loc, code, lcode,
                                        swap_tree_comparison (rcode),
                                        truth_type, ll_arg, lr_arg);
          if (result)
            return result;
        }
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
          ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  */

  if (BRANCH_COST (optimize_function_for_speed_p (cfun),
                   false) >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
          && lcode == NE_EXPR && integer_zerop (lr_arg)
          && rcode == NE_EXPR && integer_zerop (rr_arg)
          && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
          && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
        return build2_loc (loc, NE_EXPR, truth_type,
                           build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
                                   ll_arg, rl_arg),
                           build_int_cst (TREE_TYPE (ll_arg), 0));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
          && lcode == EQ_EXPR && integer_zerop (lr_arg)
          && rcode == EQ_EXPR && integer_zerop (rr_arg)
          && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
          && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
        return build2_loc (loc, EQ_EXPR, truth_type,
                           build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
                                   ll_arg, rl_arg),
                           build_int_cst (TREE_TYPE (ll_arg), 0));
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
  volatilep = 0;
  ll_inner = decode_field_reference (loc, ll_arg,
                                     &ll_bitsize, &ll_bitpos, &ll_mode,
                                     &ll_unsignedp, &ll_reversep, &volatilep,
                                     &ll_mask, &ll_and_mask);
  lr_inner = decode_field_reference (loc, lr_arg,
                                     &lr_bitsize, &lr_bitpos, &lr_mode,
                                     &lr_unsignedp, &lr_reversep, &volatilep,
                                     &lr_mask, &lr_and_mask);
  rl_inner = decode_field_reference (loc, rl_arg,
                                     &rl_bitsize, &rl_bitpos, &rl_mode,
                                     &rl_unsignedp, &rl_reversep, &volatilep,
                                     &rl_mask, &rl_and_mask);
  rr_inner = decode_field_reference (loc, rr_arg,
                                     &rr_bitsize, &rr_bitpos, &rr_mode,
                                     &rr_unsignedp, &rr_reversep, &volatilep,
                                     &rr_mask, &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep
      || ll_reversep != rl_reversep
      || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    {
      l_const = lr_arg, r_const = rr_arg;
      lr_reversep = ll_reversep;
    }
  else if (lr_reversep != rr_reversep
           || lr_inner == 0 || rr_inner == 0
           || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
        {
          /* Make the left operand unsigned, since we are only interested
             in the value of one bit.  Otherwise we are doing the wrong
             thing below.  */
          ll_unsignedp = 1;
          l_const = ll_mask;
        }
      else
        return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
        {
          rl_unsignedp = 1;
          r_const = rl_mask;
        }
      else
        return 0;
    }

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
                          TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
                          volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
                         size_int (xll_bitpos));
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
                         size_int (xrl_bitpos));

  if (l_const)
    {
      l_const = fold_convert_loc (loc, lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
                                        fold_build1_loc (loc, BIT_NOT_EXPR,
                                                         lntype, ll_mask))))
        {
          warning (0, "comparison is always %d", wanted_code == NE_EXPR);

          return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
        }
    }
  if (r_const)
    {
      r_const = fold_convert_loc (loc, lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
                                        fold_build1_loc (loc, BIT_NOT_EXPR,
                                                         lntype, rl_mask))))
        {
          warning (0, "comparison is always %d", wanted_code == NE_EXPR);

          return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
        }
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
          || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
          /* Make sure the two fields on the right
             correspond to the left without being swapped.  */
          || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
        return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
                              TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
                              volatilep);
      if (rnmode == VOIDmode)
        return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
        {
          xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
          xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
        }

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
                                                            rntype, lr_mask),
                             size_int (xlr_bitpos));
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
                                                            rntype, rr_mask),
                             size_int (xrr_bitpos));

      /* Make a mask that corresponds to both fields being compared.
         Do this for both items being compared.  If the operands are the
         same size and the bits being compared are in the same position
         then we can do this by masking both and comparing the masked
         results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
        {
          lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
                                    ll_unsignedp || rl_unsignedp, ll_reversep);
          if (! all_ones_mask_p (ll_mask, lnbitsize))
            lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

          rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
                                    lr_unsignedp || rr_unsignedp, lr_reversep);
          if (! all_ones_mask_p (lr_mask, rnbitsize))
            rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

          return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
        }

      /* There is still another way we can do something:  If both pairs of
         fields being compared are adjacent, we may be able to make a wider
         field containing them both.

         Note that we still must mask the lhs/rhs expressions.  Furthermore,
         the mask must be shifted to account for the shift done by
         make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
           && lr_bitsize + lr_bitpos == rr_bitpos)
          || (ll_bitpos == rl_bitpos + rl_bitsize
              && lr_bitpos == rr_bitpos + rr_bitsize))
        {
          tree type;

          lhs = make_bit_field_ref (loc, ll_inner, lntype,
                                    ll_bitsize + rl_bitsize,
                                    MIN (ll_bitpos, rl_bitpos),
                                    ll_unsignedp, ll_reversep);
          rhs = make_bit_field_ref (loc, lr_inner, rntype,
                                    lr_bitsize + rr_bitsize,
                                    MIN (lr_bitpos, rr_bitpos),
                                    lr_unsignedp, lr_reversep);

          ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
                                 size_int (MIN (xll_bitpos, xrl_bitpos)));
          lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
                                 size_int (MIN (xlr_bitpos, xrr_bitpos)));

          /* Convert to the smaller type before masking out unwanted bits.  */
          type = lntype;
          if (lntype != rntype)
            {
              if (lnbitsize > rnbitsize)
                {
                  lhs = fold_convert_loc (loc, rntype, lhs);
                  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
                  type = rntype;
                }
              else if (lnbitsize < rnbitsize)
                {
                  rhs = fold_convert_loc (loc, lntype, rhs);
                  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
                  type = lntype;
                }
            }

          if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
            lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

          if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
            rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

          return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
        }

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
                           const_binop (BIT_AND_EXPR, result, r_const)) != 1)
    {
      if (wanted_code == NE_EXPR)
        {
          warning (0, "%<or%> of unmatched not-equal tests is always 1");
          return constant_boolean_node (true, truth_type);
        }
      else
        {
          warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
          return constant_boolean_node (false, truth_type);
        }
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
                               ll_unsignedp || rl_unsignedp, ll_reversep);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);

  return build2_loc (loc, wanted_code, truth_type, result,
                     const_binop (BIT_IOR_EXPR, l_const, r_const));
}
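
/* Illustrative sketch only: the simplest merge performed above.  Both
   comparisons test against zero, so a single IOR and one comparison
   suffice, and the branch between them disappears.  `demo_andor' is a
   hypothetical name.  */

static inline int
demo_andor (unsigned a, unsigned b)
{
  /* (a != 0) || (b != 0)  becomes  (a | b) != 0.  */
  return (a | b) != 0;
}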
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
                            tree op0, tree op1)
{
  tree arg0 = op0;
  enum tree_code op_code;
  tree comp_const;
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_OVERFLOW (minmax_const))
    return NULL_TREE;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (code)
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      {
        tree tem
          = optimize_minmax_comparison (loc,
                                        invert_tree_comparison (code, false),
                                        type, op0, op1);
        if (tem)
          return invert_truthvalue_loc (loc, tem);
        return NULL_TREE;
      }

    case GE_EXPR:
      return
        fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
                         optimize_minmax_comparison
                         (loc, EQ_EXPR, type, arg0, comp_const),
                         optimize_minmax_comparison
                         (loc, GT_EXPR, type, arg0, comp_const));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
        /* MAX (X, 0) == 0  ->  X <= 0  */
        return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR && consts_lt)
        /* MAX (X, 0) == 5  ->  X == 5   */
        return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
        /* MAX (X, 0) == -1  ->  false  */
        return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else if (consts_equal)
        /* MIN (X, 0) == 0  ->  X >= 0  */
        return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);

      else if (consts_lt)
        /* MIN (X, 0) == 5  ->  false  */
        return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
        /* MIN (X, 0) == -1  ->  X == -1  */
        return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
        /* MAX (X, 0) > 0  ->  X > 0
           MAX (X, 0) > 5  ->  X > 5  */
        return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
        /* MAX (X, 0) > -1  ->  true  */
        return omit_one_operand_loc (loc, type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
        /* MIN (X, 0) > 0  ->  false
           MIN (X, 0) > 5  ->  false  */
        return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
        /* MIN (X, 0) > -1  ->  X > -1  */
        return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

    default:
      return NULL_TREE;
    }
}
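
/* Illustrative sketch only: one row of the comparison table above,
   with MAX written out.  Since MAX (x, 0) is zero exactly when x is
   non-positive, the comparison no longer needs the MAX.
   `demo_max_cmp' is a hypothetical name.  */

static inline int
demo_max_cmp (int x)
{
  /* (x > 0 ? x : 0) == 0  becomes  x <= 0.  */
  return x <= 0;
}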
/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.

   We set *STRICT_OVERFLOW_P to true if the return value depends on
   signed overflow being undefined.  Otherwise we do not change
   *STRICT_OVERFLOW_P.  */

static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
                bool *strict_overflow_p)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  static int depth;
  tree ret;

  if (depth > 3)
    return NULL;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
  depth--;

  return ret;
}
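
/* Illustrative sketch only: the headline example from the comment
   above.  When both addends are exact multiples of the divisor, the
   division distributes and disappears; the fold is only valid when the
   original expression could not overflow, or overflow is undefined.
   `demo_extract_muldiv' is a hypothetical name.  */

static inline int
demo_extract_muldiv (int x, int y)
{
  /* ((x * 8) + (y * 16)) / 4  becomes  (x * 2) + (y * 4).  */
  return x * 2 + y * 4;
}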
6016 extract_muldiv_1 (tree t
, tree c
, enum tree_code code
, tree wide_type
,
6017 bool *strict_overflow_p
)
6019 tree type
= TREE_TYPE (t
);
6020 enum tree_code tcode
= TREE_CODE (t
);
6021 tree ctype
= (wide_type
!= 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type
))
6022 > GET_MODE_SIZE (TYPE_MODE (type
)))
6023 ? wide_type
: type
);
6025 int same_p
= tcode
== code
;
6026 tree op0
= NULL_TREE
, op1
= NULL_TREE
;
6027 bool sub_strict_overflow_p
;
6029 /* Don't deal with constants of zero here; they confuse the code below. */
6030 if (integer_zerop (c
))
6033 if (TREE_CODE_CLASS (tcode
) == tcc_unary
)
6034 op0
= TREE_OPERAND (t
, 0);
6036 if (TREE_CODE_CLASS (tcode
) == tcc_binary
)
6037 op0
= TREE_OPERAND (t
, 0), op1
= TREE_OPERAND (t
, 1);
6039 /* Note that we need not handle conditional operations here since fold
6040 already handles those cases. So just do arithmetic here. */
6044 /* For a constant, we can always simplify if we are a multiply
6045 or (for divide and modulus) if it is a multiple of our constant. */
6046 if (code
== MULT_EXPR
6047 || wi::multiple_of_p (t
, c
, TYPE_SIGN (type
)))
6049 tree tem
= const_binop (code
, fold_convert (ctype
, t
),
6050 fold_convert (ctype
, c
));
6051 /* If the multiplication overflowed to INT_MIN then we lost sign
6052 information on it and a subsequent multiplication might
6053 spuriously overflow. See PR68142. */
6054 if (TREE_OVERFLOW (tem
)
6055 && wi::eq_p (tem
, wi::min_value (TYPE_PRECISION (ctype
), SIGNED
)))
6061 CASE_CONVERT
: case NON_LVALUE_EXPR
:
6062 /* If op0 is an expression ... */
6063 if ((COMPARISON_CLASS_P (op0
)
6064 || UNARY_CLASS_P (op0
)
6065 || BINARY_CLASS_P (op0
)
6066 || VL_EXP_CLASS_P (op0
)
6067 || EXPRESSION_CLASS_P (op0
))
6068 /* ... and has wrapping overflow, and its type is smaller
6069 than ctype, then we cannot pass through as widening. */
6070 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0
))
6071 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0
)))
6072 && (TYPE_PRECISION (ctype
)
6073 > TYPE_PRECISION (TREE_TYPE (op0
))))
6074 /* ... or this is a truncation (t is narrower than op0),
6075 then we cannot pass through this narrowing. */
6076 || (TYPE_PRECISION (type
)
6077 < TYPE_PRECISION (TREE_TYPE (op0
)))
6078 /* ... or signedness changes for division or modulus,
6079 then we cannot pass through this conversion. */
6080 || (code
!= MULT_EXPR
6081 && (TYPE_UNSIGNED (ctype
)
6082 != TYPE_UNSIGNED (TREE_TYPE (op0
))))
6083 /* ... or has undefined overflow while the converted to
6084 type has not, we cannot do the operation in the inner type
6085 as that would introduce undefined overflow. */
6086 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0
))
6087 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0
)))
6088 && !TYPE_OVERFLOW_UNDEFINED (type
))))
6091 /* Pass the constant down and see if we can make a simplification. If
6092 we can, replace this expression with the inner simplification for
6093 possible later conversion to our or some other type. */
6094 if ((t2
= fold_convert (TREE_TYPE (op0
), c
)) != 0
6095 && TREE_CODE (t2
) == INTEGER_CST
6096 && !TREE_OVERFLOW (t2
)
6097 && (0 != (t1
= extract_muldiv (op0
, t2
, code
,
6099 ? ctype
: NULL_TREE
,
6100 strict_overflow_p
))))
6105 /* If widening the type changes it from signed to unsigned, then we
6106 must avoid building ABS_EXPR itself as unsigned. */
6107 if (TYPE_UNSIGNED (ctype
) && !TYPE_UNSIGNED (type
))
6109 tree cstype
= (*signed_type_for
) (ctype
);
6110 if ((t1
= extract_muldiv (op0
, c
, code
, cstype
, strict_overflow_p
))
6113 t1
= fold_build1 (tcode
, cstype
, fold_convert (cstype
, t1
));
6114 return fold_convert (ctype
, t1
);
6118 /* If the constant is negative, we cannot simplify this. */
6119 if (tree_int_cst_sgn (c
) == -1)
6123 /* For division and modulus, type can't be unsigned, as e.g.
6124 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6125 For signed types, even with wrapping overflow, this is fine. */
6126 if (code
!= MULT_EXPR
&& TYPE_UNSIGNED (type
))
6128 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
, strict_overflow_p
))
6130 return fold_build1 (tcode
, ctype
, fold_convert (ctype
, t1
));
6133 case MIN_EXPR
: case MAX_EXPR
:
6134 /* If widening the type changes the signedness, then we can't perform
6135 this optimization as that changes the result. */
6136 if (TYPE_UNSIGNED (ctype
) != TYPE_UNSIGNED (type
))
6139 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6140 sub_strict_overflow_p
= false;
6141 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
,
6142 &sub_strict_overflow_p
)) != 0
6143 && (t2
= extract_muldiv (op1
, c
, code
, wide_type
,
6144 &sub_strict_overflow_p
)) != 0)
6146 if (tree_int_cst_sgn (c
) < 0)
6147 tcode
= (tcode
== MIN_EXPR
? MAX_EXPR
: MIN_EXPR
);
6148 if (sub_strict_overflow_p
)
6149 *strict_overflow_p
= true;
6150 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
6151 fold_convert (ctype
, t2
));
6155 case LSHIFT_EXPR
: case RSHIFT_EXPR
:
6156 /* If the second operand is constant, this is a multiplication
6157 or floor division, by a power of two, so we can treat it that
6158 way unless the multiplier or divisor overflows. Signed
6159 left-shift overflow is implementation-defined rather than
6160 undefined in C90, so do not convert signed left shift into
6162 if (TREE_CODE (op1
) == INTEGER_CST
6163 && (tcode
== RSHIFT_EXPR
|| TYPE_UNSIGNED (TREE_TYPE (op0
)))
6164 /* const_binop may not detect overflow correctly,
6165 so check for it explicitly here. */
6166 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node
)), op1
)
6167 && 0 != (t1
= fold_convert (ctype
,
6168 const_binop (LSHIFT_EXPR
,
6171 && !TREE_OVERFLOW (t1
))
6172 return extract_muldiv (build2 (tcode
== LSHIFT_EXPR
6173 ? MULT_EXPR
: FLOOR_DIV_EXPR
,
6175 fold_convert (ctype
, op0
),
6177 c
, code
, wide_type
, strict_overflow_p
);
    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
      sub_strict_overflow_p = false;
      t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
      t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
      if (t1 != 0 && t2 != 0
	  && (code == MULT_EXPR
	      /* If not multiplication, we can only do this if both operands
		 are divisible by c.  */
	      || (multiple_of_p (ctype, op0, c)
		  && multiple_of_p (ctype, op1, c))))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}

      /* If this was a subtraction, negate OP1 and set it to be an addition.
	 This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
	{
	  tcode = PLUS_EXPR, op1 = negate_expr (op1);
	  /* If OP1 was not easily negatable, the constant may be OP0.  */
	  if (TREE_CODE (op0) == INTEGER_CST)
	    {
	      std::swap (op0, op1);
	      std::swap (t1, t2);
	    }
	}

      if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If either OP1 or C are negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
	    break;
	}

      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
	{
	  op1 = const_binop (code, fold_convert (ctype, op1),
			     fold_convert (ctype, c));
	  /* We allow the constant to overflow with wrapping semantics.  */
	  if (op1 == 0
	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
	    break;
	}
      else
	break;

      /* If we have an unsigned type, we cannot widen the operation since it
	 will change the result if the original computation overflowed.  */
      if (TYPE_UNSIGNED (ctype) && ctype != type)
	break;

      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow
	 and overflow is defined.  With undefined overflow
	 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.  */
      if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
	return fold_build2 (tcode, ctype,
			    fold_build2 (code, ctype,
					 fold_convert (ctype, op0),
					 fold_convert (ctype, c)),
			    op1);

      break;
    case MULT_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  /* If the multiplication can overflow we cannot optimize this.  */
	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
	{
	  *strict_overflow_p = true;
	  return omit_one_operand (type, integer_zero_node, op0);
	}

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && (t1 = extract_muldiv (op0, c, code, wide_type,
				   strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && (t1 = extract_muldiv (op1, c, code, wide_type,
					strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
			    fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
	return 0;
      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code)
	{
	  bool overflow_p = false;
	  bool overflow_mul_p;
	  signop sign = TYPE_SIGN (ctype);
	  wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
	  overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
	  if (overflow_mul_p
	      && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
	    overflow_p = true;
	  if (!overflow_p)
	    {
	      mul = wide_int::from (mul, TYPE_PRECISION (ctype),
				    TYPE_SIGN (TREE_TYPE (op1)));
	      return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				  wide_int_to_tree (ctype, mul));
	    }
	}
      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with either an
	 operation or CODE or TCODE.

	 If we have an unsigned type, we cannot do this since it will change
	 the result if the original computation overflowed.  */
      if (TYPE_OVERFLOW_UNDEFINED (ctype)
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
		  && code != MULT_EXPR)))
	{
	  if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     op1, c)));
	    }
	  else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (code, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     c, op1)));
	    }
	}
      break;

    default:
      break;
    }

  return 0;
}
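
/* Illustrative example (added by the editor, not part of the original
   sources): extract_muldiv distributes a constant multiplication or
   division into a sub-tree.  For instance, with signed int i and
   undefined signed overflow assumed,

       (i * 4 + 8) / 2   -->   i * 2 + 4

   since both operands of the PLUS_EXPR are divisible by the constant 2,
   the division is pushed onto each operand and folded away there.  */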
/* Return a node which has the indicated constant VALUE (either 0 or
   1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
   and is of the indicated TYPE.  */

tree
constant_boolean_node (bool value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else if (TREE_CODE (type) == VECTOR_TYPE)
    return build_vector_from_val (type,
				  build_int_cst (TREE_TYPE (type),
						 value ? -1 : 0));
  else
    return fold_convert (type, value ? integer_one_node : integer_zero_node);
}
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (location_t loc,
				     enum tree_code code,
				     tree type, tree op0, tree op1,
				     tree cond, tree arg, int cond_first_p)
{
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;
  enum tree_code cond_code = COND_EXPR;

  if (TREE_CODE (cond) == COND_EXPR
      || TREE_CODE (cond) == VEC_COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an expression, then it does not make
	 sense to try to perform a logical or arithmetic operation
	 involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	rhs = false_value;
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
    cond_code = VEC_COND_EXPR;

  /* This transformation is only worthwhile if we don't have to wrap ARG
     in a SAVE_EXPR and the operation can be simplified without recursing
     on at least one of the branches once its pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg)
      && (TREE_SIDE_EFFECTS (arg)
	  || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
	  || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
    return NULL_TREE;

  arg = fold_convert_loc (loc, arg_type, arg);
  if (lhs == 0)
    {
      true_value = fold_convert_loc (loc, cond_type, true_value);
      if (cond_first_p)
	lhs = fold_build2_loc (loc, code, type, true_value, arg);
      else
	lhs = fold_build2_loc (loc, code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert_loc (loc, cond_type, false_value);
      if (cond_first_p)
	rhs = fold_build2_loc (loc, code, type, false_value, arg);
      else
	rhs = fold_build2_loc (loc, code, type, arg, false_value);
    }

  /* Check that we have simplified at least one of the branches.  */
  if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
    return NULL_TREE;

  return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
}
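
/* Illustrative example (added by the editor, not part of the original
   sources): with a constant ARG the transform pays off immediately,
   e.g.

       1 + (x < y)   -->   (x < y) ? 2 : 1

   since each branch folds to a constant; for a non-constant ARG the
   function bails out rather than duplicate ARG into both arms.  */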
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

bool
fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (element_mode (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (element_mode (type)))
    return true;

  /* In a vector or complex, we would need to check the sign of all zeros.  */
  if (TREE_CODE (addend) != REAL_CST)
    return false;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
}
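
/* Illustrative example (added by the editor, not part of the original
   sources): when signed zeros are honored, "x + 0.0" is not always x,
   because (-0.0) + 0.0 is +0.0 under round-to-nearest; "x - 0.0" does
   preserve x under the default rounding mode, which is why only the
   NEGATE case can return true above.  */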
/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_div_compare (location_t loc,
		  enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  signop sign = TYPE_SIGN (TREE_TYPE (arg0));
  bool neg_overflow = false;
  bool overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1);  */
  wide_int val = wi::mul (arg01, arg1, sign, &overflow);
  prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
  neg_overflow = false;

  if (sign == UNSIGNED)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp).  */
      val = wi::add (prod, tmp, sign, &overflow);
      hi = force_fit_type (TREE_TYPE (arg00), val,
			   -1, overflow | TREE_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  neg_overflow = true;
	  lo = int_const_binop (MINUS_EXPR, prod, tmp);
	  hi = prod;
	  break;

	case 0:
	  lo = fold_negate_const (tmp, TREE_TYPE (arg0));
	  hi = tmp;
	  break;

	case 1:
	  hi = int_const_binop (PLUS_EXPR, prod, tmp);
	  lo = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  hi = int_const_binop (MINUS_EXPR, prod, tmp);
	  lo = prod;
	  break;

	case 0:
	  hi = fold_negate_const (tmp, TREE_TYPE (arg0));
	  lo = tmp;
	  break;

	case 1:
	  neg_overflow = true;
	  lo = int_const_binop (PLUS_EXPR, prod, tmp);
	  hi = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand_loc (loc, type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}
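
/* Worked instance (added by the editor, not part of the original
   sources): for signed int x, "x / 4 == 2" becomes the range check
   "8 <= x && x <= 11" (prod = 8, tmp = 3, so [lo, hi] = [8, 11]),
   and "x / 4 > 2" becomes simply "x > 11".  */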
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of the test
   using a sign testing.  Otherwise return NULL.  TYPE is the desired
   result type.  */

static tree
fold_single_bit_test_into_sign_test (location_t loc,
				     enum tree_code code, tree arg0, tree arg1,
				     tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      /* If we have (A & C) != 0 where C is the sign bit of A, convert
	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      if (arg00 != NULL_TREE
	  /* This is only a win if casting to a signed type is cheap,
	     i.e. when arg00's type is not a partial mode.  */
	  && TYPE_PRECISION (TREE_TYPE (arg00))
	     == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
	{
	  tree stype = signed_type_for (TREE_TYPE (arg00));
	  return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
				  result_type,
				  fold_convert_loc (loc, stype, arg00),
				  build_int_cst (stype, 0));
	}
    }

  return NULL_TREE;
}
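
/* Illustrative example (added by the editor, not part of the original
   sources): for a 32-bit signed int x the constant 1 << 31 is exactly
   the sign bit, so "(x & (1u << 31)) != 0" folds to "x < 0" and the
   "== 0" form folds to "x >= 0".  */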
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */

tree
fold_single_bit_test (location_t loc, enum tree_code code,
		      tree arg0, tree arg1, tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem, one;

      /* First, see if we can fold the single bit test into a sign-bit
	 test.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
						 result_type);
      if (tem)
	return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
	 convert that into ((A >> C2) & 1).  Where C2 = log2(C).
	 Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && bitnum < TYPE_PRECISION (type)
	  && wi::ltu_p (TREE_OPERAND (inner, 1),
			TYPE_PRECISION (type) - bitnum))
	{
	  bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
		      && !flag_syntax_only) ? 0 : 1;

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert_loc (loc, intermediate_type, inner);

      if (bitnum != 0)
	inner = build2 (RSHIFT_EXPR, intermediate_type,
			inner, size_int (bitnum));

      one = build_int_cst (intermediate_type, 1);

      if (code == EQ_EXPR)
	inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type,
				 inner, one);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);

      /* Make sure to return the proper type.  */
      inner = fold_convert_loc (loc, result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
/* Check whether we are allowed to reorder operands arg0 and arg1,
   such that the evaluation of arg1 occurs before arg0.  */

static bool
reorder_operands_p (const_tree arg0, const_tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
	 && ! TREE_SIDE_EFFECTS (arg1);
}
/* Test whether it is preferable two swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */

bool
tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
{
  if (CONSTANT_CLASS_P (arg1))
    return 0;
  if (CONSTANT_CLASS_P (arg0))
    return 1;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  /* It is preferable to swap two SSA_NAME to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return 1;

  /* Put SSA_NAMEs last.  */
  if (TREE_CODE (arg1) == SSA_NAME)
    return 0;
  if (TREE_CODE (arg0) == SSA_NAME)
    return 1;

  /* Put variables last.  */
  if (DECL_P (arg1))
    return 0;
  if (DECL_P (arg0))
    return 1;

  return 0;
}
/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */

static tree
fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
{
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;

  if (TREE_CODE (bound) == LT_EXPR)
    a = TREE_OPERAND (bound, 0);
  else if (TREE_CODE (bound) == GT_EXPR)
    a = TREE_OPERAND (bound, 1);
  else
    return NULL_TREE;

  typea = TREE_TYPE (a);
  if (!INTEGRAL_TYPE_P (typea)
      && !POINTER_TYPE_P (typea))
    return NULL_TREE;

  if (TREE_CODE (ineq) == LT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 1);
      y = TREE_OPERAND (ineq, 0);
    }
  else if (TREE_CODE (ineq) == GT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 0);
      y = TREE_OPERAND (ineq, 1);
    }
  else
    return NULL_TREE;

  if (TREE_TYPE (a1) != typea)
    return NULL_TREE;

  if (POINTER_TYPE_P (typea))
    {
      /* Convert the pointer types into integer before taking the difference.  */
      tree ta = fold_convert_loc (loc, ssizetype, a);
      tree ta1 = fold_convert_loc (loc, ssizetype, a1);
      diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
    }
  else
    diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);

  if (!diff || !integer_onep (diff))
    return NULL_TREE;

  return fold_build2_loc (loc, GE_EXPR, type, a, y);
}
/* Fold a sum or difference of at least one multiplication.
   Returns the folded tree or NULL if no simplification could be made.  */

static tree
fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
			  tree arg0, tree arg1)
{
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else if (TREE_CODE (arg0) == INTEGER_CST)
    {
      arg00 = build_one_cst (type);
      arg01 = arg0;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      arg00 = arg0;
      arg01 = build_one_cst (type);
    }
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else if (TREE_CODE (arg1) == INTEGER_CST)
    {
      arg10 = build_one_cst (type);
      /* As we canonicalize A - 2 to A + -2 get rid of that sign for
	 the purpose of this canonicalization.  */
      if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
	  && negate_expr_p (arg1)
	  && code == PLUS_EXPR)
	{
	  arg11 = negate_expr (arg1);
	  code = MINUS_EXPR;
	}
      else
	arg11 = arg1;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      arg10 = arg1;
      arg11 = build_one_cst (type);
    }
  same = NULL_TREE;

  if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
  else if (tree_fits_shwi_p (arg01)
	   && tree_fits_shwi_p (arg11))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = tree_to_shwi (arg01);
      int11 = tree_to_shwi (arg11);

      /* Move min of absolute values to int11.  */
      if (absu_hwi (int01) < absu_hwi (int11))
	{
	  tmp = int01, int01 = int11, int11 = tmp;
	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
	  maybe_same = alt0;
	  swap = true;
	}
      else
	maybe_same = arg11;

      if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
	  /* The remainder should not be a constant, otherwise we
	     end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
	     increased the number of multiplications necessary.  */
	  && TREE_CODE (arg10) != INTEGER_CST)
	{
	  alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
				  build_int_cst (TREE_TYPE (arg00),
						 int01 / int11));
	  alt1 = arg10;
	  same = maybe_same;
	  if (swap)
	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
	}
    }

  if (same)
    return fold_build2_loc (loc, MULT_EXPR, type,
			    fold_build2_loc (loc, code, type,
					     fold_convert_loc (loc, type, alt0),
					     fold_convert_loc (loc, type, alt1)),
			    fold_convert_loc (loc, type, same));

  return NULL_TREE;
}
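
/* Illustrative example (added by the editor, not part of the original
   sources): with an identical multiplicand, "i * 4 + i * 12" folds to
   "(4 + 12) * i", i.e. "i * 16"; the power-of-two path instead
   rewrites "i * 12 + j * 4" as "(i * 3 + j) * 4", trading two
   multiplications for one.  */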
/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  if ((off == -1 && total_bytes > len)
      || off >= total_bytes)
    return 0;
  if (off == -1)
    off = 0;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
	 number of bytes.  */
      value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);

      if (total_bytes > UNITS_PER_WORD)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      if (offset >= off
	  && offset - off < len)
	ptr[offset - off] = value;
    }
  return MIN (len, total_bytes - off);
}
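
/* Worked instance (added by the editor, not part of the original
   sources): encoding the 32-bit INTEGER_CST 0x01020304 on a
   little-endian target stores the bytes { 0x04, 0x03, 0x02, 0x01 };
   on a big-endian target the loop stores { 0x01, 0x02, 0x03, 0x04 }
   instead.  */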
/* Subroutine of native_encode_expr.  Encode the FIXED_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
{
  tree type = TREE_TYPE (expr);
  machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  FIXED_VALUE_TYPE value;
  tree i_value, i_type;

  if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return 0;

  i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);

  if (NULL_TREE == i_type
      || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
    return 0;

  value = TREE_FIXED_CST (expr);
  i_value = double_int_to_tree (i_type, value.data);

  return native_encode_int (i_value, ptr, len, off);
}
/* Subroutine of native_encode_expr.  Encode the REAL_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words, bitpos;
  unsigned char value;

  /* There are always 32 bits in each long, no matter the size of
     the hosts long.  We handle floating point representations with
     up to 192 bits.  */
  long tmp[6];

  if ((off == -1 && total_bytes > len)
      || off >= total_bytes)
    return 0;
  if (off == -1)
    off = 0;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));

  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));

      if (UNITS_PER_WORD < 4)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
      if (offset >= off
	  && offset - off < len)
	ptr[offset - off] = value;
    }
  return MIN (len, total_bytes - off);
}
/* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
{
  int rsize, isize;
  tree part;

  part = TREE_REALPART (expr);
  rsize = native_encode_expr (part, ptr, len, off);
  if (off == -1
      && rsize == 0)
    return 0;
  part = TREE_IMAGPART (expr);
  if (off != -1)
    off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
  isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
  if (off == -1
      && isize != rsize)
    return 0;
  return rsize + isize;
}
/* Subroutine of native_encode_expr.  Encode the VECTOR_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
{
  unsigned i, count;
  int size, offset;
  tree itype, elem;

  offset = 0;
  count = VECTOR_CST_NELTS (expr);
  itype = TREE_TYPE (TREE_TYPE (expr));
  size = GET_MODE_SIZE (TYPE_MODE (itype));
  for (i = 0; i < count; i++)
    {
      if (off >= size)
	{
	  off -= size;
	  continue;
	}
      elem = VECTOR_CST_ELT (expr, i);
      int res = native_encode_expr (elem, ptr+offset, len-offset, off);
      if ((off == -1 && res != size)
	  || res == 0)
	return 0;
      offset += res;
      if (offset >= len)
	return offset;
      if (off != -1)
	off = 0;
    }
  return offset;
}
/* Subroutine of native_encode_expr.  Encode the STRING_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
{
  tree type = TREE_TYPE (expr);
  HOST_WIDE_INT total_bytes;

  if (TREE_CODE (type) != ARRAY_TYPE
      || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
      || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
      || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
    return 0;
  total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
  if ((off == -1 && total_bytes > len)
      || off >= total_bytes)
    return 0;
  if (off == -1)
    off = 0;
  if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
    {
      int written = 0;
      if (off < TREE_STRING_LENGTH (expr))
	{
	  written = MIN (len, TREE_STRING_LENGTH (expr) - off);
	  memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
	}
      memset (ptr + written, 0,
	      MIN (total_bytes - written, len - written));
    }
  else
    memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
  return MIN (total_bytes - off, len);
}
/* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
   REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
   buffer PTR of length LEN bytes.  If OFF is not -1 then start
   the encoding at byte offset OFF and encode at most LEN bytes.
   Return the number of bytes placed in the buffer, or zero upon failure.  */

int
native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
{
  /* We don't support starting at negative offset and -1 is special.  */
  if (off < -1)
    return 0;

  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return native_encode_int (expr, ptr, len, off);

    case REAL_CST:
      return native_encode_real (expr, ptr, len, off);

    case FIXED_CST:
      return native_encode_fixed (expr, ptr, len, off);

    case COMPLEX_CST:
      return native_encode_complex (expr, ptr, len, off);

    case VECTOR_CST:
      return native_encode_vector (expr, ptr, len, off);

    case STRING_CST:
      return native_encode_string (expr, ptr, len, off);

    default:
      return 0;
    }
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_int (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));

  if (total_bytes > len
      || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return NULL_TREE;

  wide_int result = wi::from_buffer (ptr, total_bytes);

  return wide_int_to_tree (type, result);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a FIXED_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_fixed (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  double_int result;
  FIXED_VALUE_TYPE fixed_value;

  if (total_bytes > len
      || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return NULL_TREE;

  result = double_int::from_buffer (ptr, total_bytes);
  fixed_value = fixed_from_double_int (result, TYPE_MODE (type));

  return build_fixed (type, fixed_value);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a REAL_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_real (tree type, const unsigned char *ptr, int len)
{
  machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the hosts long.  We handle floating point representations with
     up to 192 bits.  */
  REAL_VALUE_TYPE r;
  long tmp[6];

  total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  if (total_bytes > len || total_bytes > 24)
    return NULL_TREE;
  int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  memset (tmp, 0, sizeof (tmp));
  for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      /* Both OFFSET and BYTE index within a long;
	 bitpos indexes the whole float.  */
      int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
      if (UNITS_PER_WORD < 4)
	{
	  int word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	{
	  offset = byte;
	  if (BYTES_BIG_ENDIAN)
	    {
	      /* Reverse bytes within each long, or within the entire float
		 if it's smaller than a long (for HFmode).  */
	      offset = MIN (3, total_bytes - 1) - offset;
	      gcc_assert (offset >= 0);
	    }
	}
      value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];

      tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
    }

  real_from_target (&r, tmp, mode);
  return build_real (type, r);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_complex (tree type, const unsigned char *ptr, int len)
{
  tree etype, rpart, ipart;
  int size;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  if (size * 2 > len)
    return NULL_TREE;
  rpart = native_interpret_expr (etype, ptr, size);
  if (!rpart)
    return NULL_TREE;
  ipart = native_interpret_expr (etype, ptr+size, size);
  if (!ipart)
    return NULL_TREE;
  return build_complex (type, rpart, ipart);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_vector (tree type, const unsigned char *ptr, int len)
{
  tree etype, elem;
  int i, size, count;
  tree *elements;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  count = TYPE_VECTOR_SUBPARTS (type);
  if (size * count > len)
    return NULL_TREE;

  elements = XALLOCAVEC (tree, count);
  for (i = count - 1; i >= 0; i--)
    {
      elem = native_interpret_expr (etype, ptr+(i*size), size);
      if (!elem)
	return NULL_TREE;
      elements[i] = elem;
    }
  return build_vector (type, elements);
}
/* Subroutine of fold_view_convert_expr.  Interpret the contents of
   the buffer PTR of length LEN as a constant of type TYPE.  For
   INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
   we return a REAL_CST, etc...  If the buffer cannot be interpreted,
   return NULL_TREE.  */

tree
native_interpret_expr (tree type, const unsigned char *ptr, int len)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      return native_interpret_int (type, ptr, len);

    case REAL_TYPE:
      return native_interpret_real (type, ptr, len);

    case FIXED_POINT_TYPE:
      return native_interpret_fixed (type, ptr, len);

    case COMPLEX_TYPE:
      return native_interpret_complex (type, ptr, len);

    case VECTOR_TYPE:
      return native_interpret_vector (type, ptr, len);

    default:
      return NULL_TREE;
    }
}
/* Returns true if we can interpret the contents of a native encoding
   as TYPE.  */

static bool
can_native_interpret_type_p (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case FIXED_POINT_TYPE:
    case REAL_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
      return true;
    default:
      return false;
    }
}
/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
   TYPE at compile-time.  If we're unable to perform the conversion
   return NULL_TREE.  */

static tree
fold_view_convert_expr (tree type, tree expr)
{
  /* We support up to 512-bit values (for V8DFmode).  */
  unsigned char buffer[64];
  int len;

  /* Check that the host and target are sane.  */
  if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    return NULL_TREE;

  len = native_encode_expr (expr, buffer, sizeof (buffer));
  if (len == 0)
    return NULL_TREE;

  return native_interpret_expr (type, buffer, len);
}
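
/* Illustrative example (added by the editor, not part of the original
   sources): fold_view_convert_expr evaluates type-punning at compile
   time by round-tripping through the target byte image, e.g.
   VIEW_CONVERT_EXPR<int>(1.0f) encodes the REAL_CST into its four
   target bytes and re-reads them as the INTEGER_CST 0x3f800000 on an
   IEEE single-precision target.  */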
/* Build an expression for the address of T.  Folds away INDIRECT_REF
   to avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  if (TREE_CODE (t) == INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);

      if (TREE_TYPE (t) != ptrtype)
	t = build1_loc (loc, NOP_EXPR, ptrtype, t);
    }
  else if (TREE_CODE (t) == MEM_REF
	   && integer_zerop (TREE_OPERAND (t, 1)))
    return TREE_OPERAND (t, 0);
  else if (TREE_CODE (t) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
    return fold_binary (POINTER_PLUS_EXPR, ptrtype,
			TREE_OPERAND (t, 0),
			convert_to_ptrofftype (TREE_OPERAND (t, 1)));
  else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
    {
      t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));

      if (TREE_TYPE (t) != ptrtype)
	t = fold_convert_loc (loc, ptrtype, t);
    }
  else
    t = build1_loc (loc, ADDR_EXPR, ptrtype, t);

  return t;
}
/* Build an expression for the address of T.  */

tree
build_fold_addr_expr_loc (location_t loc, tree t)
{
  tree ptrtype = build_pointer_type (TREE_TYPE (t));

  return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
}
/* Fold a unary expression of code CODE and type TYPE with operand
   OP0.  Return the folded expression if folding is successful.
   Otherwise, return NULL_TREE.  */

tree
fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
{
  tree tem;
  tree arg0;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 1);

  arg0 = op0;
  if (arg0)
    {
      if (CONVERT_EXPR_CODE_P (code)
	  || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
	{
	  /* Don't use STRIP_NOPS, because signedness of argument type
	     matters.  */
	  STRIP_SIGN_NOPS (arg0);
	}
      else
	{
	  /* Strip any conversions that don't change the mode.  This
	     is safe for every expression, except for a comparison
	     expression because its signedness is derived from its
	     operands.

	     Note that this is done as an internal manipulation within
	     the constant folder, in order to find the simplest
	     representation of the arguments so that their form can be
	     studied.  In any cases, the appropriate type conversions
	     should be put back in the tree that will get out of the
	     constant folder.  */
	  STRIP_NOPS (arg0);
	}

      if (CONSTANT_CLASS_P (arg0))
	{
	  tree tem = const_unop (code, type, arg0);
	  if (tem)
	    {
	      if (TREE_TYPE (tem) != type)
		tem = fold_convert_loc (loc, type, tem);
	      return tem;
	    }
	}
    }

  tem = generic_simplify (loc, code, type, op0);
  if (tem)
    return tem;

  if (TREE_CODE_CLASS (code) == tcc_unary)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
		       fold_build1_loc (loc, code, type,
					fold_convert_loc (loc, TREE_TYPE (op0),
							  TREE_OPERAND (arg0, 1))));
      else if (TREE_CODE (arg0) == COND_EXPR)
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg02 = TREE_OPERAND (arg0, 2);
	  if (! VOID_TYPE_P (TREE_TYPE (arg01)))
	    arg01 = fold_build1_loc (loc, code, type,
				     fold_convert_loc (loc,
						       TREE_TYPE (op0), arg01));
	  if (! VOID_TYPE_P (TREE_TYPE (arg02)))
	    arg02 = fold_build1_loc (loc, code, type,
				     fold_convert_loc (loc,
						       TREE_TYPE (op0), arg02));
	  tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
				 arg01, arg02);

	  /* If this was a conversion, and all we did was to move into
	     inside the COND_EXPR, bring it back out.  But leave it if
	     it is a conversion from integer to integer and the
	     result precision is no wider than a word since such a
	     conversion is cheap and may be optimized away by combine,
	     while it couldn't if it were outside the COND_EXPR.  Then return
	     so we don't get into an infinite recursion loop taking the
	     conversion out and then back in.  */

	  if ((CONVERT_EXPR_CODE_P (code)
	       || code == NON_LVALUE_EXPR)
	      && TREE_CODE (tem) == COND_EXPR
	      && TREE_CODE (TREE_OPERAND (tem, 1)) == code
	      && TREE_CODE (TREE_OPERAND (tem, 2)) == code
	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
	      && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
	      && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
		     && (INTEGRAL_TYPE_P
			 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
		     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
		  || flag_syntax_only))
	    tem = build1_loc (loc, code, type,
			      build3 (COND_EXPR,
				      TREE_TYPE (TREE_OPERAND
						 (TREE_OPERAND (tem, 1), 0)),
				      TREE_OPERAND (tem, 0),
				      TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
				      TREE_OPERAND (TREE_OPERAND (tem, 2),
						    0)));
	  return tem;
	}
    }

  switch (code)
    {
    case NON_LVALUE_EXPR:
      if (!maybe_lvalue_p (op0))
	return fold_convert_loc (loc, type, op0);
      return NULL_TREE;

    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      if (COMPARISON_CLASS_P (op0))
	{
	  /* If we have (type) (a CMP b) and type is an integral type, return
	     new expression involving the new type.  Canonicalize
	     (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
	     non-integral type.
	     Do not fold the result as that would not simplify further, also
	     folding again results in recursions.  */
	  if (TREE_CODE (type) == BOOLEAN_TYPE)
	    return build2_loc (loc, TREE_CODE (op0), type,
			       TREE_OPERAND (op0, 0),
			       TREE_OPERAND (op0, 1));
	  else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
		   && TREE_CODE (type) != VECTOR_TYPE)
	    return build3_loc (loc, COND_EXPR, type, op0,
			       constant_boolean_node (true, type),
			       constant_boolean_node (false, type));
	}

      /* Handle (T *)&A.B.C for A being of type T and B and C
	 living at offset zero.  This occurs frequently in
	 C++ upcasting and then accessing the base.  */
      if (TREE_CODE (op0) == ADDR_EXPR
	  && POINTER_TYPE_P (type)
	  && handled_component_p (TREE_OPERAND (op0, 0)))
	{
	  HOST_WIDE_INT bitsize, bitpos;
	  tree offset;
	  machine_mode mode;
	  int unsignedp, reversep, volatilep;
	  tree base
	    = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
				   &offset, &mode, &unsignedp, &reversep,
				   &volatilep, false);
	  /* If the reference was to a (constant) zero offset, we can use
	     the address of the base if it has the same base type
	     as the result type and the pointer type is unqualified.  */
	  if (! offset && bitpos == 0
	      && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
		  == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
	      && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
	    return fold_convert_loc (loc, type,
				     build_fold_addr_expr_loc (loc, base));
	}

      if (TREE_CODE (op0) == MODIFY_EXPR
	  && TREE_CONSTANT (TREE_OPERAND (op0, 1))
	  /* Detect assigning a bitfield.  */
	  && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
	       && DECL_BIT_FIELD
		  (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
	{
	  /* Don't leave an assignment inside a conversion
	     unless assigning a bitfield.  */
	  tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
	  /* First do the assignment, then return converted constant.  */
	  tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
	  TREE_NO_WARNING (tem) = 1;
	  TREE_USED (tem) = 1;
	  return tem;
	}

      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
	 constants (if x has signed type, the sign bit cannot be set
	 in c).  This folds extension into the BIT_AND_EXPR.
	 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
	 very likely don't have maximal range for their precision and this
	 transformation effectively doesn't preserve non-maximal ranges.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TREE_CODE (op0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
	{
	  tree and_expr = op0;
	  tree and0 = TREE_OPERAND (and_expr, 0);
	  tree and1 = TREE_OPERAND (and_expr, 1);
	  int change = 0;

	  if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
	      || (TYPE_PRECISION (type)
		  <= TYPE_PRECISION (TREE_TYPE (and_expr))))
	    change = 1;
	  else if (TYPE_PRECISION (TREE_TYPE (and1))
		   <= HOST_BITS_PER_WIDE_INT
		   && tree_fits_uhwi_p (and1))
	    {
	      unsigned HOST_WIDE_INT cst;

	      cst = tree_to_uhwi (and1);
	      cst &= HOST_WIDE_INT_M1U
		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
	      change = (cst == 0);
	      if (change
		  && !flag_syntax_only
		  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
		      == ZERO_EXTEND))
		{
		  tree uns = unsigned_type_for (TREE_TYPE (and0));
		  and0 = fold_convert_loc (loc, uns, and0);
		  and1 = fold_convert_loc (loc, uns, and1);
		}
	    }
	  if (change)
	    {
	      tem = force_fit_type (type, wi::to_widest (and1), 0,
				    TREE_OVERFLOW (and1));
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_convert_loc (loc, type, and0), tem);
	    }
	}

      /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
	 cast (T1)X will fold away.  We assume that this happens when X itself
	 is a cast.  */
      if (POINTER_TYPE_P (type)
	  && TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);

	  return fold_build_pointer_plus_loc
		   (loc, fold_convert_loc (loc, type, arg00), arg01);
	}

      /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
	 of the same precision, and X is an integer type not narrower than
	 types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (op0) == BIT_NOT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
	  && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
	{
	  tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
	      && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
	    return fold_build1_loc (loc, BIT_NOT_EXPR, type,
				    fold_convert_loc (loc, type, tem));
	}

      /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
	 type of X and Y (integer types only).  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (op0) == MULT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
	{
	  /* Be careful not to introduce new overflows.  */
	  tree mult_type;
	  if (TYPE_OVERFLOW_WRAPS (type))
	    mult_type = type;
	  else
	    mult_type = unsigned_type_for (type);

	  if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
	    {
	      tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
				     fold_convert_loc (loc, mult_type,
						       TREE_OPERAND (op0, 0)),
				     fold_convert_loc (loc, mult_type,
						       TREE_OPERAND (op0, 1)));
	      return fold_convert_loc (loc, type, tem);
	    }
	}

      return NULL_TREE;

    case VIEW_CONVERT_EXPR:
      if (TREE_CODE (op0) == MEM_REF)
	{
	  tem = fold_build2_loc (loc, MEM_REF, type,
				 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
	  REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
	  return tem;
	}

      return NULL_TREE;

    case NEGATE_EXPR:
      tem = fold_negate_expr (loc, arg0);
      if (tem)
	return fold_convert_loc (loc, type, tem);
      return NULL_TREE;

    case ABS_EXPR:
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      if (TREE_CODE (arg0) == NOP_EXPR
	  && TREE_CODE (type) == REAL_TYPE)
	{
	  tree targ0 = strip_float_extensions (arg0);
	  if (targ0 != arg0)
	    return fold_convert_loc (loc, type,
				     fold_build1_loc (loc, ABS_EXPR,
						      TREE_TYPE (targ0),
						      targ0));
	}
      return NULL_TREE;

    case BIT_NOT_EXPR:
      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
				    fold_convert_loc (loc, type,
						      TREE_OPERAND (arg0, 0)))))
	return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
					 fold_convert_loc (loc, type,
							   TREE_OPERAND (arg0, 1)))))
	return fold_build2_loc (loc, BIT_XOR_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)), tem);
      return NULL_TREE;

    case TRUTH_NOT_EXPR:
      /* Note that the operand of this must be an int
	 and its values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language,
	 but we don't handle values other than 1 correctly yet.)  */
      tem = fold_truth_not_expr (loc, arg0);
      if (!tem)
	return NULL_TREE;
      return fold_convert_loc (loc, type, tem);

    case INDIRECT_REF:
      /* Fold *&X to X if X is an lvalue.  */
      if (TREE_CODE (op0) == ADDR_EXPR)
	{
	  tree op00 = TREE_OPERAND (op0, 0);
	  if ((TREE_CODE (op00) == VAR_DECL
	       || TREE_CODE (op00) == PARM_DECL
	       || TREE_CODE (op00) == RESULT_DECL)
	      && !TREE_READONLY (op00))
	    return op00;
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* If the operation was a conversion do _not_ mark a resulting constant
   with TREE_OVERFLOW if the original constant was not.  These conversions
   have implementation defined behavior and retaining the TREE_OVERFLOW
   flag here would confuse later passes such as VRP.  */

tree
fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
				tree type, tree op0)
{
  tree res = fold_unary_loc (loc, code, type, op0);
  if (res
      && TREE_CODE (res) == INTEGER_CST
      && TREE_CODE (op0) == INTEGER_CST
      && CONVERT_EXPR_CODE_P (code))
    TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);

  return res;
}
/* Fold a binary bitwise/truth expression of code CODE and type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting expression.
   ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_truth_andor (location_t loc, enum tree_code code, tree type,
		  tree arg0, tree arg1, tree op0, tree op1)
{
  tree tem;

  /* We only do these simplifications if we are optimizing.  */
  if (!optimize)
    return NULL_TREE;

  /* Check for things like (A || B) && (A || C).  We can convert this
     to A || (B && C).  Note that either operator can be any of the four
     truth and/or operations and the transformation will still be
     valid.  Also note that we only care about order for the
     ANDIF and ORIF operators.  If B contains side effects, this
     might change the truth-value of A.  */
  if (TREE_CODE (arg0) == TREE_CODE (arg1)
      && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
	  || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
	  || TREE_CODE (arg0) == TRUTH_AND_EXPR
	  || TREE_CODE (arg0) == TRUTH_OR_EXPR)
      && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
    {
      tree a00 = TREE_OPERAND (arg0, 0);
      tree a01 = TREE_OPERAND (arg0, 1);
      tree a10 = TREE_OPERAND (arg1, 0);
      tree a11 = TREE_OPERAND (arg1, 1);
      int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
			  || TREE_CODE (arg0) == TRUTH_AND_EXPR)
			 && (code == TRUTH_AND_EXPR
			     || code == TRUTH_OR_EXPR));

      if (operand_equal_p (a00, a10, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
				fold_build2_loc (loc, code, type, a01, a11));
      else if (commutative && operand_equal_p (a00, a11, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
				fold_build2_loc (loc, code, type, a01, a10));
      else if (commutative && operand_equal_p (a01, a10, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
				fold_build2_loc (loc, code, type, a00, a11));

      /* This case is tricky because we must either have commutative
	 operators or else A10 must not have side-effects.  */

      else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
	       && operand_equal_p (a01, a11, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type,
				fold_build2_loc (loc, code, type, a00, a10),
				a01);
    }

  /* See if we can build a range comparison.  */
  if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
    return tem;

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
      if (tem)
	return fold_build2_loc (loc, code, type, tem, arg1);
    }

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
      if (tem)
	return fold_build2_loc (loc, code, type, arg0, tem);
    }

  /* Check for the possibility of merging component references.  If our
     lhs is another similar operation, try to merge its rhs with our
     rhs.  Then try to merge our lhs and rhs.  */
  if (TREE_CODE (arg0) == code
      && 0 != (tem = fold_truth_andor_1 (loc, code, type,
					 TREE_OPERAND (arg0, 1), arg1)))
    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

  if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
    return tem;

  if (LOGICAL_OP_NON_SHORT_CIRCUIT
      && (code == TRUTH_AND_EXPR
	  || code == TRUTH_ANDIF_EXPR
	  || code == TRUTH_OR_EXPR
	  || code == TRUTH_ORIF_EXPR))
    {
      enum tree_code ncode, icode;

      ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
	      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
      icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;

      /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
	 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
	 We don't want to pack more than two leafs to a non-IF AND/OR
	 expression.
	 If tree-code of left-hand operand isn't an AND/OR-IF code and not
	 equal to IF-CODE, then we don't want to add right-hand operand.
	 If the inner right-hand side of left-hand operand has
	 side-effects, or isn't simple, then we can't add to it,
	 as otherwise we might destroy if-sequence.  */
      if (TREE_CODE (arg0) == icode
	  && simple_operand_p_2 (arg1)
	  /* Needed for sequence points to handle trappings, and
	     side-effects.  */
	  && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
	{
	  tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
				 arg1);
	  return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
				  tem);
	}
      /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
	 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
      else if (TREE_CODE (arg1) == icode
	       && simple_operand_p_2 (arg0)
	       /* Needed for sequence points to handle trappings, and
		  side-effects.  */
	       && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_build2_loc (loc, ncode, type,
				 arg0, TREE_OPERAND (arg1, 0));
	  return fold_build2_loc (loc, icode, type, tem,
				  TREE_OPERAND (arg1, 1));
	}
      /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
	 into (A OR B).
	 For sequence point consistency, we need to check for trapping,
	 and side-effects.  */
      else if (code == icode && simple_operand_p_2 (arg0)
	       && simple_operand_p_2 (arg1))
	return fold_build2_loc (loc, ncode, type, arg0, arg1);
    }

  return NULL_TREE;
}
/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
   valid if signed overflow is undefined.  */

static tree
maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
				 tree arg0, tree arg1,
				 bool *strict_overflow_p)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
  int sgn0;

  /* Match A +- CST code arg1.  We can change this only if overflow
     is undefined.  */
  if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
	/* In principle pointers also have undefined overflow behavior,
	   but that causes problems elsewhere.  */
	&& !POINTER_TYPE_P (TREE_TYPE (arg0))
	&& (code0 == MINUS_EXPR
	    || code0 == PLUS_EXPR)
	&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
    return NULL_TREE;

  /* Identify the constant in arg0 and its sign.  */
  cst0 = TREE_OPERAND (arg0, 1);
  sgn0 = tree_int_cst_sgn (cst0);

  /* Overflowed constants and zero will cause problems.  */
  if (integer_zerop (cst0)
      || TREE_OVERFLOW (cst0))
    return NULL_TREE;

  /* See if we can reduce the magnitude of the constant in
     arg0 by changing the comparison code.  */
  /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
  if (code == LT_EXPR
      && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
    code = LE_EXPR;
  /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
  else if (code == GT_EXPR
	   && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
    code = GE_EXPR;
  /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
  else if (code == LE_EXPR
	   && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
    code = LT_EXPR;
  /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
  else if (code == GE_EXPR
	   && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
    code = GT_EXPR;
  else
    return NULL_TREE;
  *strict_overflow_p = true;

  /* Now build the constant reduced in magnitude.  But not if that
     would produce one outside of its type's range.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
      && ((sgn0 == 1
	   && TYPE_MIN_VALUE (TREE_TYPE (cst0))
	   && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
	  || (sgn0 == -1
	      && TYPE_MAX_VALUE (TREE_TYPE (cst0))
	      && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
    return NULL_TREE;

  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
		       cst0, build_int_cst (TREE_TYPE (cst0), 1));
  t = fold_build2_loc (loc, code0, TREE_TYPE (arg0),
		       TREE_OPERAND (arg0, 0), t);
  t = fold_convert (TREE_TYPE (arg1), t);

  return fold_build2_loc (loc, code, type, t, arg1);
}
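
/* For example, with undefined signed overflow,

       x - 5 < y    is canonicalized to    x - 4 <= y

   trading a strict comparison for a non-strict one while shrinking the
   magnitude of the constant by one; the TYPE_MIN/TYPE_MAX guard above
   prevents creating a constant outside the type's range.  */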
/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
   overflow further.  Try to decrease the magnitude of constants involved
   by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
   and put sole constants at the second argument position.
   Returns the canonicalized tree if changed, otherwise NULL_TREE.  */

static tree
maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
			       tree arg0, tree arg1)
{
  tree t;
  bool strict_overflow_p;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when reducing constant in comparison");

  /* Try canonicalization by simplifying arg0.  */
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
				       &strict_overflow_p);
  if (t)
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
      return t;
    }

  /* Try canonicalization by simplifying arg1 using the swapped
     comparison.  */
  code = swap_tree_comparison (code);
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
				       &strict_overflow_p);
  if (t && strict_overflow_p)
    fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
  return t;
}
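
/* The second attempt covers the mirrored case where the A +- CST form is
   in ARG1, e.g.  y > x - 5:  swapping gives  x - 5 < y, which the helper
   then shrinks to  x - 4 <= y  as above.  */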
/* Return whether BASE + OFFSET + BITPOS may wrap around the address
   space.  This is used to avoid issuing overflow warnings for
   expressions like &p->x which can not wrap.  */

static bool
pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
{
  if (!POINTER_TYPE_P (TREE_TYPE (base)))
    return true;

  if (bitpos < 0)
    return true;

  wide_int wi_offset;
  int precision = TYPE_PRECISION (TREE_TYPE (base));
  if (offset == NULL_TREE)
    wi_offset = wi::zero (precision);
  else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
    return true;
  else
    wi_offset = offset;

  bool overflow;
  wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
  wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
  if (overflow)
    return true;

  if (!wi::fits_uhwi_p (total))
    return true;

  HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
  if (size <= 0)
    return true;

  /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
     array.  */
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      HOST_WIDE_INT base_size;

      base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
      if (base_size > 0 && size < base_size)
	size = base_size;
    }

  return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
}
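
/* E.g. for &p->x with a small field offset into *p, TOTAL is the field
   offset and SIZE the size of *p, so this returns false and no spurious
   overflow warning is issued; a constant offset larger than the object,
   by contrast, is reported as possibly wrapping.  */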
/* Return the HOST_WIDE_INT least significant bits of T, a sizetype
   kind INTEGER_CST.  This makes sure to properly sign-extend the
   constant.  */

static HOST_WIDE_INT
size_low_cst (const_tree t)
{
  HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
  int prec = TYPE_PRECISION (TREE_TYPE (t));
  if (prec < HOST_BITS_PER_WIDE_INT)
    return sext_hwi (w, prec);
  return w;
}
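
/* E.g. on a host with 64-bit HOST_WIDE_INT, a 32-bit sizetype constant
   0xfffffffc is returned as -4 rather than 4294967292, which is what the
   pointer-offset folding in fold_comparison below expects.  */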
/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary should call fold_binary.  Fold a comparison with
   tree code CODE and type TYPE with operands OP0 and OP1.  Return
   the folded comparison or NULL_TREE.  */

static tree
fold_comparison (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
  tree arg0, arg1, tem;

  arg0 = op0;
  arg1 = op1;

  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1.  */
  if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (equality_code
	  || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && TREE_CODE (arg1) == INTEGER_CST
      && !TREE_OVERFLOW (arg1))
    {
      const enum tree_code
	reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
      tree variable = TREE_OPERAND (arg0, 0);
      tree new_const = int_const_binop (reverse_op, const2, const1);

      /* If the constant operation overflowed this can be
	 simplified as a comparison against INT_MAX/INT_MIN.  */
      if (TREE_OVERFLOW (new_const)
	  && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
	{
	  int const1_sgn = tree_int_cst_sgn (const1);
	  enum tree_code code2 = code;

	  /* Get the sign of the constant on the lhs if the
	     operation were VARIABLE + CONST1.  */
	  if (TREE_CODE (arg0) == MINUS_EXPR)
	    const1_sgn = -const1_sgn;

	  /* The sign of the constant determines if we overflowed
	     INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
	     Canonicalize to the INT_MIN overflow by swapping the comparison
	     if necessary.  */
	  if (const1_sgn == -1)
	    code2 = swap_tree_comparison (code);

	  /* We now can look at the canonicalized case
	       VARIABLE + 1  CODE2  INT_MIN
	     and decide on the result.  */
	  switch (code2)
	    {
	    case EQ_EXPR:
	    case LT_EXPR:
	    case LE_EXPR:
	      return
		omit_one_operand_loc (loc, type, boolean_false_node, variable);

	    case NE_EXPR:
	    case GE_EXPR:
	    case GT_EXPR:
	      return
		omit_one_operand_loc (loc, type, boolean_true_node, variable);

	    default:
	      gcc_unreachable ();
	    }
	}
      else
	{
	  if (!equality_code)
	    fold_overflow_warning ("assuming signed overflow does not occur "
				   "when changing X +- C1 cmp C2 to "
				   "X cmp C2 -+ C1",
				   WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type, variable, new_const);
	}
    }

  /* For comparisons of pointers we can decompose it to a compile time
     comparison of the base objects and the offsets into the object.
     This requires at least one operand being an ADDR_EXPR or a
     POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == ADDR_EXPR
	  || TREE_CODE (arg1) == ADDR_EXPR
	  || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
    {
      tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
      HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
      machine_mode mode;
      int volatilep, reversep, unsignedp;
      bool indirect_base0 = false, indirect_base1 = false;

      /* Get base and offset for the access.  Strip ADDR_EXPR for
	 get_inner_reference, but put it back by stripping INDIRECT_REF
	 off the base object if possible.  indirect_baseN will be true
	 if baseN is not an address but refers to the object itself.  */
      base0 = arg0;
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  base0
	    = get_inner_reference (TREE_OPERAND (arg0, 0),
				   &bitsize, &bitpos0, &offset0, &mode,
				   &unsignedp, &reversep, &volatilep, false);
	  if (TREE_CODE (base0) == INDIRECT_REF)
	    base0 = TREE_OPERAND (base0, 0);
	  else
	    indirect_base0 = true;
	}
      else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	{
	  base0 = TREE_OPERAND (arg0, 0);
	  STRIP_SIGN_NOPS (base0);
	  if (TREE_CODE (base0) == ADDR_EXPR)
	    {
	      base0 = TREE_OPERAND (base0, 0);
	      indirect_base0 = true;
	    }
	  offset0 = TREE_OPERAND (arg0, 1);
	  if (tree_fits_shwi_p (offset0))
	    {
	      HOST_WIDE_INT off = size_low_cst (offset0);
	      if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
				   * BITS_PER_UNIT)
		  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
		{
		  bitpos0 = off * BITS_PER_UNIT;
		  offset0 = NULL_TREE;
		}
	    }
	}

      base1 = arg1;
      if (TREE_CODE (arg1) == ADDR_EXPR)
	{
	  base1
	    = get_inner_reference (TREE_OPERAND (arg1, 0),
				   &bitsize, &bitpos1, &offset1, &mode,
				   &unsignedp, &reversep, &volatilep, false);
	  if (TREE_CODE (base1) == INDIRECT_REF)
	    base1 = TREE_OPERAND (base1, 0);
	  else
	    indirect_base1 = true;
	}
      else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	{
	  base1 = TREE_OPERAND (arg1, 0);
	  STRIP_SIGN_NOPS (base1);
	  if (TREE_CODE (base1) == ADDR_EXPR)
	    {
	      base1 = TREE_OPERAND (base1, 0);
	      indirect_base1 = true;
	    }
	  offset1 = TREE_OPERAND (arg1, 1);
	  if (tree_fits_shwi_p (offset1))
	    {
	      HOST_WIDE_INT off = size_low_cst (offset1);
	      if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
				   * BITS_PER_UNIT)
		  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
		{
		  bitpos1 = off * BITS_PER_UNIT;
		  offset1 = NULL_TREE;
		}
	    }
	}

      /* If we have equivalent bases we might be able to simplify.  */
      if (indirect_base0 == indirect_base1
	  && operand_equal_p (base0, base1,
			      indirect_base0 ? OEP_ADDRESS_OF : 0))
	{
	  /* We can fold this expression to a constant if the non-constant
	     offset parts are equal.  */
	  if ((offset0 == offset1
	       || (offset0 && offset1
		   && operand_equal_p (offset0, offset1, 0)))
	      && (equality_code
		  || (indirect_base0 && DECL_P (base0))
		  || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      if (!equality_code
		  && bitpos0 != bitpos1
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_CONDITIONAL);

	      switch (code)
		{
		case EQ_EXPR:
		  return constant_boolean_node (bitpos0 == bitpos1, type);
		case NE_EXPR:
		  return constant_boolean_node (bitpos0 != bitpos1, type);
		case LT_EXPR:
		  return constant_boolean_node (bitpos0 < bitpos1, type);
		case LE_EXPR:
		  return constant_boolean_node (bitpos0 <= bitpos1, type);
		case GE_EXPR:
		  return constant_boolean_node (bitpos0 >= bitpos1, type);
		case GT_EXPR:
		  return constant_boolean_node (bitpos0 > bitpos1, type);
		default:;
		}
	    }
	  /* We can simplify the comparison to a comparison of the variable
	     offset parts if the constant offset parts are equal.
	     Be careful to use signed sizetype here because otherwise we
	     mess with array offsets in the wrong way.  This is possible
	     because pointer arithmetic is restricted to remain within an
	     object and overflow on pointer differences is undefined as of
	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
	  else if (bitpos0 == bitpos1
		   && (equality_code
		       || (indirect_base0 && DECL_P (base0))
		       || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      /* By converting to signed sizetype we cover middle-end pointer
		 arithmetic which operates on unsigned pointer types of size
		 type size and ARRAY_REF offsets which are properly sign or
		 zero extended from their type in case it is narrower than
		 sizetype.  */
	      if (offset0 == NULL_TREE)
		offset0 = build_int_cst (ssizetype, 0);
	      else
		offset0 = fold_convert_loc (loc, ssizetype, offset0);
	      if (offset1 == NULL_TREE)
		offset1 = build_int_cst (ssizetype, 0);
	      else
		offset1 = fold_convert_loc (loc, ssizetype, offset1);

	      if (!equality_code
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_COMPARISON);

	      return fold_build2_loc (loc, code, type, offset0, offset1);
	    }
	}
      /* For equal offsets we can simplify to a comparison of the
	 base addresses.  */
      else if (bitpos0 == bitpos1
	       && (indirect_base0
		   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
	       && (indirect_base1
		   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
	       && ((offset0 == offset1)
		   || (offset0 && offset1
		       && operand_equal_p (offset0, offset1, 0))))
	{
	  if (indirect_base0)
	    base0 = build_fold_addr_expr_loc (loc, base0);
	  if (indirect_base1)
	    base1 = build_fold_addr_expr_loc (loc, base1);
	  return fold_build2_loc (loc, code, type, base0, base1);
	}
    }

  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one and has the same sign.  */
  if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = TREE_OPERAND (arg1, 1);
      tree variable1 = TREE_OPERAND (arg0, 0);
      tree variable2 = TREE_OPERAND (arg1, 0);
      tree cst;
      const char * const warnmsg = G_("assuming signed overflow does not "
				      "occur when combining constants around "
				      "a comparison");

      /* Put the constant on the side where it doesn't overflow and is
	 of lower absolute value and of same sign than before.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const2, const1);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
	  && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  variable1,
				  fold_build2_loc (loc, TREE_CODE (arg1),
						   TREE_TYPE (arg1),
						   variable2, cst));
	}

      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const1, const2);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
	  && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  fold_build2_loc (loc, TREE_CODE (arg0),
						   TREE_TYPE (arg0),
						   variable1, cst),
				  variable2);
	}
    }

  tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
  if (tem)
    return tem;

  /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
     constant, we can simplify it.  */
  if (TREE_CODE (arg1) == INTEGER_CST
      && (TREE_CODE (arg0) == MIN_EXPR
	  || TREE_CODE (arg0) == MAX_EXPR)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
    {
      tem = optimize_minmax_comparison (loc, code, type, op0, op1);
      if (tem)
	return tem;
    }

  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like  ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;
      int save_p = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
	  /* Don't handle degenerate cases here; they should already
	     have been handled anyway.  */
	  && cval1 != 0 && cval2 != 0
	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
	{
	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

	  /* We can't just pass T to eval_subst in case cval1 or cval2
	     was the same as ARG1.  */

	  tree high_result
	    = fold_build2_loc (loc, code, type,
			       eval_subst (loc, arg0, cval1, maxval,
					   cval2, minval),
			       arg1);
	  tree equal_result
	    = fold_build2_loc (loc, code, type,
			       eval_subst (loc, arg0, cval1, maxval,
					   cval2, maxval),
			       arg1);
	  tree low_result
	    = fold_build2_loc (loc, code, type,
			       eval_subst (loc, arg0, cval1, minval,
					   cval2, maxval),
			       arg1);

	  /* All three of these results should be 0 or 1.  Confirm they are.
	     Then use those values to select the proper code to use.  */

	  if (TREE_CODE (high_result) == INTEGER_CST
	      && TREE_CODE (equal_result) == INTEGER_CST
	      && TREE_CODE (low_result) == INTEGER_CST)
	    {
	      /* Make a 3-bit mask with the high-order bit being the
		 value for `>', the next for '=', and the low for '<'.  */
	      switch ((integer_onep (high_result) * 4)
		      + (integer_onep (equal_result) * 2)
		      + integer_onep (low_result))
		{
		case 0:
		  /* Always false.  */
		  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
		case 1:
		  code = LT_EXPR;
		  break;
		case 2:
		  code = EQ_EXPR;
		  break;
		case 3:
		  code = LE_EXPR;
		  break;
		case 4:
		  code = GT_EXPR;
		  break;
		case 5:
		  code = NE_EXPR;
		  break;
		case 6:
		  code = GE_EXPR;
		  break;
		case 7:
		  /* Always true.  */
		  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
		}

	      if (save_p)
		{
		  tem = save_expr (build2 (code, type, cval1, cval2));
		  SET_EXPR_LOCATION (tem, loc);
		  return tem;
		}
	      return fold_build2_loc (loc, code, type, cval1, cval2);
	    }
	}
    }

  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
     into a single range test.  */
  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
      && TREE_CODE (arg1) == INTEGER_CST
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !integer_zerop (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (arg1))
    {
      tem = fold_div_compare (loc, code, type, arg0, arg1);
      if (tem != NULL_TREE)
	return tem;
    }

  return NULL_TREE;
}
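
/* For example, for signed x the first transformation above rewrites

       x + 10 < 20    to    x < 10

   and when the adjusted constant overflows the comparison degenerates to
   a constant:  x - 1 < INT_MAX  cannot be false without undefined signed
   overflow, so it folds to true.  */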
/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */

static tree
fold_mult_zconjz (location_t loc, tree type, tree expr)
{
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      expr = save_expr (expr);
      rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
      ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
    }

  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2_loc (loc, PLUS_EXPR, itype,
			 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
			 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
  return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
			  build_zero_cst (itype));
}
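
/* The identity used above: for z = a + b*i,

       z * conj(z) = (a + b*i)(a - b*i) = a*a + b*b

   so the product is purely real and is emitted as
   COMPLEX_EXPR <a*a + b*b, 0>.  */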
/* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
   CONSTRUCTOR ARG into array ELTS and return true if successful.  */

static bool
vec_cst_ctor_to_array (tree arg, tree *elts)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;

  if (TREE_CODE (arg) == VECTOR_CST)
    {
      for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
	elts[i] = VECTOR_CST_ELT (arg, i);
    }
  else if (TREE_CODE (arg) == CONSTRUCTOR)
    {
      constructor_elt *elt;

      FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
	if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
	  return false;
	else
	  elts[i] = elt->value;
    }
  else
    return false;
  for (; i < nelts; i++)
    elts[i]
      = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
  return true;
}
/* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
   selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
   NULL_TREE otherwise.  */

static tree
fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
  tree *elts;
  bool need_ctor = false;

  gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
	      && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
  if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
      || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
    return NULL_TREE;

  elts = XALLOCAVEC (tree, nelts * 3);
  if (!vec_cst_ctor_to_array (arg0, elts)
      || !vec_cst_ctor_to_array (arg1, elts + nelts))
    return NULL_TREE;

  for (i = 0; i < nelts; i++)
    {
      if (!CONSTANT_CLASS_P (elts[sel[i]]))
	need_ctor = true;
      elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
    }

  if (need_ctor)
    {
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, nelts);
      for (i = 0; i < nelts; i++)
	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
      return build_constructor (type, v);
    }
  else
    return build_vector (type, &elts[2 * nelts]);
}
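
/* E.g. permuting {1,2,3,4} and {5,6,7,8} with SEL = {0,5,2,7} selects
   elements 0 and 2 from the first vector and 1 and 3 from the second,
   giving {1,6,3,8}; the result is a VECTOR_CST unless some selected
   element is non-constant, in which case a CONSTRUCTOR is built.  */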
/* Try to fold a pointer difference of type TYPE two address expressions of
   array references AREF0 and AREF1 using location LOC.  Return a
   simplified expression for the difference or NULL_TREE.  */

static tree
fold_addr_of_array_ref_difference (location_t loc, tree type,
				   tree aref0, tree aref1)
{
  tree base0 = TREE_OPERAND (aref0, 0);
  tree base1 = TREE_OPERAND (aref1, 0);
  tree base_offset = build_int_cst (type, 0);

  /* If the bases are array references as well, recurse.  If the bases
     are pointer indirections compute the difference of the pointers.
     If the bases are equal, we are set.  */
  if ((TREE_CODE (base0) == ARRAY_REF
       && TREE_CODE (base1) == ARRAY_REF
       && (base_offset
	   = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
      || (INDIRECT_REF_P (base0)
	  && INDIRECT_REF_P (base1)
	  && (base_offset
	      = fold_binary_loc (loc, MINUS_EXPR, type,
				 fold_convert (type, TREE_OPERAND (base0, 0)),
				 fold_convert (type,
					       TREE_OPERAND (base1, 0)))))
      || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
    {
      tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
      tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
      tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
      tree diff = build2 (MINUS_EXPR, type, op0, op1);
      return fold_build2_loc (loc, PLUS_EXPR, type,
			      base_offset,
			      fold_build2_loc (loc, MULT_EXPR, type,
					       diff, esz));
    }
  return NULL_TREE;
}
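
/* E.g. for &a[i] - &a[j] this returns (i - j) * sizeof (a[0]) as a byte
   difference; the division generated for typed pointer subtraction then
   typically cancels the element size again, leaving just i - j.  */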
/* If the real or vector real constant CST of type TYPE has an exact
   inverse, return it, else return NULL.  */

static tree
exact_inverse (tree type, tree cst)
{
  REAL_VALUE_TYPE r;
  tree unit_type, *elts;
  machine_mode mode;
  unsigned vec_nelts, i;

  switch (TREE_CODE (cst))
    {
    case REAL_CST:
      r = TREE_REAL_CST (cst);

      if (exact_real_inverse (TYPE_MODE (type), &r))
	return build_real (type, r);

      return NULL_TREE;

    case VECTOR_CST:
      vec_nelts = VECTOR_CST_NELTS (cst);
      elts = XALLOCAVEC (tree, vec_nelts);
      unit_type = TREE_TYPE (type);
      mode = TYPE_MODE (unit_type);

      for (i = 0; i < vec_nelts; i++)
	{
	  r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
	  if (!exact_real_inverse (mode, &r))
	    return NULL_TREE;
	  elts[i] = build_real (unit_type, r);
	}

      return build_vector (type, elts);

    default:
      return NULL_TREE;
    }
}
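
/* E.g. 4.0 has the exact binary inverse 0.25, so a division by 4.0 can
   be rewritten as a multiplication by 0.25; 3.0 has no exact inverse,
   so NULL is returned and the division must stay.  */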
/* Mask out the tz least significant bits of X of type TYPE where
   tz is the number of trailing zeroes in Y.  */
static wide_int
mask_with_tz (tree type, const wide_int &x, const wide_int &y)
{
  int tz = wi::ctz (y);
  if (tz > 0)
    return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
  return x;
}
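
/* Worked example: for y = 24 (binary 11000, three trailing zeros) the
   mask is ~0b111, so x = 0b10111 becomes 0b10000; any y with no trailing
   zeros leaves x unchanged.  */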
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

static bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					strict_overflow_p);

    case CALL_EXPR:
      {
	tree fndecl = get_callee_fndecl (t);
	if (!fndecl) return false;
	if (flag_delete_null_pointer_checks && !flag_check_new
	    && DECL_IS_OPERATOR_NEW (fndecl)
	    && !TREE_NOTHROW (fndecl))
	  return true;
	if (flag_delete_null_pointer_checks
	    && lookup_attribute ("returns_nonnull",
				 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
	  return true;
	return alloca_call_p (t);
      }

    default:
      break;
    }
  return false;
}
/* Return true when T is an address and is known to be nonzero.
   Handle warnings about undefined signed overflow.  */

bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-zero"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
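
/* E.g. the address of a declared object, &var, is known to be nonzero,
   as is the result of operator new when -fdelete-null-pointer-checks is
   in effect; a plain variable of unknown value is not.  */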
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  LOC is the location of the resulting expression.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

tree
fold_binary_loc (location_t loc,
		 enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree arg0, arg1, tem;
  tree t1 = NULL_TREE;
  bool strict_overflow_p;

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 2
	      && op0 != NULL_TREE
	      && op1 != NULL_TREE);

  arg0 = op0;
  arg1 = op1;

  /* Strip any conversions that don't change the mode.  This is
     safe for every expression, except for a comparison expression
     because its signedness is derived from its operands.  So, in
     the latter case, only strip conversions that don't change the
     signedness.  MIN_EXPR/MAX_EXPR also need signedness of arguments
     preserved.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation
     of the arguments so that their form can be studied.  In any
     cases, the appropriate type conversions should be put back in
     the tree that will get out of the constant folder.  */

  if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
    {
      STRIP_SIGN_NOPS (arg0);
      STRIP_SIGN_NOPS (arg1);
    }
  else
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
    }

  /* Note that TREE_CONSTANT isn't enough: static var addresses are
     constant but we can't do arithmetic on them.  */
  if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
    {
      tem = const_binop (code, type, arg0, arg1);
      if (tem != NULL_TREE)
	{
	  if (TREE_TYPE (tem) != type)
	    tem = fold_convert_loc (loc, type, tem);
	  return tem;
	}
    }

  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, code, type, op1, op0);

  /* Likewise if this is a comparison, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (kind == tcc_comparison
      && tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);

  tem = generic_simplify (loc, code, type, op0, op1);
  if (tem)
    return tem;

  /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     one of the operands is a comparison and the other is a comparison, a
     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
     code below would make the expression more complex.  Change it to a
     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && TREE_CODE (type) != VECTOR_TYPE
      && ((truth_value_p (TREE_CODE (arg0))
	   && (truth_value_p (TREE_CODE (arg1))
	       || (TREE_CODE (arg1) == BIT_AND_EXPR
		   && integer_onep (TREE_OPERAND (arg1, 1)))))
	  || (truth_value_p (TREE_CODE (arg1))
	      && (truth_value_p (TREE_CODE (arg0))
		  || (TREE_CODE (arg0) == BIT_AND_EXPR
		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
			     : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
			     : TRUTH_XOR_EXPR,
			     boolean_type_node,
			     fold_convert_loc (loc, boolean_type_node, arg0),
			     fold_convert_loc (loc, boolean_type_node, arg1));

      if (code == EQ_EXPR)
	tem = invert_truthvalue_loc (loc, tem);

      return fold_convert_loc (loc, type, tem);
    }

  if (TREE_CODE_CLASS (code) == tcc_binary
      || TREE_CODE_CLASS (code) == tcc_comparison)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
	{
	  tem = fold_build2_loc (loc, code, type,
				 fold_convert_loc (loc, TREE_TYPE (op0),
						   TREE_OPERAND (arg0, 1)), op1);
	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
			     tem);
	}
      if (TREE_CODE (arg1) == COMPOUND_EXPR
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_build2_loc (loc, code, type, op0,
				 fold_convert_loc (loc, TREE_TYPE (op1),
						   TREE_OPERAND (arg1, 1)));
	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
			     tem);
	}

      if (TREE_CODE (arg0) == COND_EXPR
	  || TREE_CODE (arg0) == VEC_COND_EXPR
	  || COMPARISON_CLASS_P (arg0))
	{
	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
						     arg0, arg1,
						     /*cond_first_p=*/1);
	  if (tem != NULL_TREE)
	    return tem;
	}

      if (TREE_CODE (arg1) == COND_EXPR
	  || TREE_CODE (arg1) == VEC_COND_EXPR
	  || COMPARISON_CLASS_P (arg1))
	{
	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
						     arg1, arg0,
						     /*cond_first_p=*/0);
	  if (tem != NULL_TREE)
	    return tem;
	}
    }

  switch (code)
    {
    case MEM_REF:
9232 if (TREE_CODE (arg0
) == ADDR_EXPR
9233 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == MEM_REF
)
9235 tree iref
= TREE_OPERAND (arg0
, 0);
9236 return fold_build2 (MEM_REF
, type
,
9237 TREE_OPERAND (iref
, 0),
9238 int_const_binop (PLUS_EXPR
, arg1
,
9239 TREE_OPERAND (iref
, 1)));
9242 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9243 if (TREE_CODE (arg0
) == ADDR_EXPR
9244 && handled_component_p (TREE_OPERAND (arg0
, 0)))
9247 HOST_WIDE_INT coffset
;
9248 base
= get_addr_base_and_unit_offset (TREE_OPERAND (arg0
, 0),
9252 return fold_build2 (MEM_REF
, type
,
9253 build_fold_addr_expr (base
),
9254 int_const_binop (PLUS_EXPR
, arg1
,
9255 size_int (coffset
)));
    case POINTER_PLUS_EXPR:
      /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
	  && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
	return fold_convert_loc (loc, type,
				 fold_build2_loc (loc, PLUS_EXPR, sizetype,
						  fold_convert_loc (loc, sizetype,
								    arg1),
						  fold_convert_loc (loc, sizetype,
								    arg0)));
      return NULL_TREE;

    case PLUS_EXPR:
      if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
	{
	  /* X + (X / CST) * -CST is X % CST.  */
	  if (TREE_CODE (arg1) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
	      && operand_equal_p (arg0,
				  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
	    {
	      tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
	      tree cst1 = TREE_OPERAND (arg1, 1);
	      tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
					  cst1, cst0);
	      if (sum && integer_zerop (sum))
		return fold_convert_loc (loc, type,
					 fold_build2_loc (loc, TRUNC_MOD_EXPR,
							  TREE_TYPE (arg0), arg0,
							  cst0));
	    }
	}

      /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
	 one.  Make sure the type is not saturating and has the signedness of
	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
	   || TREE_CODE (arg1) == MULT_EXPR)
	  && !TYPE_SATURATING (type)
	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
	{
	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
	  if (tem)
	    return tem;
	}

      if (! FLOAT_TYPE_P (type))
	{
	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
	     (plus (plus (mult) (mult)) (foo)) so that we can
	     take advantage of the factoring cases below.  */
	  if (ANY_INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type)
	      && (((TREE_CODE (arg0) == PLUS_EXPR
		    || TREE_CODE (arg0) == MINUS_EXPR)
		   && TREE_CODE (arg1) == MULT_EXPR)
		  || ((TREE_CODE (arg1) == PLUS_EXPR
		       || TREE_CODE (arg1) == MINUS_EXPR)
		      && TREE_CODE (arg0) == MULT_EXPR)))
	    {
	      tree parg0, parg1, parg, marg;
	      enum tree_code pcode;

	      if (TREE_CODE (arg1) == MULT_EXPR)
		parg = arg0, marg = arg1;
	      else
		parg = arg1, marg = arg0;
	      pcode = TREE_CODE (parg);
	      parg0 = TREE_OPERAND (parg, 0);
	      parg1 = TREE_OPERAND (parg, 1);
	      STRIP_NOPS (parg0);
	      STRIP_NOPS (parg1);

	      if (TREE_CODE (parg0) == MULT_EXPR
		  && TREE_CODE (parg1) != MULT_EXPR)
		return fold_build2_loc (loc, pcode, type,
					fold_build2_loc (loc, PLUS_EXPR, type,
							 fold_convert_loc (loc, type,
									   parg0),
							 fold_convert_loc (loc, type,
									   marg)),
					fold_convert_loc (loc, type, parg1));
	      if (TREE_CODE (parg0) != MULT_EXPR
		  && TREE_CODE (parg1) == MULT_EXPR)
		return
		  fold_build2_loc (loc, PLUS_EXPR, type,
				   fold_convert_loc (loc, type, parg0),
				   fold_build2_loc (loc, pcode, type,
						    fold_convert_loc (loc, type, marg),
						    fold_convert_loc (loc, type,
								      parg1)));
	    }
	}
      else
	{
	  /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
	     to __complex__ ( x, y ).  This is not the same for SNaNs or
	     if signed zeros are involved.  */
	  if (!HONOR_SNANS (element_mode (arg0))
	      && !HONOR_SIGNED_ZEROS (element_mode (arg0))
	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	    {
	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	      tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
	      tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
	      bool arg0rz = false, arg0iz = false;
	      if ((arg0r && (arg0rz = real_zerop (arg0r)))
		  || (arg0i && (arg0iz = real_zerop (arg0i))))
		{
		  tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
		  tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
		  if (arg0rz && arg1i && real_zerop (arg1i))
		    {
		      tree rp = arg1r ? arg1r
			: build1 (REALPART_EXPR, rtype, arg1);
		      tree ip = arg0i ? arg0i
			: build1 (IMAGPART_EXPR, rtype, arg0);
		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		    }
		  else if (arg0iz && arg1r && real_zerop (arg1r))
		    {
		      tree rp = arg0r ? arg0r
			: build1 (REALPART_EXPR, rtype, arg0);
		      tree ip = arg1i ? arg1i
			: build1 (IMAGPART_EXPR, rtype, arg1);
		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		    }
		}
	    }

	  if (flag_unsafe_math_optimizations
	      && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
	      && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
	      && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
	    return tem;

	  /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
	     We associate floats only if the user has specified
	     -fassociative-math.  */
	  if (flag_associative_math
	      && TREE_CODE (arg1) == PLUS_EXPR
	      && TREE_CODE (arg0) != MULT_EXPR)
	    {
	      tree tree10 = TREE_OPERAND (arg1, 0);
	      tree tree11 = TREE_OPERAND (arg1, 1);
	      if (TREE_CODE (tree11) == MULT_EXPR
		  && TREE_CODE (tree10) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
		  return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
		}
	    }
	  /* Convert (b*c + d*e) + a into b*c + (d*e + a).
	     We associate floats only if the user has specified
	     -fassociative-math.  */
	  if (flag_associative_math
	      && TREE_CODE (arg0) == PLUS_EXPR
	      && TREE_CODE (arg1) != MULT_EXPR)
	    {
	      tree tree00 = TREE_OPERAND (arg0, 0);
	      tree tree01 = TREE_OPERAND (arg0, 1);
	      if (TREE_CODE (tree01) == MULT_EXPR
		  && TREE_CODE (tree00) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
		  return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
		}
	    }
	}
     bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
	 is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
	 is a rotate of A by B bits.  */
      {
	enum tree_code code0, code1;
	tree rtype;
	code0 = TREE_CODE (arg0);
	code1 = TREE_CODE (arg1);
	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
	    && operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 0), 0)
	    && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
		TYPE_UNSIGNED (rtype))
	    /* Only create rotates in complete modes.  Other cases are not
	       expanded properly.  */
	    && (element_precision (rtype)
		== GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
	  {
	    tree tree01, tree11;
	    enum tree_code code01, code11;

	    tree01 = TREE_OPERAND (arg0, 1);
	    tree11 = TREE_OPERAND (arg1, 1);
	    STRIP_NOPS (tree01);
	    STRIP_NOPS (tree11);
	    code01 = TREE_CODE (tree01);
	    code11 = TREE_CODE (tree11);
	    if (code01 == INTEGER_CST
		&& code11 == INTEGER_CST
		&& (wi::to_widest (tree01) + wi::to_widest (tree11)
		    == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
	      {
		tem = build2_loc (loc, LROTATE_EXPR,
				  TREE_TYPE (TREE_OPERAND (arg0, 0)),
				  TREE_OPERAND (arg0, 0),
				  code0 == LSHIFT_EXPR
				  ? TREE_OPERAND (arg0, 1)
				  : TREE_OPERAND (arg1, 1));
		return fold_convert_loc (loc, type, tem);
	      }
	    else if (code11 == MINUS_EXPR)
	      {
		tree tree110, tree111;
		tree110 = TREE_OPERAND (tree11, 0);
		tree111 = TREE_OPERAND (tree11, 1);
		STRIP_NOPS (tree110);
		STRIP_NOPS (tree111);
		if (TREE_CODE (tree110) == INTEGER_CST
		    && 0 == compare_tree_int (tree110,
					      element_precision
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree01, tree111, 0))
		  return
		    fold_convert_loc (loc, type,
				      build2 ((code0 == LSHIFT_EXPR
					       ? LROTATE_EXPR
					       : RROTATE_EXPR),
					      TREE_TYPE (TREE_OPERAND (arg0, 0)),
					      TREE_OPERAND (arg0, 0),
					      TREE_OPERAND (arg0, 1)));
	      }
	    else if (code01 == MINUS_EXPR)
	      {
		tree tree010, tree011;
		tree010 = TREE_OPERAND (tree01, 0);
		tree011 = TREE_OPERAND (tree01, 1);
		STRIP_NOPS (tree010);
		STRIP_NOPS (tree011);
		if (TREE_CODE (tree010) == INTEGER_CST
		    && 0 == compare_tree_int (tree010,
					      element_precision
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree11, tree011, 0))
		  return fold_convert_loc
		    (loc, type,
		     build2 ((code0 != LSHIFT_EXPR
			      ? LROTATE_EXPR
			      : RROTATE_EXPR),
			     TREE_TYPE (TREE_OPERAND (arg0, 0)),
			     TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
	      }
	  }
      }
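
#if 0
/* Illustrative sketch only (not compiled): assuming a 32-bit unsigned int,
   the recognition above turns the portable rotate idiom, written with
   either + or |, into a single LROTATE_EXPR, e.g.  */
unsigned int
rotl32 (unsigned int x, unsigned int n)
{
  return (x << n) + (x >> (32 - n));	/* folded to a rotate of x by n */
}
#endif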
    associate:
      /* In most languages, can't associate operations on floats through
	 parentheses.  Rather than remember where the parentheses were, we
	 don't associate floats at all, unless the user has specified
	 -fassociative-math.
	 And, we need to make sure type is not saturating.  */

      if ((! FLOAT_TYPE_P (type) || flag_associative_math)
	  && !TYPE_SATURATING (type))
	{
	  tree var0, con0, lit0, minus_lit0;
	  tree var1, con1, lit1, minus_lit1;
	  tree atype = type;
	  bool ok = true;

	  /* Split both trees into variables, constants, and literals.  Then
	     associate each group together, the constants with literals,
	     then the result with variables.  This increases the chances of
	     literals being recombined later and of generating relocatable
	     expressions for the sum of a constant and literal.  */
	  var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
	  var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
			     code == MINUS_EXPR);

	  /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
	  if (code == MINUS_EXPR)
	    code = PLUS_EXPR;

	  /* With undefined overflow prefer doing association in a type
	     which wraps on overflow, if that is one of the operand types.  */
	  if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
	      || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
	    {
	      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
		  && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
		atype = TREE_TYPE (arg0);
	      else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
		       && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
		atype = TREE_TYPE (arg1);
	      gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
	    }

	  /* With undefined overflow we can only associate constants with one
	     variable, and constants whose association doesn't overflow.  */
	  if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
	      || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
	    {
	      if (var0 && var1)
		{
		  tree tmp0 = var0;
		  tree tmp1 = var1;
		  bool one_neg = false;

		  if (TREE_CODE (tmp0) == NEGATE_EXPR)
		    {
		      tmp0 = TREE_OPERAND (tmp0, 0);
		      one_neg = !one_neg;
		    }
		  if (CONVERT_EXPR_P (tmp0)
		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
			  <= TYPE_PRECISION (atype)))
		    tmp0 = TREE_OPERAND (tmp0, 0);
		  if (TREE_CODE (tmp1) == NEGATE_EXPR)
		    {
		      tmp1 = TREE_OPERAND (tmp1, 0);
		      one_neg = !one_neg;
		    }
		  if (CONVERT_EXPR_P (tmp1)
		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
			  <= TYPE_PRECISION (atype)))
		    tmp1 = TREE_OPERAND (tmp1, 0);
		  /* The only case we can still associate with two variables
		     is if they cancel out.  */
		  if (!one_neg
		      || !operand_equal_p (tmp0, tmp1, 0))
		    ok = false;
		}
	    }

	  /* Only do something if we found more than two objects.  Otherwise,
	     nothing has changed and we risk infinite recursion.  */
	  if (ok
	      && (2 < ((var0 != 0) + (var1 != 0)
		       + (con0 != 0) + (con1 != 0)
		       + (lit0 != 0) + (lit1 != 0)
		       + (minus_lit0 != 0) + (minus_lit1 != 0))))
	    {
	      bool any_overflows = false;
	      if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
	      if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
	      if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
	      if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
	      var0 = associate_trees (loc, var0, var1, code, atype);
	      con0 = associate_trees (loc, con0, con1, code, atype);
	      lit0 = associate_trees (loc, lit0, lit1, code, atype);
	      minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
					    code, atype);

	      /* Preserve the MINUS_EXPR if the negative part of the literal is
		 greater than the positive part.  Otherwise, the multiplicative
		 folding code (i.e. extract_muldiv) may be fooled in case
		 unsigned constants are subtracted, like in the following
		 example: ((X*2 + 4) - 8U)/2.  */
	      if (minus_lit0 && lit0)
		{
		  if (TREE_CODE (lit0) == INTEGER_CST
		      && TREE_CODE (minus_lit0) == INTEGER_CST
		      && tree_int_cst_lt (lit0, minus_lit0))
		    {
		      minus_lit0 = associate_trees (loc, minus_lit0, lit0,
						    MINUS_EXPR, atype);
		      lit0 = 0;
		    }
		  else
		    {
		      lit0 = associate_trees (loc, lit0, minus_lit0,
					      MINUS_EXPR, atype);
		      minus_lit0 = 0;
		    }
		}

	      /* Don't introduce overflows through reassociation.  */
	      if (!any_overflows
		  && ((lit0 && TREE_OVERFLOW_P (lit0))
		      || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
		return NULL_TREE;

	      if (minus_lit0)
		{
		  if (con0 == 0)
		    return
		      fold_convert_loc (loc, type,
					associate_trees (loc, var0, minus_lit0,
							 MINUS_EXPR, atype));
		  else
		    {
		      con0 = associate_trees (loc, con0, minus_lit0,
					      MINUS_EXPR, atype);
		      return
			fold_convert_loc (loc, type,
					  associate_trees (loc, var0, con0,
							   PLUS_EXPR, atype));
		    }
		}

	      con0 = associate_trees (loc, con0, lit0, code, atype);
	      return
		fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
							      code, atype));
	    }
	}

      return NULL_TREE;

    case MINUS_EXPR:
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && negate_expr_p (op1)
	  && reorder_operands_p (arg0, arg1))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				negate_expr (op1),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));

      /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
	 __complex__ ( x, -y ).  This is not the same for SNaNs or if
	 signed zeros are involved.  */
      if (!HONOR_SNANS (element_mode (arg0))
	  && !HONOR_SIGNED_ZEROS (element_mode (arg0))
	  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	{
	  tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	  tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
	  tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
	  bool arg0rz = false, arg0iz = false;
	  if ((arg0r && (arg0rz = real_zerop (arg0r)))
	      || (arg0i && (arg0iz = real_zerop (arg0i))))
	    {
	      tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
	      tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
	      if (arg0rz && arg1i && real_zerop (arg1i))
		{
		  tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
					     arg1r ? arg1r
					     : build1 (REALPART_EXPR, rtype, arg1));
		  tree ip = arg0i ? arg0i
		    : build1 (IMAGPART_EXPR, rtype, arg0);
		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		}
	      else if (arg0iz && arg1r && real_zerop (arg1r))
		{
		  tree rp = arg0r ? arg0r
		    : build1 (REALPART_EXPR, rtype, arg0);
		  tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
					     arg1i ? arg1i
					     : build1 (IMAGPART_EXPR, rtype, arg1));
		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		}
	    }
	}

      /* A - B -> A + (-B) if B is easily negatable.  */
      if (negate_expr_p (op1)
	  && ! TYPE_OVERFLOW_SANITIZED (type)
	  && ((FLOAT_TYPE_P (type)
	       /* Avoid this transformation if B is a positive REAL_CST.  */
	       && (TREE_CODE (op1) != REAL_CST
		   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
	      || INTEGRAL_TYPE_P (type)))
	return fold_build2_loc (loc, PLUS_EXPR, type,
				fold_convert_loc (loc, type, arg0),
				negate_expr (op1));

      /* Fold &a[i] - &a[j] to i-j.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
	{
	  tree tem = fold_addr_of_array_ref_difference (loc, type,
							TREE_OPERAND (arg0, 0),
							TREE_OPERAND (arg1, 0));
	  if (tem)
	    return tem;
	}

      if (FLOAT_TYPE_P (type)
	  && flag_unsafe_math_optimizations
	  && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
	  && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
	  && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
	return tem;

      /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
	 one.  Make sure the type is not saturating and has the signedness of
	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
	   || TREE_CODE (arg1) == MULT_EXPR)
	  && !TYPE_SATURATING (type)
	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
	{
	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
	  if (tem)
	    return tem;
	}

      goto associate;

    case MULT_EXPR:
      if (! FLOAT_TYPE_P (type))
	{
	  /* Transform x * -C into -x * C if x is easily negatable.  */
	  if (TREE_CODE (op1) == INTEGER_CST
	      && tree_int_cst_sgn (op1) == -1
	      && negate_expr_p (op0)
	      && (tem = negate_expr (op1)) != op1
	      && ! TREE_OVERFLOW (tem))
	    return fold_build2_loc (loc, MULT_EXPR, type,
				    fold_convert_loc (loc, type,
						      negate_expr (op0)), tem);

	  /* (A + A) * C -> A * 2 * C  */
	  if (TREE_CODE (arg0) == PLUS_EXPR
	      && TREE_CODE (arg1) == INTEGER_CST
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg0, 1), 0))
	    return fold_build2_loc (loc, MULT_EXPR, type,
				    omit_one_operand_loc (loc, type,
							  TREE_OPERAND (arg0, 0),
							  TREE_OPERAND (arg0, 1)),
				    fold_build2_loc (loc, MULT_EXPR, type,
						     build_int_cst (type, 2), arg1));

	  /* ((T) (X /[ex] C)) * C cancels out if the conversion is
	     sign-changing only.  */
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg0) == EXACT_DIV_EXPR
	      && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
	    return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));

	  strict_overflow_p = false;
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					     &strict_overflow_p)))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying "
					"multiplication"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_convert_loc (loc, type, tem);
	    }

	  /* Optimize z * conj(z) for integer complex numbers.  */
	  if (TREE_CODE (arg0) == CONJ_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    return fold_mult_zconjz (loc, type, arg1);
	  if (TREE_CODE (arg1) == CONJ_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    return fold_mult_zconjz (loc, type, arg0);
	}
      else
	{
	  /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
	     This is not the same for NaNs or if signed zeros are
	     involved.  */
	  if (!HONOR_NANS (arg0)
	      && !HONOR_SIGNED_ZEROS (element_mode (arg0))
	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && TREE_CODE (arg1) == COMPLEX_CST
	      && real_zerop (TREE_REALPART (arg1)))
	    {
	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	      if (real_onep (TREE_IMAGPART (arg1)))
		return
		  fold_build2_loc (loc, COMPLEX_EXPR, type,
				   negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
								 rtype, arg0)),
				   fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
	      else if (real_minus_onep (TREE_IMAGPART (arg1)))
		return
		  fold_build2_loc (loc, COMPLEX_EXPR, type,
				   fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
				   negate_expr (fold_build1_loc (loc, REALPART_EXPR,
								 rtype, arg0)));
	    }

	  /* Optimize z * conj(z) for floating point complex numbers.
	     Guarded by flag_unsafe_math_optimizations as non-finite
	     imaginary components don't produce scalar results.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == CONJ_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    return fold_mult_zconjz (loc, type, arg1);
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == CONJ_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    return fold_mult_zconjz (loc, type, arg0);

	  if (flag_unsafe_math_optimizations)
	    {
	      /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x.  */
	      if (!in_gimple_form
		  && optimize
		  && operand_equal_p (arg0, arg1, 0))
		{
		  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

		  if (powfn)
		    {
		      tree arg = build_real (type, dconst2);
		      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
		    }
		}
	    }
	}
      goto associate;
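
#if 0
/* Illustrative sketch only (not compiled): with
   -funsafe-math-optimizations, x*x below is canonicalized as
   pow (x, 2.0), which later expands back to x*x but can first combine
   with neighbouring pow calls.  */
static double
square (double x)
{
  return x * x;
}
#endif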
    case BIT_IOR_EXPR:
      /* Canonicalize (X & C1) | C2.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  int width = TYPE_PRECISION (type), w;
	  wide_int c1 = TREE_OPERAND (arg0, 1);
	  wide_int c2 = arg1;

	  /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
	  if ((c1 & c2) == c1)
	    return omit_one_operand_loc (loc, type, arg1,
					 TREE_OPERAND (arg0, 0));

	  wide_int msk = wi::mask (width, false,
				   TYPE_PRECISION (TREE_TYPE (arg1)));

	  /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
	  if (msk.and_not (c1 | c2) == 0)
	    return fold_build2_loc (loc, BIT_IOR_EXPR, type,
				    TREE_OPERAND (arg0, 0), arg1);

	  /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
	     unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
	     mode which allows further optimizations.  */
	  c1 &= msk;
	  c2 &= msk;
	  wide_int c3 = c1.and_not (c2);
	  for (w = BITS_PER_UNIT; w <= width; w <<= 1)
	    {
	      wide_int mask = wi::mask (w, false,
					TYPE_PRECISION (type));
	      if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
		{
		  c3 = mask;
		  break;
		}
	    }

	  if (c3 != c1)
	    return fold_build2_loc (loc, BIT_IOR_EXPR, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, type,
						     TREE_OPERAND (arg0, 0),
						     wide_int_to_tree (type,
								       c3)),
				    arg1);
	}

      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;
    case BIT_XOR_EXPR:
      /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && INTEGRAL_TYPE_P (type)
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_onep (arg1))
	return fold_build2_loc (loc, EQ_EXPR, type, arg0,
				build_zero_cst (TREE_TYPE (arg0)));

      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_AND_EXPR:
9956 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9957 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
9958 && INTEGRAL_TYPE_P (type
)
9959 && integer_onep (TREE_OPERAND (arg0
, 1))
9960 && integer_onep (arg1
))
9963 tem
= TREE_OPERAND (arg0
, 0);
9964 tem2
= fold_convert_loc (loc
, TREE_TYPE (tem
), arg1
);
9965 tem2
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
),
9967 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem2
,
9968 build_zero_cst (TREE_TYPE (tem
)));
9970 /* Fold ~X & 1 as (X & 1) == 0. */
9971 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9972 && INTEGRAL_TYPE_P (type
)
9973 && integer_onep (arg1
))
9976 tem
= TREE_OPERAND (arg0
, 0);
9977 tem2
= fold_convert_loc (loc
, TREE_TYPE (tem
), arg1
);
9978 tem2
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
),
9980 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem2
,
9981 build_zero_cst (TREE_TYPE (tem
)));
9983 /* Fold !X & 1 as X == 0. */
9984 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
9985 && integer_onep (arg1
))
9987 tem
= TREE_OPERAND (arg0
, 0);
9988 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem
,
9989 build_zero_cst (TREE_TYPE (tem
)));
      /* Fold (X ^ Y) & Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg1));
	}

      /* Fold (X ^ Y) & X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg1));
	}

      /* Fold X & (X ^ Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_convert_loc (loc, type, arg0),
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
	}

      /* Fold X & (Y ^ X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg0));
	}
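      /* Editorial note, not from the original source: these four rewrites
	 all follow from the bitwise identity (X ^ Y) & Y == ~X & Y --
	 wherever a bit of Y is 1, X ^ Y equals ~X there; wherever it is
	 0, the AND clears the bit anyway.  */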
      /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
	 multiple of 1 << CST.  */
      if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  wide_int cst1 = arg1;
	  wide_int ncst1 = -cst1;
	  if ((cst1 & ncst1) == ncst1
	      && multiple_of_p (type, arg0,
				wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
	    return fold_convert_loc (loc, type, arg0);
	}
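      /* Editorial example, not from the original source: with CST == 3,
	 -(1 << 3) == -8, and (x * 24) & -8 folds to x * 24, because 24 is
	 a multiple of 8 and the product therefore already has its three
	 low bits clear.  */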
      /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
	 bits from CST2.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == MULT_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  wide_int warg1 = arg1;
	  wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));

	  if (masked == 0)
	    return omit_two_operands_loc (loc, type, build_zero_cst (type),
					  arg0, arg1);
	  else if (masked != warg1)
	    {
	      /* Avoid the transform if arg1 is a mask of some
		 mode which allows further optimizations.  */
	      int pop = wi::popcount (warg1);
	      if (!(pop >= BITS_PER_UNIT
		    && exact_log2 (pop) != -1
		    && wi::mask (pop, false, warg1.get_precision ()) == warg1))
		return fold_build2_loc (loc, code, type, op0,
					wide_int_to_tree (type, masked));
	    }
	}
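      /* Editorial example, not from the original source: 8 == 1 << 3
	 guarantees three trailing zero bits in x * 8, so (x * 8) & 21
	 folds to (x * 8) & 16 -- the low bits 0b101 of the mask 0b10101
	 can never match anything.  */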
      /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
	 ((A & N) + B) & M -> (A + B) & M
	 Similarly if (N & M) == 0,
	 ((A | N) + B) & M -> (A + B) & M
	 and for - instead of + (or unary - instead of +)
	 and/or ^ instead of |.
	 If B is constant and (B & M) == 0, fold into A & M.  */
      if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  wide_int cst1 = arg1;
	  if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
	      && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	      && (TREE_CODE (arg0) == PLUS_EXPR
		  || TREE_CODE (arg0) == MINUS_EXPR
		  || TREE_CODE (arg0) == NEGATE_EXPR)
	      && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
		  || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
	    {
	      tree pmop[2];
	      int which = 0;
	      wide_int cst0;

	      /* Now we know that arg0 is (C + D) or (C - D) or
		 -C and arg1 (M) is == (1LL << cst) - 1.
		 Store C into PMOP[0] and D into PMOP[1].  */
	      pmop[0] = TREE_OPERAND (arg0, 0);
	      pmop[1] = NULL;
	      if (TREE_CODE (arg0) != NEGATE_EXPR)
		{
		  pmop[1] = TREE_OPERAND (arg0, 1);
		  which = 1;
		}

	      if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
		which = -1;

	      for (; which >= 0; which--)
		switch (TREE_CODE (pmop[which]))
		  {
		  case BIT_AND_EXPR:
		  case BIT_IOR_EXPR:
		  case BIT_XOR_EXPR:
		    if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
			!= INTEGER_CST)
		      break;
		    cst0 = TREE_OPERAND (pmop[which], 1);
		    cst0 &= cst1;
		    if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
		      {
			if (cst0 != cst1)
			  break;
		      }
		    else if (cst0 != 0)
		      break;
		    /* If C or D is of the form (A & N) where
		       (N & M) == M, or of the form (A | N) or
		       (A ^ N) where (N & M) == 0, replace it with A.  */
		    pmop[which] = TREE_OPERAND (pmop[which], 0);
		    break;
		  case INTEGER_CST:
		    /* If C or D is an N where (N & M) == 0, it can be
		       omitted (assumed 0).  */
		    if ((TREE_CODE (arg0) == PLUS_EXPR
			 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
			&& (cst1 & pmop[which]) == 0)
		      pmop[which] = NULL;
		    break;
		  default:
		    break;
		  }

	      /* Only build anything new if we optimized one or both arguments
		 above.  */
	      if (pmop[0] != TREE_OPERAND (arg0, 0)
		  || (TREE_CODE (arg0) != NEGATE_EXPR
		      && pmop[1] != TREE_OPERAND (arg0, 1)))
		{
		  tree utype = TREE_TYPE (arg0);
		  if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
		    {
		      /* Perform the operations in a type that has defined
			 overflow behavior.  */
		      utype = unsigned_type_for (TREE_TYPE (arg0));
		      if (pmop[0] != NULL)
			pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
		      if (pmop[1] != NULL)
			pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
		    }

		  if (TREE_CODE (arg0) == NEGATE_EXPR)
		    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
		  else if (TREE_CODE (arg0) == PLUS_EXPR)
		    {
		      if (pmop[0] != NULL && pmop[1] != NULL)
			tem = fold_build2_loc (loc, PLUS_EXPR, utype,
					       pmop[0], pmop[1]);
		      else if (pmop[0] != NULL)
			tem = pmop[0];
		      else if (pmop[1] != NULL)
			tem = pmop[1];
		      else
			return build_int_cst (type, 0);
		    }
		  else if (pmop[0] == NULL)
		    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
		  else
		    tem = fold_build2_loc (loc, MINUS_EXPR, utype,
					   pmop[0], pmop[1]);
		  /* TEM is now the new binary +, - or unary - replacement.  */
		  tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
					 fold_convert_loc (loc, utype, arg1));
		  return fold_convert_loc (loc, type, tem);
		}
	    }
	}
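      /* Editorial example, not from the original source: with M == 0xf
	 and N == 0x1f we have (N & M) == M, so ((a & 0x1f) + b) & 0xf
	 becomes (a + b) & 0xf; bits of a above bit 3 can only propagate
	 upward through the carry chain and never affect the retained low
	 four bits.  */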
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	{
	  prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));

	  wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
	  if (mask == -1)
	    return
	      fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	}
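      /* Editorial example, not from the original source: if c has type
	 unsigned char, (int) c lies in [0, 255], so masking it with 0377
	 (i.e. 255) cannot clear any bit and the AND is dropped.  */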
      goto associate;

    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
	 of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
	  && real_zerop (arg1))
	return NULL_TREE;

      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				TREE_OPERAND (arg0, 0),
				negate_expr (arg1));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				negate_expr (arg0),
				TREE_OPERAND (arg1, 0));
      return NULL_TREE;

    case TRUNC_DIV_EXPR:
      /* Fall through */

    case FLOOR_DIV_EXPR:
      /* Simplify A / (B << N) where A and B are positive and B is
	 a power of 2, to A >> (N + log2(B)).  */
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == LSHIFT_EXPR
	  && (TYPE_UNSIGNED (type)
	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
	{
	  tree sval = TREE_OPERAND (arg1, 0);
	  if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
	    {
	      tree sh_cnt = TREE_OPERAND (arg1, 1);
	      tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
					 wi::exact_log2 (sval));

	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying A / (B << N)"),
				       WARN_STRICT_OVERFLOW_MISC);

	      sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
					sh_cnt, pow2);
	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
				      fold_convert_loc (loc, type, arg0),
				      sh_cnt);
	    }
	}
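      /* Editorial example, not from the original source: for unsigned a,
	 a / (4 << n) becomes a >> (n + 2), since 4 << n == 1 << (n + 2)
	 and division by a power of two is a logical right shift.  */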
      /* Fall through */

    case ROUND_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_zerop (arg1))
	return NULL_TREE;

      /* Convert -A / -B to A / B when the type is signed and overflow is
	 undefined.  */
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	  && TREE_CODE (arg0) == NEGATE_EXPR
	  && negate_expr_p (op1))
	{
	  if (INTEGRAL_TYPE_P (type))
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when distributing negation across "
				    "division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, type,
						    TREE_OPERAND (arg0, 0)),
				  negate_expr (op1));
	}
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && negate_expr_p (op0))
	{
	  if (INTEGRAL_TYPE_P (type))
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when distributing negation across "
				    "division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_build2_loc (loc, code, type,
				  negate_expr (op0),
				  fold_convert_loc (loc, type,
						    TREE_OPERAND (arg1, 0)));
	}

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
	 operation, EXACT_DIV_EXPR.

	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
	 At one time others generated faster code, it's not clear if they do
	 after the last round of changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
	  && multiple_of_p (type, arg0, arg1))
	return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
				fold_convert (type, arg0),
				fold_convert (type, arg1));

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					 &strict_overflow_p)))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_convert_loc (loc, type, tem);
	}

      return NULL_TREE;

    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					 &strict_overflow_p)))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying modulus"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_convert_loc (loc, type, tem);
	}

      return NULL_TREE;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* Since negative shift count is not well-defined,
	 don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
	return NULL_TREE;

      prec = element_precision (type);

      /* If we have a rotate of a bit operation with the rotate count and
	 the second operand of the bit operation both constant,
	 permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == BIT_AND_EXPR
	      || TREE_CODE (arg0) == BIT_IOR_EXPR
	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, TREE_CODE (arg0), type,
				fold_build2_loc (loc, code, type,
						 TREE_OPERAND (arg0, 0), arg1),
				fold_build2_loc (loc, code, type,
						 TREE_OPERAND (arg0, 1), arg1));

      /* Two consecutive rotates adding up to some integer
	 multiple of the precision of the type can be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == RROTATE_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
			     prec) == 0)
	return TREE_OPERAND (arg0, 0);
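      /* Editorial example, not from the original source: on a 32-bit type,
	 rotating right by 3 and then by 29 is the identity, since
	 (3 + 29) % 32 == 0.  */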
      return NULL_TREE;

    case MIN_EXPR:
    case MAX_EXPR:
      goto associate;

    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
	return fold_convert_loc (loc, type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
	 must be evaluated.  */
      if (integer_zerop (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
	 case will be handled here.  */
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
	 means A >= Y && A != MAX, but in this case we know that
	 A < X <= MAX.  */

      if (!TREE_SIDE_EFFECTS (arg0)
	  && !TREE_SIDE_EFFECTS (arg1))
	{
	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
	  if (tem && !operand_equal_p (tem, arg0, 0))
	    return fold_build2_loc (loc, code, type, tem, arg1);

	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
	  if (tem && !operand_equal_p (tem, arg1, 0))
	    return fold_build2_loc (loc, code, type, arg0, tem);
	}

      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
	  != NULL_TREE)
	return tem;

      return NULL_TREE;

    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or true.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return fold_convert_loc (loc, type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant true, result is true, but we must
	 evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
	 TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X || X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
      /* X || !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* (X && !Y) || (!X && Y) is X ^ Y */
      if (TREE_CODE (arg0) == TRUTH_AND_EXPR
	  && TREE_CODE (arg1) == TRUTH_AND_EXPR)
	{
	  tree a0, a1, l0, l1, n0, n1;

	  a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));

	  l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));

	  n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
	  n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);

	  if ((operand_equal_p (n0, a0, 0)
	       && operand_equal_p (n1, a1, 0))
	      || (operand_equal_p (n0, a1, 0)
		  && operand_equal_p (n1, a0, 0)))
	    return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
	}

      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
	  != NULL_TREE)
	return tem;

      return NULL_TREE;

    case TRUTH_XOR_EXPR:
      /* If the second arg is constant zero, drop it.  */
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If the second arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg1))
	{
	  tem = invert_truthvalue_loc (loc, arg0);
	  return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	}
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* !X ^ X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg1);

      /* X ^ !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      return NULL_TREE;
    case EQ_EXPR:
    case NE_EXPR:
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;

      /* bool_var != 1 becomes !bool_var. */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	  && code == NE_EXPR)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
						  TREE_TYPE (arg0), arg0));

      /* bool_var == 0 becomes !bool_var. */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	  && code == EQ_EXPR)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
						  TREE_TYPE (arg0), arg0));

      /* !exp != 0 becomes !exp */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
	  && code == NE_EXPR)
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* Transform comparisons of the form X +- Y CMP X to Y CMP 0.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR
	   || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	   || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
									0)),
			      arg1, 0)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	      || POINTER_TYPE_P (TREE_TYPE (arg0))))
	{
	  tree val = TREE_OPERAND (arg0, 1);
	  return omit_two_operands_loc (loc, type,
					fold_build2_loc (loc, code, type,
							 val,
							 build_int_cst (TREE_TYPE (val),
									0)),
					TREE_OPERAND (arg0, 0), arg1);
	}

      /* Transform comparisons of the form C - X CMP X if C % 2 == 1.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
									1)),
			      arg1, 0)
	  && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
	return omit_two_operands_loc (loc, type,
				      code == NE_EXPR
				      ? boolean_true_node : boolean_false_node,
				      TREE_OPERAND (arg0, 1), arg1);

      /* If this is an EQ or NE comparison with zero and ARG0 is
	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
	 two operations, but the latter can be done in one less insn
	 on machines that have only two-operand insns or on which a
	 constant cannot be the first operand.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_zerop (arg1))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  if (TREE_CODE (arg00) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg00, 0)))
	    {
	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
					  arg01, TREE_OPERAND (arg00, 1));
	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				     build_int_cst (TREE_TYPE (arg0), 1));
	      return fold_build2_loc (loc, code, type,
				      fold_convert_loc (loc, TREE_TYPE (arg1),
							tem),
				      arg1);
	    }
	  else if (TREE_CODE (arg01) == LSHIFT_EXPR
		   && integer_onep (TREE_OPERAND (arg01, 0)))
	    {
	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
					  arg00, TREE_OPERAND (arg01, 1));
	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				     build_int_cst (TREE_TYPE (arg0), 1));
	      return fold_build2_loc (loc, code, type,
				      fold_convert_loc (loc, TREE_TYPE (arg1),
							tem),
				      arg1);
	    }
	}
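      /* Editorial example, not from the original source:
	 ((1 << n) & flags) != 0 becomes ((flags >> n) & 1) != 0, which
	 avoids materializing the shifted constant in a register on
	 two-operand machines.  */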
      /* If this is an NE or EQ comparison of zero against the result of a
	 signed MOD operation whose second operand is a power of 2, make
	 the MOD operation unsigned since it is simpler and equivalent.  */
      if (integer_zerop (arg1)
	  && !TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tree newtype = unsigned_type_for (TREE_TYPE (arg0));
	  tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
					 fold_convert_loc (loc, newtype,
							   TREE_OPERAND (arg0, 0)),
					 fold_convert_loc (loc, newtype,
							   TREE_OPERAND (arg0, 1)));

	  return fold_build2_loc (loc, code, type, newmod,
				  fold_convert_loc (loc, newtype, arg1));
	}
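      /* Editorial note, not from the original source: the rewrite is safe
	 because x % 4 == 0 holds exactly when (unsigned) x % 4U == 0; a
	 power of two divides x if and only if it divides its modular
	 (unsigned) image.  */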
      /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
	 C1 is a valid shift constant, and C2 is a power of two, i.e.
	 a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
	     == INTEGER_CST
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && integer_zerop (arg1))
	{
	  tree itype = TREE_TYPE (arg0);
	  tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
	  prec = TYPE_PRECISION (itype);

	  /* Check for a valid shift count.  */
	  if (wi::ltu_p (arg001, prec))
	    {
	      tree arg01 = TREE_OPERAND (arg0, 1);
	      tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	      unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
	      /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
		 can be rewritten as (X & (C2 << C1)) != 0.  */
	      if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
		{
		  tem = fold_build2_loc (loc, LSHIFT_EXPR, itype,
					 arg01, arg001);
		  tem = fold_build2_loc (loc, BIT_AND_EXPR, itype,
					 arg000, tem);
		  return fold_build2_loc (loc, code, type, tem,
					  fold_convert_loc (loc, itype, arg1));
		}
	      /* Otherwise, for signed (arithmetic) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
		 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
	      else if (!TYPE_UNSIGNED (itype))
		return fold_build2_loc (loc,
					code == EQ_EXPR ? GE_EXPR : LT_EXPR,
					type,
					arg000, build_int_cst (itype, 0));
	      /* Otherwise, for unsigned (logical) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
		 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
	      else
		return omit_one_operand_loc (loc, type,
					     code == EQ_EXPR
					     ? integer_one_node
					     : integer_zero_node,
					     arg000);
	    }
	}
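      /* Editorial example, not from the original source: with C1 == 3 and
	 C2 == 4, ((x >> 3) & 4) != 0 becomes (x & 32) != 0, since
	 4 << 3 == 32 does not overflow the precision.  */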
      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
				       TREE_TYPE (TREE_OPERAND (arg0, 1)),
				       TREE_OPERAND (arg0, 1));
	  tree dandnotc
	    = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
			       fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
			       notc);
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (dandnotc))
	    return omit_one_operand_loc (loc, type, rslt, arg0);
	}

      /* If this is a comparison of a field, we may be able to simplify it.  */
      if ((TREE_CODE (arg0) == COMPONENT_REF
	   || TREE_CODE (arg0) == BIT_FIELD_REF)
	  /* Handle the constant case even without -O
	     to make sure the warnings are given.  */
	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
	{
	  t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
	  if (t1)
	    return t1;
	}

      /* Optimize comparisons of strlen vs zero to a compare of the
	 first character of the string vs zero.  To wit,
		strlen(ptr) == 0   =>  *ptr == 0
		strlen(ptr) != 0   =>  *ptr != 0
	 Other cases should reduce to one of these two (or a constant)
	 due to the return value of strlen being unsigned.  */
      if (TREE_CODE (arg0) == CALL_EXPR
	  && integer_zerop (arg1))
	{
	  tree fndecl = get_callee_fndecl (arg0);

	  if (fndecl
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
	      && call_expr_nargs (arg0) == 1
	      && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
		 == POINTER_TYPE)
	    {
	      tree iref = build_fold_indirect_ref_loc (loc,
						       CALL_EXPR_ARG (arg0, 0));
	      return fold_build2_loc (loc, code, type, iref,
				      build_int_cst (TREE_TYPE (iref), 0));
	    }
	}

      /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
	 of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
      if (TREE_CODE (arg0) == RSHIFT_EXPR
	  && integer_zerop (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree itype = TREE_TYPE (arg00);
	  if (wi::eq_p (arg01, element_precision (itype) - 1))
	    {
	      if (TYPE_UNSIGNED (itype))
		{
		  itype = signed_type_for (itype);
		  arg00 = fold_convert_loc (loc, itype, arg00);
		}
	      return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
				      type, arg00, build_zero_cst (itype));
	    }
	}
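      /* Editorial example, not from the original source: for a signed
	 32-bit x, (x >> 31) is 0 or -1 under arithmetic shift, so
	 (x >> 31) != 0 is exactly x < 0; that is why an unsigned operand
	 is first converted to its signed counterpart.  */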
      /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
	 (X & C) == 0 when C is a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
				 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
				 TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type, tem,
				  fold_convert_loc (loc, TREE_TYPE (arg0),
						    arg1));
	}

      /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
	 constant C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type, arg00,
				  build_int_cst (TREE_TYPE (arg00), 0));
	}

      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
	 when C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
				 arg000, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type, tem,
				  build_int_cst (TREE_TYPE (tem), 0));
	}

      if (integer_zerop (arg1)
	  && tree_expr_nonzero_p (arg0))
	{
	  tree res = constant_boolean_node (code == NE_EXPR, type);
	  return omit_one_operand_loc (loc, type, res, arg0);
	}

      /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
							BIT_XOR_EXPR, itype,
							arg00, arg10),
						     arg01),
				    build_zero_cst (itype));

	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
							BIT_XOR_EXPR, itype,
							arg00, arg11),
						     arg01),
				    build_zero_cst (itype));

	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
							BIT_XOR_EXPR, itype,
							arg01, arg10),
						     arg00),
				    build_zero_cst (itype));

	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
							BIT_XOR_EXPR, itype,
							arg01, arg11),
						     arg00),
				    build_zero_cst (itype));
	}

      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == BIT_XOR_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
	     operand_equal_p guarantees no side-effects so we don't need
	     to use omit_one_operand on Z.  */
	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2_loc (loc, code, type, arg00,
				    fold_convert_loc (loc, TREE_TYPE (arg00),
						      arg10));
	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2_loc (loc, code, type, arg00,
				    fold_convert_loc (loc, TREE_TYPE (arg00),
						      arg11));
	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2_loc (loc, code, type, arg01,
				    fold_convert_loc (loc, TREE_TYPE (arg01),
						      arg10));
	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2_loc (loc, code, type, arg01,
				    fold_convert_loc (loc, TREE_TYPE (arg01),
						      arg11));

	  /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
	  if (TREE_CODE (arg01) == INTEGER_CST
	      && TREE_CODE (arg11) == INTEGER_CST)
	    {
	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
				     fold_convert_loc (loc, itype, arg11));
	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
	      return fold_build2_loc (loc, code, type, tem,
				      fold_convert_loc (loc, itype, arg10));
	    }
	}
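      /* Editorial example, not from the original source: with C1 == 5 and
	 C2 == 3, (x ^ 5) == (y ^ 3) becomes (x ^ 6) == y, since XOR-ing
	 both sides by 3 cancels it on the right.  */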
      /* Attempt to simplify equality/inequality comparisons of complex
	 values.  Only lower the comparison if the result is known or
	 can be simplified to a single scalar comparison.  */
      if ((TREE_CODE (arg0) == COMPLEX_EXPR
	   || TREE_CODE (arg0) == COMPLEX_CST)
	  && (TREE_CODE (arg1) == COMPLEX_EXPR
	      || TREE_CODE (arg1) == COMPLEX_CST))
	{
	  tree real0, imag0, real1, imag1;
	  tree rcond, icond;

	  if (TREE_CODE (arg0) == COMPLEX_EXPR)
	    {
	      real0 = TREE_OPERAND (arg0, 0);
	      imag0 = TREE_OPERAND (arg0, 1);
	    }
	  else
	    {
	      real0 = TREE_REALPART (arg0);
	      imag0 = TREE_IMAGPART (arg0);
	    }

	  if (TREE_CODE (arg1) == COMPLEX_EXPR)
	    {
	      real1 = TREE_OPERAND (arg1, 0);
	      imag1 = TREE_OPERAND (arg1, 1);
	    }
	  else
	    {
	      real1 = TREE_REALPART (arg1);
	      imag1 = TREE_IMAGPART (arg1);
	    }

	  rcond = fold_binary_loc (loc, code, type, real0, real1);
	  if (rcond && TREE_CODE (rcond) == INTEGER_CST)
	    {
	      if (integer_zerop (rcond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_false_node,
						  imag0, imag1);
		  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_true_node,
						  imag0, imag1);
		  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
		}
	    }

	  icond = fold_binary_loc (loc, code, type, imag0, imag1);
	  if (icond && TREE_CODE (icond) == INTEGER_CST)
	    {
	      if (integer_zerop (icond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_false_node,
						  real0, real1);
		  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_true_node,
						  real0, real1);
		  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
		}
	    }
	}

      return NULL_TREE;

    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;

      /* Transform comparisons of the form X +- C CMP X.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	       && !HONOR_SNANS (arg0))
	      || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  enum tree_code code0 = TREE_CODE (arg0);
	  int is_positive;

	  if (TREE_CODE (arg01) == REAL_CST)
	    is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
	  else
	    is_positive = tree_int_cst_sgn (arg01);

	  /* (X - c) > X becomes false.  */
	  if (code == GT_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that (X - c) > X "
					"is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Likewise (X + c) < X becomes false.  */
	  if (code == LT_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) < X is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Convert (X - c) <= X to true.  */
	  if (!HONOR_NANS (arg1)
	      && code == LE_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X - c) <= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  /* Convert (X + c) >= X to true.  */
	  if (!HONOR_NANS (arg1)
	      && code == GE_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) >= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  if (TREE_CODE (arg01) == INTEGER_CST)
	    {
	      /* Convert X + c > X and X - c < X to true for integers.  */
	      if (code == GT_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) > X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      if (code == LT_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) < X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      /* Convert X + c <= X and X - c >= X to false for integers.  */
	      if (code == LE_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) <= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}

	      if (code == GE_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) >= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}
	    }
	}

      /* If we are comparing an ABS_EXPR with a constant, we can
	 convert all the cases into explicit comparisons, but they may
	 well not be faster than doing the ABS and one comparison.
	 But ABS (X) <= C is a range comparison, which becomes a subtraction
	 and a comparison, and is probably faster.  */
      if (code == LE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == ABS_EXPR
	  && ! TREE_SIDE_EFFECTS (arg0)
	  && (0 != (tem = negate_expr (arg1)))
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				build2 (GE_EXPR, type,
					TREE_OPERAND (arg0, 0), tem),
				build2 (LE_EXPR, type,
					TREE_OPERAND (arg0, 0), arg1));

      /* Convert ABS_EXPR<x> >= 0 to true.  */
      strict_overflow_p = false;
      if (code == GE_EXPR
	  && (integer_zerop (arg1)
	      || (! HONOR_NANS (arg0)
		  && real_zerop (arg1)))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand_loc (loc, type,
				       constant_boolean_node (true, type),
				       arg0);
	}

      /* Convert ABS_EXPR<x> < 0 to false.  */
      strict_overflow_p = false;
      if (code == LT_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand_loc (loc, type,
				       constant_boolean_node (false, type),
				       arg0);
	}

      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
	 and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TREE_CODE (arg1) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (arg1, 0)))
	return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
			   build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
				   TREE_OPERAND (arg1, 1)),
			   build_zero_cst (TREE_TYPE (arg0)));
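      /* Editorial example, not from the original source: for unsigned x
	 and y < precision, x < (1 << y) holds exactly when every bit of x
	 at position y or above is zero, i.e. (x >> y) == 0.  */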
      /* Similarly for X < (cast) (1 << Y).  But cast can't be narrowing,
	 otherwise Y might be >= # of bits in X's type and thus e.g.
	 (unsigned char) (1 << Y) for Y 15 might be 0.
	 If the cast is widening, then 1 << Y should have unsigned type,
	 otherwise if Y is number of bits in the signed shift type minus 1,
	 we can't optimize this.  E.g. (unsigned long long) (1 << Y) for Y
	 31 might be 0xffffffff80000000.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && CONVERT_EXPR_P (arg1)
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
	  && (element_precision (TREE_TYPE (arg1))
	      >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
	  && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
	      || (element_precision (TREE_TYPE (arg1))
		  == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
	  && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
	{
	  tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
			TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
	  return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
			     fold_convert_loc (loc, TREE_TYPE (arg0), tem),
			     build_zero_cst (TREE_TYPE (arg0)));
	}

      return NULL_TREE;
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
	tree targ0 = strip_float_extensions (arg0);
	tree targ1 = strip_float_extensions (arg1);
	tree newtype = TREE_TYPE (targ0);

	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	  newtype = TREE_TYPE (targ1);

	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, newtype, targ0),
				  fold_convert_loc (loc, newtype, targ1));
      }

      return NULL_TREE;

    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
	 nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
	return NULL_TREE;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
				 : fold_convert_loc (loc, type, arg1);
      return pedantic_non_lvalue_loc (loc, tem);

    case ASSERT_EXPR:
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
      gcc_unreachable ();

    default:
      return NULL_TREE;
    } /* switch (code) */
}

/* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
   a LABEL_EXPR; otherwise return NULL_TREE.  Do not check the subtrees
   of GOTO_EXPR.  */

static tree
contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      return *tp;

    case GOTO_EXPR:
      *walk_subtrees = 0;

      /* ... fall through ...  */

    default:
      return NULL_TREE;
    }
}

/* Return whether the sub-tree ST contains a label which is accessible from
   outside the sub-tree.  */

static bool
contains_label_p (tree st)
{
  return
   (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
}
/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_ternary_loc (location_t loc, enum tree_code code, tree type,
		  tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 3);

  /* If this is a commutative operation, and OP0 is a constant, move it
     to OP1 to reduce the number of tests below.  */
  if (commutative_ternary_tree_code (code)
      && tree_swap_operands_p (op0, op1, true))
    return fold_build3_loc (loc, code, type, op1, op0, op2);

  tem = generic_simplify (loc, code, type, op0, op1, op2);
  if (tem)
    return tem;

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any case,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */
  if (op0)
    {
      arg0 = op0;
      STRIP_NOPS (arg0);
    }

  if (op1)
    {
      arg1 = op1;
      STRIP_NOPS (arg1);
    }

  if (op2)
    {
      arg2 = op2;
      STRIP_NOPS (arg2);
    }

  switch (code)
    {
    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
	{
	  unsigned HOST_WIDE_INT idx;
	  tree field, value;
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx,
				    field, value)
	    if (field == arg1)
	      return value;
	}
      return NULL_TREE;

    case COND_EXPR:
    case VEC_COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
	 so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
	  tem = integer_zerop (arg0) ? op2 : op1;
	  /* Only optimize constant conditions when the selected branch
	     has the same type as the COND_EXPR.  This avoids optimizing
	     away "c ? x : throw", where the throw has a void type.
	     Avoid throwing away that operand which contains label.  */
	  if ((!TREE_SIDE_EFFECTS (unused_op)
	       || !contains_label_p (unused_op))
	      && (! VOID_TYPE_P (TREE_TYPE (tem))
		  || VOID_TYPE_P (type)))
	    return pedantic_non_lvalue_loc (loc, tem);
	  return NULL_TREE;
	}
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  if ((TREE_CODE (arg1) == VECTOR_CST
	       || TREE_CODE (arg1) == CONSTRUCTOR)
	      && (TREE_CODE (arg2) == VECTOR_CST
		  || TREE_CODE (arg2) == CONSTRUCTOR))
	    {
	      unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	      unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
	      gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
	      for (i = 0; i < nelts; i++)
		{
		  tree val = VECTOR_CST_ELT (arg0, i);
		  if (integer_all_onesp (val))
		    sel[i] = i;
		  else if (integer_zerop (val))
		    sel[i] = nelts + i;
		  else /* Currently unreachable.  */
		    return NULL_TREE;
		}
	      tree t = fold_vec_perm (type, arg1, arg2, sel);
	      if (t != NULL_TREE)
		return t;
	    }
	}
      /* If we have A op B ? A : C, we may be able to convert this to a
	 simpler expression, depending on the operation and the values
	 of B and C.  Signed zeros prevent all of these transformations,
	 for reasons given above each one.

	 Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     arg1, TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
	{
	  tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
	  if (tem)
	    return tem;
	}

      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     op2,
					     TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (element_mode (op2)))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  tem = fold_invert_truthvalue (loc0, arg0);
	  if (tem && COMPARISON_CLASS_P (tem))
	    {
	      tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
	      if (tem)
		return tem;
	    }
	}

      /* If the second operand is simpler than the third, swap them
	 since that produces better jump optimization results.  */
      if (truth_value_p (TREE_CODE (arg0))
	  && tree_swap_operands_p (op1, op2, false))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* See if this can be inverted.  If it can't, possibly because
	     it was a floating-point inequality comparison, don't do
	     anything.  */
	  tem = fold_invert_truthvalue (loc0, arg0);
	  if (tem)
	    return fold_build3_loc (loc, code, type, tem, op2, op1);
	}

      /* Convert A ? 1 : 0 to simply A.  */
      if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
				 : (integer_onep (op1)
				    && !VECTOR_TYPE_P (type)))
	  && integer_zerop (op2)
	  /* If we try to convert OP0 to our type, the
	     call to fold will try to move the conversion inside
	     a COND, which will recurse.  In that case, the COND_EXPR
	     is probably the best choice, so leave it alone.  */
	  && type == TREE_TYPE (arg0))
	return pedantic_non_lvalue_loc (loc, arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
	 over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
	  && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
				    : (integer_onep (op2)
				       && !VECTOR_TYPE_P (type)))
	  && truth_value_p (TREE_CODE (arg0)))
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
							  invert_truthvalue_loc (loc,
										 arg0)));

      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
      if (TREE_CODE (arg0) == LT_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
	{
	  /* sign_bit_p looks through both zero and sign extensions,
	     but for this optimization only sign extensions are
	     usable.  */
	  tree tem2 = TREE_OPERAND (arg0, 0);
	  while (tem != tem2)
	    {
	      if (TREE_CODE (tem2) != NOP_EXPR
		  || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
		{
		  tem = NULL_TREE;
		  break;
		}
	      tem2 = TREE_OPERAND (tem2, 0);
	    }
	  /* sign_bit_p only checks ARG1 bits within A's precision.
	     If <sign bit of A> has wider type than A, bits outside
	     of A's precision in <sign bit of A> need to be checked.
	     If they are all 0, this optimization needs to be done
	     in unsigned A's type, if they are all 1 in signed A's type,
	     otherwise this can't be done.  */
	  if (tem
	      && TYPE_PRECISION (TREE_TYPE (tem))
		 < TYPE_PRECISION (TREE_TYPE (arg1))
	      && TYPE_PRECISION (TREE_TYPE (tem))
		 < TYPE_PRECISION (type))
	    {
	      int inner_width, outer_width;
	      tree tem_type;

	      inner_width = TYPE_PRECISION (TREE_TYPE (tem));
	      outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
	      if (outer_width > TYPE_PRECISION (type))
		outer_width = TYPE_PRECISION (type);

	      wide_int mask = wi::shifted_mask
		(inner_width, outer_width - inner_width, false,
		 TYPE_PRECISION (TREE_TYPE (arg1)));

	      wide_int common = mask & arg1;
	      if (common == mask)
		{
		  tem_type = signed_type_for (TREE_TYPE (tem));
		  tem = fold_convert_loc (loc, tem_type, tem);
		}
	      else if (common == 0)
		{
		  tem_type = unsigned_type_for (TREE_TYPE (tem));
		  tem = fold_convert_loc (loc, tem_type, tem);
		}
	      else
		tem = NULL_TREE;
	    }

	  if (tem)
	    return
	      fold_convert_loc (loc, type,
				fold_build2_loc (loc, BIT_AND_EXPR,
						 TREE_TYPE (tem), tem,
						 fold_convert_loc (loc,
								   TREE_TYPE (tem),
								   arg1)));
	}

      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
	 already handled above.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && integer_pow2p (arg1))
	{
	  tree tem = TREE_OPERAND (arg0, 0);
	  STRIP_NOPS (tem);
	  if (TREE_CODE (tem) == RSHIFT_EXPR
	      && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
	      && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
		 tree_to_uhwi (TREE_OPERAND (tem, 1)))
	    return fold_build2_loc (loc, BIT_AND_EXPR, type,
				    TREE_OPERAND (tem, 0), arg1);
	}
      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
	 is probably obsolete because the first operand should be a
	 truth value (that's why we have the two cases above), but let's
	 leave it in until we can confirm this for all front-ends.  */
      if (integer_zerop (op2)
	  && TREE_CODE (arg0) == NE_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_pow2p (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      arg1, OEP_ONLY_CONST))
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
							  TREE_OPERAND (arg0, 0)));

      /* Disable the transformations below for vectors, since
	 fold_binary_op_with_conditional_arg may undo them immediately,
	 yielding an infinite loop.  */
      if (code == VEC_COND_EXPR)
	return NULL_TREE;

      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1))
	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
	return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
							   : TRUTH_ANDIF_EXPR,
				type, fold_convert_loc (loc, type, arg0), arg1);

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1))
	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_invert_truthvalue (loc0, arg0);
	  if (tem)
	    return fold_build2_loc (loc, code == VEC_COND_EXPR
					 ? BIT_IOR_EXPR
					 : TRUTH_ORIF_EXPR,
				    type, fold_convert_loc (loc, type, tem),
				    arg1);
	}

      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
      if (integer_zerop (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2))
	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_invert_truthvalue (loc0, arg0);
	  if (tem)
	    return fold_build2_loc (loc, code == VEC_COND_EXPR
					 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
				    type, fold_convert_loc (loc, type, tem),
				    op2);
	}

      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2))
	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
	return fold_build2_loc (loc, code == VEC_COND_EXPR
				     ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
				type, fold_convert_loc (loc, type, arg0), op2);

      return NULL_TREE;

    case CALL_EXPR:
      /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
	 of fold_ternary on them.  */
      gcc_unreachable ();
11615 case BIT_FIELD_REF
:
11616 if ((TREE_CODE (arg0
) == VECTOR_CST
           || (TREE_CODE (arg0) == CONSTRUCTOR
               && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
          && (type == TREE_TYPE (TREE_TYPE (arg0))
              || (TREE_CODE (type) == VECTOR_TYPE
                  && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
        {
          tree eltype = TREE_TYPE (TREE_TYPE (arg0));
          unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
          unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
          unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);

          if (n != 0
              && (idx % width) == 0
              && (n % width) == 0
              && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
            {
              idx = idx / width;
              n = n / width;

              if (TREE_CODE (arg0) == VECTOR_CST)
                {
                  if (n == 1)
                    return VECTOR_CST_ELT (arg0, idx);

                  tree *vals = XALLOCAVEC (tree, n);
                  for (unsigned i = 0; i < n; ++i)
                    vals[i] = VECTOR_CST_ELT (arg0, idx + i);
                  return build_vector (type, vals);
                }

              /* Constructor elements can be subvectors.  */
              unsigned HOST_WIDE_INT k = 1;
              if (CONSTRUCTOR_NELTS (arg0) != 0)
                {
                  tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
                  if (TREE_CODE (cons_elem) == VECTOR_TYPE)
                    k = TYPE_VECTOR_SUBPARTS (cons_elem);
                }

              /* We keep an exact subset of the constructor elements.  */
              if ((idx % k) == 0 && (n % k) == 0)
                {
                  if (CONSTRUCTOR_NELTS (arg0) == 0)
                    return build_constructor (type, NULL);
                  idx /= k;
                  n /= k;
                  if (n == 1)
                    {
                      if (idx < CONSTRUCTOR_NELTS (arg0))
                        return CONSTRUCTOR_ELT (arg0, idx)->value;
                      return build_zero_cst (type);
                    }
                  vec<constructor_elt, va_gc> *vals;
                  vec_alloc (vals, n);
                  for (unsigned i = 0;
                       i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
                       ++i)
                    CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
                                            CONSTRUCTOR_ELT
                                              (arg0, idx + i)->value);
                  return build_constructor (type, vals);
                }
              /* The bitfield references a single constructor element.  */
              else if (idx + n <= (idx / k + 1) * k)
                {
                  if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
                    return build_zero_cst (type);
                  else if (n == k)
                    return CONSTRUCTOR_ELT (arg0, idx / k)->value;
                  else
                    return fold_build3_loc (loc, code, type,
                      CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
                      build_int_cst (TREE_TYPE (op2), (idx % k) * width));
                }
            }
        }

      /* A bit-field-ref that referenced the full argument can be stripped.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
          && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
          && integer_zerop (op2))
        return fold_convert_loc (loc, type, arg0);

      /* On constants we can use native encode/interpret to constant
         fold (nearly) all BIT_FIELD_REFs.  */
      if (CONSTANT_CLASS_P (arg0)
          && can_native_interpret_type_p (type)
          && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
          /* This limitation should not be necessary, we just need to
             round this up to mode size.  */
          && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
          /* Need bit-shifting of the buffer to relax the following.  */
          && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
        {
          unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
          unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
          unsigned HOST_WIDE_INT clen;
          clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
          /* ??? We cannot tell native_encode_expr to start at
             some random byte only.  So limit us to a reasonable amount
             of work.  */
          if (clen <= 4096)
            {
              unsigned char *b = XALLOCAVEC (unsigned char, clen);
              unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
              if (len > 0
                  && len * BITS_PER_UNIT >= bitpos + bitsize)
                {
                  tree v = native_interpret_expr (type,
                                                  b + bitpos / BITS_PER_UNIT,
                                                  bitsize / BITS_PER_UNIT);
                  if (v)
                    return v;
                }
            }
        }

      return NULL_TREE;

    case FMA_EXPR:
      /* For integers we can decompose the FMA if possible.  */
      if (TREE_CODE (arg0) == INTEGER_CST
          && TREE_CODE (arg1) == INTEGER_CST)
        return fold_build2_loc (loc, PLUS_EXPR, type,
                                const_binop (MULT_EXPR, arg0, arg1), arg2);
      if (integer_zerop (arg2))
        return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);

      return fold_fma (loc, type, arg0, arg1, arg2);
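
/* Illustrative sketch (not part of GCC): what the FMA_EXPR folding above
   computes for all-constant operands.  The helper name and values are
   hypothetical; building trees assumes an initialized middle end.  */
#if 0
static tree
example_fold_constant_fma (location_t loc)
{
  tree t = integer_type_node;
  tree a = build_int_cst (t, 3);
  tree b = build_int_cst (t, 4);
  tree c = build_int_cst (t, 5);
  /* FMA (3, 4, 5) decomposes to PLUS (MULT (3, 4), 5) and folds to 17.  */
  return fold_build3_loc (loc, FMA_EXPR, t, a, b, c);
}
#endif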
    case VEC_PERM_EXPR:
      if (TREE_CODE (arg2) == VECTOR_CST)
        {
          unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
          unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
          unsigned char *sel2 = sel + nelts;
          bool need_mask_canon = false;
          bool need_mask_canon2 = false;
          bool all_in_vec0 = true;
          bool all_in_vec1 = true;
          bool maybe_identity = true;
          bool single_arg = (op0 == op1);
          bool changed = false;

          mask2 = 2 * nelts - 1;
          mask = single_arg ? (nelts - 1) : mask2;
          gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
          for (i = 0; i < nelts; i++)
            {
              tree val = VECTOR_CST_ELT (arg2, i);
              if (TREE_CODE (val) != INTEGER_CST)
                return NULL_TREE;

              /* Make sure that the perm value is in an acceptable
                 range.  */
              wide_int t = val;
              need_mask_canon |= wi::gtu_p (t, mask);
              need_mask_canon2 |= wi::gtu_p (t, mask2);
              sel[i] = t.to_uhwi () & mask;
              sel2[i] = t.to_uhwi () & mask2;

              if (sel[i] < nelts)
                all_in_vec1 = false;
              else
                all_in_vec0 = false;

              if ((sel[i] & (nelts-1)) != i)
                maybe_identity = false;
            }

          if (maybe_identity)
            {
              if (all_in_vec0)
                return op0;
              if (all_in_vec1)
                return op1;
            }

          if (all_in_vec0)
            op1 = op0;
          else if (all_in_vec1)
            {
              op0 = op1;
              for (i = 0; i < nelts; i++)
                sel[i] -= nelts;
              need_mask_canon = true;
            }

          if ((TREE_CODE (op0) == VECTOR_CST
               || TREE_CODE (op0) == CONSTRUCTOR)
              && (TREE_CODE (op1) == VECTOR_CST
                  || TREE_CODE (op1) == CONSTRUCTOR))
            {
              tree t = fold_vec_perm (type, op0, op1, sel);
              if (t != NULL_TREE)
                return t;
            }

          if (op0 == op1 && !single_arg)
            changed = true;

          /* Some targets are deficient and fail to expand a single
             argument permutation while still allowing an equivalent
             2-argument version.  */
          if (need_mask_canon && arg2 == op2
              && !can_vec_perm_p (TYPE_MODE (type), false, sel)
              && can_vec_perm_p (TYPE_MODE (type), false, sel2))
            {
              need_mask_canon = need_mask_canon2;
              sel = sel2;
            }

          if (need_mask_canon && arg2 == op2)
            {
              tree *tsel = XALLOCAVEC (tree, nelts);
              tree eltype = TREE_TYPE (TREE_TYPE (arg2));
              for (i = 0; i < nelts; i++)
                tsel[i] = build_int_cst (eltype, sel[i]);
              op2 = build_vector (TREE_TYPE (arg2), tsel);
              changed = true;
            }

          if (changed)
            return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
        }
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
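
/* Illustrative sketch (not part of GCC): with all-constant operands the
   VEC_PERM_EXPR case above folds to a VECTOR_CST via fold_vec_perm.
   Hypothetical helper; V4SI, A, B and SEL stand for a 4-element vector
   type and suitable constant vectors.  */
#if 0
static tree
example_fold_vec_perm (location_t loc, tree v4si, tree a, tree b, tree sel)
{
  /* For a = {1,2,3,4}, b = {5,6,7,8}, sel = {0,4,1,5} the result is
     {1,5,2,6}: selector values below 4 pick from A, larger ones pick
     element (value - 4) from B.  */
  return fold_build3_loc (loc, VEC_PERM_EXPR, v4si, a, b, sel);
}
#endif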
/* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
   of an array (or vector).  */

tree
get_array_ctor_element_at_index (tree ctor, offset_int access_index)
{
  tree index_type = NULL_TREE;
  offset_int low_bound = 0;

  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    {
      tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
      if (domain_type && TYPE_MIN_VALUE (domain_type))
        {
          /* Static constructors for variably sized objects make no sense.  */
          gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
          index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
          low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
        }
    }

  if (index_type)
    access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
                            TYPE_SIGN (index_type));

  offset_int index = low_bound - 1;
  if (index_type)
    index = wi::ext (index, TYPE_PRECISION (index_type),
                     TYPE_SIGN (index_type));

  offset_int max_index;
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* Array constructor might explicitly set index, or specify a range,
         or leave index NULL meaning that it is next index after previous
         one.  */
      if (cfield)
        {
          if (TREE_CODE (cfield) == INTEGER_CST)
            max_index = index = wi::to_offset (cfield);
          else
            {
              gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
              index = wi::to_offset (TREE_OPERAND (cfield, 0));
              max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
            }
        }
      else
        {
          index += 1;
          if (index_type)
            index = wi::ext (index, TYPE_PRECISION (index_type),
                             TYPE_SIGN (index_type));
          max_index = index;
        }

      /* Do we have a match?  */
      if (wi::cmpu (access_index, index) >= 0
          && wi::cmpu (access_index, max_index) <= 0)
        return cval;
    }

  return NULL_TREE;
}
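
/* Illustrative sketch (not part of GCC): looking up an element of an
   array CONSTRUCTOR such as {[0] = a, [1] = b, [3] = c}.  Asking for
   index 2 returns NULL_TREE because no initializer covers it.
   Hypothetical helper:  */
#if 0
static tree
example_ctor_lookup (tree ctor)
{
  tree elt = get_array_ctor_element_at_index (ctor, 2);
  /* NULL_TREE means "no explicit initializer for this index".  */
  return elt;
}
#endif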
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;
  location_t loc = EXPR_LOCATION (expr);

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  /* CALL_EXPR-like objects with variable numbers of operands are
     treated specially.  */
  if (kind == tcc_vl_exp)
    {
      if (code == CALL_EXPR)
        {
          tem = fold_call_expr (loc, expr, false);
          return tem ? tem : expr;
        }
      return expr;
    }

  if (IS_EXPR_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);
      tree op0, op1, op2;

      switch (TREE_CODE_LENGTH (code))
        {
        case 1:
          op0 = TREE_OPERAND (t, 0);
          tem = fold_unary_loc (loc, code, type, op0);
          return tem ? tem : expr;
        case 2:
          op0 = TREE_OPERAND (t, 0);
          op1 = TREE_OPERAND (t, 1);
          tem = fold_binary_loc (loc, code, type, op0, op1);
          return tem ? tem : expr;
        case 3:
          op0 = TREE_OPERAND (t, 0);
          op1 = TREE_OPERAND (t, 1);
          op2 = TREE_OPERAND (t, 2);
          tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
          return tem ? tem : expr;
        default:
          break;
        }
    }

  switch (code)
    {
    case ARRAY_REF:
      {
        tree op0 = TREE_OPERAND (t, 0);
        tree op1 = TREE_OPERAND (t, 1);

        if (TREE_CODE (op1) == INTEGER_CST
            && TREE_CODE (op0) == CONSTRUCTOR
            && ! type_contains_placeholder_p (TREE_TYPE (op0)))
          {
            tree val = get_array_ctor_element_at_index (op0,
                                                        wi::to_offset (op1));
            if (val)
              return val;
          }

        return t;
      }

    case CONSTRUCTOR:
      {
        /* Return a VECTOR_CST if possible.  */
        unsigned i;
        tree val;
        tree type = TREE_TYPE (t);
        if (TREE_CODE (type) != VECTOR_TYPE)
          return t;

        FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
          if (! CONSTANT_CLASS_P (val))
            return t;

        return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
      }

    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    default:
      return t;
    } /* switch (code) */
}
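
/* Illustrative sketch (not part of GCC): fold is the generic entry point;
   callers hand it any tree and get back either a simplified tree or the
   original one.  Hypothetical helper:  */
#if 0
static tree
example_fold_plus_zero (tree a)
{
  /* For an integral A, A + 0 folds back to A; 2 + 3 would fold to 5.  */
  tree sum = build2 (PLUS_EXPR, TREE_TYPE (a), a,
                     build_zero_cst (TREE_TYPE (a)));
  return fold (sum);
}
#endif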
#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (const_tree, struct md5_ctx *,
                                hash_table<nofree_ptr_hash<const tree_node> > *);
static void fold_check_failed (const_tree, const_tree);
void print_fold_checksum (const_tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after the actual fold call, to verify that fold did not
   accidentally change the original expr.  */

tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  hash_table<nofree_ptr_hash<const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before);
  ht.empty ();

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}

void
print_fold_checksum (const_tree expr)
{
  struct md5_ctx ctx;
  unsigned char checksum[16], cnt;
  hash_table<nofree_ptr_hash<const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum);
  for (cnt = 0; cnt < 16; ++cnt)
    fprintf (stderr, "%02x", checksum[cnt]);
  putc ('\n', stderr);
}

static void
fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}

static void
fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
                    hash_table<nofree_ptr_hash <const tree_node> > *ht)
{
  const tree_node **slot;
  enum tree_code code;
  union tree_node buf;
  int i, len;

 recursive_label:
  if (expr == NULL)
    return;
  slot = ht->find_slot (expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = expr;
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && HAS_DECL_ASSEMBLER_NAME_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      SET_DECL_ASSEMBLER_NAME ((tree) &buf, NULL);
      buf.decl_with_vis.symtab_node = NULL;
      expr = (tree) &buf;
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
           && (TYPE_POINTER_TO (expr)
               || TYPE_REFERENCE_TO (expr)
               || TYPE_CACHED_VALUES_P (expr)
               || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
               || TYPE_NEXT_VARIANT (expr)))
    {
      /* Allow these fields to be modified.  */
      tree tmp;
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = tmp = (tree) &buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
      TYPE_POINTER_TO (tmp) = NULL;
      TYPE_REFERENCE_TO (tmp) = NULL;
      TYPE_NEXT_VARIANT (tmp) = NULL;
      if (TYPE_CACHED_VALUES_P (tmp))
        {
          TYPE_CACHED_VALUES_P (tmp) = 0;
          TYPE_CACHED_VALUES (tmp) = NULL;
        }
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST
      && code != SSA_NAME
      && CODE_CONTAINS_STRUCT (code, TS_COMMON))
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
        {
        case STRING_CST:
          md5_process_bytes (TREE_STRING_POINTER (expr),
                             TREE_STRING_LENGTH (expr), ctx);
          break;
        case COMPLEX_CST:
          fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
          fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
          break;
        case VECTOR_CST:
          for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
            fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
          break;
        default:
          break;
        }
      break;
    case tcc_exceptional:
      switch (code)
        {
        case TREE_LIST:
          fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
          fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
          expr = TREE_CHAIN (expr);
          goto recursive_label;
          break;
        case TREE_VEC:
          for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
            fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
          break;
        default:
          break;
        }
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
    case tcc_vl_exp:
      len = TREE_OPERAND_LENGTH (expr);
      for (i = 0; i < len; ++i)
        fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
        {
          fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
          fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
          fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
          fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
          fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
        }
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
        {
          if (TREE_CODE (expr) == FUNCTION_DECL)
            {
              fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
              fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
            }
          fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
        }
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
        fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
          || SCALAR_FLOAT_TYPE_P (expr))
        {
          fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
          fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
        }
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
          || TREE_CODE (expr) == UNION_TYPE
          || TREE_CODE (expr) == QUAL_UNION_TYPE)
        fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}
/* Helper function for outputting the checksum of a tree T.  When
   debugging with gdb, you can "define mynext" to be "next" followed
   by "call debug_fold_checksum (op0)", then just trace down till the
   checksum has changed.  */

DEBUG_FUNCTION void
debug_fold_checksum (const_tree t)
{
  int i;
  unsigned char checksum[16];
  struct md5_ctx ctx;
  hash_table<nofree_ptr_hash<const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (t, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum);

  for (i = 0; i < 16; i++)
    fprintf (stderr, "%d ", checksum[i]);

  fprintf (stderr, "\n");
}

#endif
/* Fold a unary tree expression with code CODE of type TYPE with an
   operand OP0.  LOC is the location of the resulting expression.
   Return a folded expression if successful.  Otherwise, return a tree
   expression with code CODE of type TYPE with an operand OP0.  */

tree
fold_build1_stat_loc (location_t loc,
                      enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before[16], checksum_after[16];
  struct md5_ctx ctx;
  hash_table<nofree_ptr_hash<const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before);
  ht.empty ();
#endif

  tem = fold_unary_loc (loc, code, type, op0);
  if (!tem)
    tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (op0, tem);
#endif
  return tem;
}
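
/* Illustrative sketch (not part of GCC): fold_build1_loc either folds
   immediately or builds the node.  For example, negating a negation
   typically hands back the original operand rather than building
   NEGATE_EXPR (NEGATE_EXPR (a)).  Hypothetical helper:  */
#if 0
static tree
example_double_negate (location_t loc, tree a)
{
  tree neg = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (a), a);
  return fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (a), neg);
}
#endif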
/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting
   expression.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with operands
   OP0 and OP1.  */

tree
fold_build2_stat_loc (location_t loc,
                      enum tree_code code, tree type, tree op0, tree op1
                      MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_after_op0[16],
                checksum_after_op1[16];
  struct md5_ctx ctx;
  hash_table<nofree_ptr_hash<const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  ht.empty ();
#endif

  tem = fold_binary_loc (loc, code, type, op0, op1);
  if (!tem)
    tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  ht.empty ();

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op1);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
#endif
  return tem;
}

/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */

tree
fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
                      tree op0, tree op1, tree op2 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_before_op2[16],
                checksum_after_op0[16],
                checksum_after_op1[16],
                checksum_after_op2[16];
  struct md5_ctx ctx;
  hash_table<nofree_ptr_hash<const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  ht.empty ();
#endif

  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
  if (!tem)
    tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  ht.empty ();

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  ht.empty ();

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op2);

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}

/* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
   arguments in ARGARRAY, and a null static chain.
   Return a folded expression if successful.  Otherwise, return a CALL_EXPR
   of type TYPE from the given operands as constructed by build_call_array.  */

tree
fold_build_call_array_loc (location_t loc, tree type, tree fn,
                           int nargs, tree *argarray)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_fn[16],
                checksum_before_arglist[16],
                checksum_after_fn[16],
                checksum_after_arglist[16];
  struct md5_ctx ctx;
  hash_table<nofree_ptr_hash<const tree_node> > ht (32);
  int i;

  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_fn);
  ht.empty ();

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_arglist);
  ht.empty ();
#endif

  tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
  if (!tem)
    tem = build_call_array_loc (loc, type, fn, nargs, argarray);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_fn);
  ht.empty ();

  if (memcmp (checksum_before_fn, checksum_after_fn, 16))
    fold_check_failed (fn, tem);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_arglist);

  if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
    fold_check_failed (NULL_TREE, tem);
#endif
  return tem;
}
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;

tree
fold_build1_initializer_loc (location_t loc, enum tree_code code,
                             tree type, tree op)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build1_loc (loc, code, type, op);

  END_FOLD_INIT;
  return result;
}

tree
fold_build2_initializer_loc (location_t loc, enum tree_code code,
                             tree type, tree op0, tree op1)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build2_loc (loc, code, type, op0, op1);

  END_FOLD_INIT;
  return result;
}

tree
fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
                                       int nargs, tree *argarray)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);

  END_FOLD_INIT;
  return result;
}

#undef START_FOLD_INIT
#undef END_FOLD_INIT
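
/* Illustrative sketch (not part of GCC): why the flag save/restore above
   matters.  With -ftrapping-math, fold must not evaluate a trapping
   division at compile time, but a static initializer is evaluated exactly
   once at translation time, so the _initializer variants may.  Hypothetical:  */
#if 0
static tree
example_fold_initializer_div (location_t loc, tree type, tree num, tree den)
{
  /* Folds a constant NUM / DEN even when flag_trapping_math is set.  */
  return fold_build2_initializer_loc (loc, RDIV_EXPR, type, num, den);
}
#endif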
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */

int
multiple_of_p (tree type, const_tree top, const_tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
         a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
        return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
        {
          tree op1, t1;

          op1 = TREE_OPERAND (top, 1);
          /* const_binop may not detect overflow correctly,
             so check for it explicitly here.  */
          if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
              && 0 != (t1 = fold_convert (type,
                                          const_binop (LSHIFT_EXPR,
                                                       size_one_node,
                                                       op1)))
              && !TREE_OVERFLOW (t1))
            return multiple_of_p (type, t1, bottom);
        }
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
          || (TYPE_PRECISION (type)
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
        return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case COND_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
              && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
          || integer_zerop (bottom)
          || (TYPE_UNSIGNED (type)
              && (tree_int_cst_sgn (top) < 0
                  || tree_int_cst_sgn (bottom) < 0)))
        return 0;
      return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
                                SIGNED);

    default:
      return 0;
    }
}
#define tree_expr_nonnegative_warnv_p(X, Y) \
  _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0

#define RECURSE(X) \
  ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))

/* Return true if CODE or TYPE is known to be non-negative. */

static bool
tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
{
  if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
      && truth_value_p (code))
    /* Truth values evaluate to 0 or 1, which is nonnegative unless we
       have a signed:1 type (where the value is -1 and 0).  */
    return true;

  return false;
}

/* Return true if (CODE OP0) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
                                bool *strict_overflow_p, int depth)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
         ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!ANY_INTEGRAL_TYPE_P (type))
        return true;
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          *strict_overflow_p = true;
          return true;
        }
      break;

    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      return RECURSE (op0);

    CASE_CONVERT:
      {
        tree inner_type = TREE_TYPE (op0);
        tree outer_type = type;

        if (TREE_CODE (outer_type) == REAL_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return RECURSE (op0);
            if (INTEGRAL_TYPE_P (inner_type))
              {
                if (TYPE_UNSIGNED (inner_type))
                  return true;
                return RECURSE (op0);
              }
          }
        else if (INTEGRAL_TYPE_P (outer_type))
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return RECURSE (op0);
            if (INTEGRAL_TYPE_P (inner_type))
              return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
                     && TYPE_UNSIGNED (inner_type);
          }
      }
      break;

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}

/* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
                                 tree op1, bool *strict_overflow_p,
                                 int depth)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type))
        return RECURSE (op0) && RECURSE (op1);

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
         both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
          && TREE_CODE (op0) == NOP_EXPR
          && TREE_CODE (op1) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
            {
              unsigned int prec = MAX (TYPE_PRECISION (inner1),
                                       TYPE_PRECISION (inner2)) + 1;
              return prec < TYPE_PRECISION (type);
            }
        }
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          /* x * x is always non-negative for floating point x
             or without overflow.  */
          if (operand_equal_p (op0, op1, 0)
              || (RECURSE (op0) && RECURSE (op1)))
            {
              if (ANY_INTEGRAL_TYPE_P (type)
                  && TYPE_OVERFLOW_UNDEFINED (type))
                *strict_overflow_p = true;
              return true;
            }
        }

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
         both unsigned and their total width is less than the result's.  */
      if (TREE_CODE (type) == INTEGER_TYPE
          && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
          && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
        {
          tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
            ? TREE_TYPE (TREE_OPERAND (op0, 0))
            : TREE_TYPE (op0);
          tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
            ? TREE_TYPE (TREE_OPERAND (op1, 0))
            : TREE_TYPE (op1);

          bool unsigned0 = TYPE_UNSIGNED (inner0);
          bool unsigned1 = TYPE_UNSIGNED (inner1);

          if (TREE_CODE (op0) == INTEGER_CST)
            unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;

          if (TREE_CODE (op1) == INTEGER_CST)
            unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;

          if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
              && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
            {
              unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
                ? tree_int_cst_min_precision (op0, UNSIGNED)
                : TYPE_PRECISION (inner0);

              unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
                ? tree_int_cst_min_precision (op1, UNSIGNED)
                : TYPE_PRECISION (inner1);

              return precision0 + precision1 < TYPE_PRECISION (type);
            }
        }
      return false;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      return RECURSE (op0) || RECURSE (op1);

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return RECURSE (op0) && RECURSE (op1);

    case TRUNC_MOD_EXPR:
      return RECURSE (op0);

    case FLOOR_MOD_EXPR:
      return RECURSE (op1);

    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}

/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
{
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case FIXED_CST:
      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));

    case COND_EXPR:
      return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));

    case SSA_NAME:
      /* Limit the depth of recursion to avoid quadratic behavior.
         This is expected to catch almost all occurrences in practice.
         If this code misses important cases that unbounded recursion
         would not, passes that need this information could be revised
         to provide it through dataflow propagation.  */
      return (!name_registered_for_update_p (t)
              && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
              && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
                                                  strict_overflow_p, depth));

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
    }
}

/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
                               bool *strict_overflow_p, int depth)
{
  switch (fn)
    {
    CASE_CFN_ACOS:
    CASE_CFN_ACOSH:
    CASE_CFN_CABS:
    CASE_CFN_COSH:
    CASE_CFN_ERFC:
    CASE_CFN_EXP:
    CASE_CFN_EXP10:
    CASE_CFN_EXP2:
    CASE_CFN_FABS:
    CASE_CFN_FDIM:
    CASE_CFN_HYPOT:
    CASE_CFN_POW10:
    CASE_CFN_FFS:
    CASE_CFN_PARITY:
    CASE_CFN_POPCOUNT:
    CASE_CFN_CLZ:
    CASE_CFN_CLRSB:
    case CFN_BUILT_IN_BSWAP32:
    case CFN_BUILT_IN_BSWAP64:
      /* Always true.  */
      return true;

    CASE_CFN_SQRT:
      /* sqrt(-0.0) is -0.0.  */
      if (!HONOR_SIGNED_ZEROS (element_mode (type)))
        return true;
      return RECURSE (arg0);

    CASE_CFN_ASINH:
    CASE_CFN_ATAN:
    CASE_CFN_ATANH:
    CASE_CFN_CBRT:
    CASE_CFN_CEIL:
    CASE_CFN_ERF:
    CASE_CFN_EXPM1:
    CASE_CFN_FLOOR:
    CASE_CFN_FMOD:
    CASE_CFN_FREXP:
    CASE_CFN_ICEIL:
    CASE_CFN_IFLOOR:
    CASE_CFN_IRINT:
    CASE_CFN_IROUND:
    CASE_CFN_LCEIL:
    CASE_CFN_LDEXP:
    CASE_CFN_LFLOOR:
    CASE_CFN_LLCEIL:
    CASE_CFN_LLFLOOR:
    CASE_CFN_LLRINT:
    CASE_CFN_LLROUND:
    CASE_CFN_LRINT:
    CASE_CFN_LROUND:
    CASE_CFN_MODF:
    CASE_CFN_NEARBYINT:
    CASE_CFN_RINT:
    CASE_CFN_ROUND:
    CASE_CFN_SCALB:
    CASE_CFN_SCALBLN:
    CASE_CFN_SCALBN:
    CASE_CFN_SIGNBIT:
    CASE_CFN_SIGNIFICAND:
    CASE_CFN_SINH:
    CASE_CFN_TANH:
    CASE_CFN_TRUNC:
      /* True if the 1st argument is nonnegative.  */
      return RECURSE (arg0);

    CASE_CFN_FMAX:
      /* True if the 1st OR 2nd arguments are nonnegative.  */
      return RECURSE (arg0) || RECURSE (arg1);

    CASE_CFN_FMIN:
      /* True if the 1st AND 2nd arguments are nonnegative.  */
      return RECURSE (arg0) && RECURSE (arg1);

    CASE_CFN_COPYSIGN:
      /* True if the 2nd argument is nonnegative.  */
      return RECURSE (arg1);

    CASE_CFN_POWI:
      /* True if the 1st argument is nonnegative or the second
         argument is an even integer.  */
      if (TREE_CODE (arg1) == INTEGER_CST
          && (TREE_INT_CST_LOW (arg1) & 1) == 0)
        return true;
      return RECURSE (arg0);

    CASE_CFN_POW:
      /* True if the 1st argument is nonnegative or the second
         argument is an even integer valued real.  */
      if (TREE_CODE (arg1) == REAL_CST)
        {
          REAL_VALUE_TYPE c;
          HOST_WIDE_INT n;

          c = TREE_REAL_CST (arg1);
          n = real_to_integer (&c);
          if ((n & 1) == 0)
            {
              REAL_VALUE_TYPE cint;
              real_from_integer (&cint, VOIDmode, n, SIGNED);
              if (real_identical (&c, &cint))
                return true;
            }
        }
      return RECURSE (arg0);

    default:
      break;
    }
  return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
}

/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
{
  enum tree_code code = TREE_CODE (t);
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (code)
    {
    case TARGET_EXPR:
      {
        tree temp = TARGET_EXPR_SLOT (t);
        t = TARGET_EXPR_INITIAL (t);

        /* If the initializer is non-void, then it's a normal expression
           that will be assigned to the slot.  */
        if (!VOID_TYPE_P (t))
          return RECURSE (t);

        /* Otherwise, the initializer sets the slot in some way.  One common
           way is an assignment statement at the end of the initializer.  */
        while (1)
          {
            if (TREE_CODE (t) == BIND_EXPR)
              t = expr_last (BIND_EXPR_BODY (t));
            else if (TREE_CODE (t) == TRY_FINALLY_EXPR
                     || TREE_CODE (t) == TRY_CATCH_EXPR)
              t = expr_last (TREE_OPERAND (t, 0));
            else if (TREE_CODE (t) == STATEMENT_LIST)
              t = expr_last (t);
            else
              break;
          }
        if (TREE_CODE (t) == MODIFY_EXPR
            && TREE_OPERAND (t, 0) == temp)
          return RECURSE (TREE_OPERAND (t, 1));

        return false;
      }

    case CALL_EXPR:
      {
        tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
        tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;

        return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
                                              get_call_combined_fn (t),
                                              arg0,
                                              arg1,
                                              strict_overflow_p, depth);
      }
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      return RECURSE (TREE_OPERAND (t, 1));

    case BIND_EXPR:
      return RECURSE (expr_last (TREE_OPERAND (t, 1)));

    case SAVE_EXPR:
      return RECURSE (TREE_OPERAND (t, 0));

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
    }
}

#undef RECURSE
#undef tree_expr_nonnegative_warnv_p

/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
{
  enum tree_code code;
  if (t == error_mark_node)
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              TREE_OPERAND (t, 1),
                                              strict_overflow_p, depth);

    case tcc_unary:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
                                             TREE_TYPE (t),
                                             TREE_OPERAND (t, 0),
                                             strict_overflow_p, depth);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              TREE_OPERAND (t, 1),
                                              strict_overflow_p, depth);
    case TRUTH_NOT_EXPR:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
                                             TREE_TYPE (t),
                                             TREE_OPERAND (t, 0),
                                             strict_overflow_p, depth);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);

    default:
      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
    }
}

/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */

bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
                            "determining that expression is always "
                            "non-negative"),
                           WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
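
/* Illustrative sketch (not part of GCC): asking whether x * x is provably
   non-negative.  For signed x this holds only when signed overflow is
   undefined, which is why the _warnv variant threads *STRICT_OVERFLOW_P
   back to the caller.  Hypothetical helper:  */
#if 0
static bool
example_square_is_nonnegative (tree x)
{
  tree sq = build2 (MULT_EXPR, TREE_TYPE (x), x, x);
  return tree_expr_nonnegative_p (sq);
}
#endif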
/* Return true when (CODE OP0) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

static bool
tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
                            bool *strict_overflow_p)
{
  switch (code)
    {
    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
                                        strict_overflow_p);

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (op0);
        tree outer_type = type;

        return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
                && tree_expr_nonzero_warnv_p (op0,
                                              strict_overflow_p));
      }
      break;

    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
                                        strict_overflow_p);

    default:
      break;
    }

  return false;
}

/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

static bool
tree_binary_nonzero_warnv_p (enum tree_code code,
                             tree type,
                             tree op0,
                             tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
        {
          /* With the presence of negative values it is hard
             to say something.  */
          sub_strict_overflow_p = false;
          if (!tree_expr_nonnegative_warnv_p (op0,
                                              &sub_strict_overflow_p)
              || !tree_expr_nonnegative_warnv_p (op1,
                                                 &sub_strict_overflow_p))
            return false;
          /* One of operands must be positive and the other non-negative.  */
          /* We don't set *STRICT_OVERFLOW_P here: even if this value
             overflows, on a twos-complement machine the sum of two
             nonnegative numbers can never be zero.  */
          return (tree_expr_nonzero_warnv_p (op0,
                                             strict_overflow_p)
                  || tree_expr_nonzero_warnv_p (op1,
                                                strict_overflow_p));
        }
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          if (tree_expr_nonzero_warnv_p (op0,
                                         strict_overflow_p)
              && tree_expr_nonzero_warnv_p (op1,
                                            strict_overflow_p))
            {
              *strict_overflow_p = true;
              return true;
            }
        }
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
                                     &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (op1,
                                        &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
                                     &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;

          /* When both operands are nonzero, then MAX must be too.  */
          if (tree_expr_nonzero_warnv_p (op1,
                                         strict_overflow_p))
            return true;

          /* MAX where operand 0 is positive is positive.  */
          return tree_expr_nonnegative_warnv_p (op0,
                                                strict_overflow_p);
        }
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1,
                                          &sub_strict_overflow_p)
               && tree_expr_nonnegative_warnv_p (op1,
                                                 &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (op1,
                                         strict_overflow_p)
              || tree_expr_nonzero_warnv_p (op0,
                                            strict_overflow_p));

    default:
      break;
    }

  return false;
}

/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
        tree base = TREE_OPERAND (t, 0);

        if (!DECL_P (base))
          base = get_base_address (base);

        if (!base)
          return false;

        /* For objects in symbol table check if we know they are non-zero.
           Don't do anything for variables and functions before symtab is built;
           it is quite possible that they will be declared weak later.  */
        if (DECL_P (base) && decl_in_symtab_p (base))
          {
            struct symtab_node *symbol;

            symbol = symtab_node::get_create (base);
            if (symbol)
              return symbol->nonzero_address ();
            else
              return false;
          }

        /* Function local objects are never NULL.  */
        if (DECL_P (base)
            && (DECL_CONTEXT (base)
                && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
                && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
          return true;

        /* Constants are never weak.  */
        if (CONSTANT_CLASS_P (base))
          return true;

        return false;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                     &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
                                        &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    default:
      break;
    }
  return false;
}
#define integer_valued_real_p(X) \
  _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0

#define RECURSE(X) \
  ((integer_valued_real_p) (X, depth + 1))

/* Return true if the floating point result of (CODE OP0) has an
   integer value.  We also allow +Inf, -Inf and NaN to be considered
   integer values.

   DEPTH is the current nesting depth of the query.  */

bool
integer_valued_real_unary_p (tree_code code, tree op0, int depth)
{
  switch (code)
    {
    case FLOAT_EXPR:
      return true;

    case ABS_EXPR:
      return RECURSE (op0);

    CASE_CONVERT:
      {
        tree type = TREE_TYPE (op0);
        if (TREE_CODE (type) == INTEGER_TYPE)
          return true;
        if (TREE_CODE (type) == REAL_TYPE)
          return RECURSE (op0);
        break;
      }

    default:
      break;
    }
  return false;
}

/* Return true if the floating point result of (CODE OP0 OP1) has an
   integer value.  We also allow +Inf, -Inf and NaN to be considered
   integer values.

   DEPTH is the current nesting depth of the query.  */

bool
integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
{
  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      return RECURSE (op0) && RECURSE (op1);

    default:
      break;
    }
  return false;
}

/* Return true if the floating point result of calling FNDECL with arguments
   ARG0 and ARG1 has an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  If FNDECL takes fewer than 2 arguments,
   the remaining ARGn are null.

   DEPTH is the current nesting depth of the query.  */

bool
integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
{
  switch (fn)
    {
    CASE_CFN_CEIL:
    CASE_CFN_FLOOR:
    CASE_CFN_NEARBYINT:
    CASE_CFN_RINT:
    CASE_CFN_ROUND:
    CASE_CFN_TRUNC:
      return true;

    CASE_CFN_FMIN:
    CASE_CFN_FMAX:
      return RECURSE (arg0) && RECURSE (arg1);

    default:
      break;
    }
  return false;
}

/* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
   has an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.

   DEPTH is the current nesting depth of the query.  */

bool
integer_valued_real_single_p (tree t, int depth)
{
  switch (TREE_CODE (t))
    {
    case REAL_CST:
      return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));

    case COND_EXPR:
      return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));

    case SSA_NAME:
      /* Limit the depth of recursion to avoid quadratic behavior.
         This is expected to catch almost all occurrences in practice.
         If this code misses important cases that unbounded recursion
         would not, passes that need this information could be revised
         to provide it through dataflow propagation.  */
      return (!name_registered_for_update_p (t)
              && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
              && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
                                                    depth));

    default:
      break;
    }
  return false;
}

/* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
   has an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.

   DEPTH is the current nesting depth of the query.  */

static bool
integer_valued_real_invalid_p (tree t, int depth)
{
  switch (TREE_CODE (t))
    {
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return RECURSE (TREE_OPERAND (t, 1));

    case SAVE_EXPR:
      return RECURSE (TREE_OPERAND (t, 0));

    default:
      break;
    }
  return false;
}

#undef RECURSE
#undef integer_valued_real_p

/* Return true if the floating point expression T has an integer value.
   We also allow +Inf, -Inf and NaN to be considered integer values.

   DEPTH is the current nesting depth of the query.  */

bool
integer_valued_real_p (tree t, int depth)
{
  if (t == error_mark_node)
    return false;

  tree_code code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
                                           TREE_OPERAND (t, 1), depth);

    case tcc_unary:
      return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return integer_valued_real_single_p (t, depth);

    default:
      break;
    }

  switch (code)
    {
    case COND_EXPR:
    case SSA_NAME:
      return integer_valued_real_single_p (t, depth);

    case CALL_EXPR:
      {
        tree arg0 = (call_expr_nargs (t) > 0
                     ? CALL_EXPR_ARG (t, 0)
                     : NULL_TREE);
        tree arg1 = (call_expr_nargs (t) > 1
                     ? CALL_EXPR_ARG (t, 1)
                     : NULL_TREE);
        return integer_valued_real_call_p (get_call_combined_fn (t),
                                           arg0, arg1, depth);
      }

    default:
      return integer_valued_real_invalid_p (t, depth);
    }
}
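
/* Illustrative sketch (not part of GCC): (double) i is integer valued for
   any integer i, which is the FLOAT_EXPR case above.  Hypothetical:  */
#if 0
static bool
example_float_of_int_is_integer_valued (tree i)
{
  tree d = build1 (FLOAT_EXPR, double_type_node, i);
  return integer_valued_real_p (d);  /* Always true for FLOAT_EXPR.  */
}
#endif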
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;
      location_t loc = EXPR_LOCATION (exp);

      if (TREE_CODE (exp) == INDIRECT_REF)
        string = string_constant (exp1, &index);
      else
        {
          tree low_bound = array_ref_low_bound (exp);
          index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));

          /* Optimize the special-case of a zero lower bound.

             We convert the low_bound to sizetype to avoid some problems
             with constant folding.  (E.g. suppose the lower bound is 1,
             and its mode is QI.  Without the conversion, (ARRAY
             +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
             +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
          if (! integer_zerop (low_bound))
            index = size_diffop_loc (loc, index,
                                     fold_convert_loc (loc, sizetype,
                                                       low_bound));

          string = exp1;
        }

      if (string
          && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
          && TREE_CODE (string) == STRING_CST
          && TREE_CODE (index) == INTEGER_CST
          && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
          && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
              == MODE_INT)
          && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
        return build_int_cst_type (TREE_TYPE (exp),
                                   (TREE_STRING_POINTER (string)
                                    [TREE_INT_CST_LOW (index)]));
    }
  return NULL_TREE;
}
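
/* Illustrative sketch (not part of GCC): reading "abc"[1] through
   fold_read_from_constant_string yields the character constant 'b'.
   Hypothetical helper, where REF is such an INDIRECT_REF or ARRAY_REF:  */
#if 0
static tree
example_read_string_elt (tree ref)
{
  tree c = fold_read_from_constant_string (ref);
  return c ? c : ref;  /* NULL means the access was not constant.  */
}
#endif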
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        bool overflow;
        wide_int val = wi::neg (arg0, &overflow);
        t = force_fit_type (type, val, 1,
                            (overflow | TREE_OVERFLOW (arg0))
                            && !TYPE_UNSIGNED (type));
        break;
      }

    case REAL_CST:
      t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
        FIXED_VALUE_TYPE f;
        bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
                                            &(TREE_FIXED_CST (arg0)), NULL,
                                            TYPE_SATURATING (type));
        t = build_fixed (type, f);
        /* Propagate overflow flags.  */
        if (overflow_p | TREE_OVERFLOW (arg0))
          TREE_OVERFLOW (t) = 1;
        break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}

/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        /* If the value is unsigned or non-negative, then the absolute value
           is the same as the ordinary value.  */
        if (!wi::neg_p (arg0, TYPE_SIGN (type)))
          t = arg0;

        /* If the value is negative, then the absolute value is
           its negation.  */
        else
          {
            bool overflow;
            wide_int val = wi::neg (arg0, &overflow);
            t = force_fit_type (type, val, -1,
                                overflow | TREE_OVERFLOW (arg0));
          }
      }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
        t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      else
        t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}

/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (const_tree arg0, tree type)
{
  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
}
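
/* Illustrative sketch (not part of GCC): these helpers work purely on
   constants.  For instance, fold_negate_const on the minimum value of a
   signed type sets TREE_OVERFLOW, since -INT_MIN is not representable.
   Hypothetical helper:  */
#if 0
static tree
example_negate_int_cst (tree type, HOST_WIDE_INT v)
{
  return fold_negate_const (build_int_cst (type, v), type);
}
#endif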
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
        {
          switch (code)
            {
            case EQ_EXPR:
            case ORDERED_EXPR:
              result = 0;
              break;

            case NE_EXPR:
            case UNORDERED_EXPR:
            case UNLT_EXPR:
            case UNLE_EXPR:
            case UNGT_EXPR:
            case UNGE_EXPR:
            case UNEQ_EXPR:
              result = 1;
              break;

            case LT_EXPR:
            case LE_EXPR:
            case GT_EXPR:
            case GE_EXPR:
            case LTGT_EXPR:
              if (flag_trapping_math)
                return NULL_TREE;
              result = 0;
              break;

            default:
              gcc_unreachable ();
            }

          return constant_boolean_node (result, type);
        }

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
                                          TREE_REALPART (op0),
                                          TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
                                          TREE_IMAGPART (op0),
                                          TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
        return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
        return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
        return NULL_TREE;
    }

  if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
    {
      unsigned count = VECTOR_CST_NELTS (op0);
      tree *elts = XALLOCAVEC (tree, count);
      gcc_assert (VECTOR_CST_NELTS (op1) == count
                  && TYPE_VECTOR_SUBPARTS (type) == count);

      for (unsigned i = 0; i < count; i++)
        {
          tree elem_type = TREE_TYPE (type);
          tree elem0 = VECTOR_CST_ELT (op0, i);
          tree elem1 = VECTOR_CST_ELT (op1, i);

          tree tem = fold_relational_const (code, elem_type,
                                            elem0, elem1);

          if (tem == NULL_TREE)
            return NULL_TREE;

          elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
        }

      return build_vector (type, elts);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      std::swap (op0, op1);
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     Otherwise return T.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
        result = tree_int_cst_equal (op0, op1);
      else
        result = tree_int_cst_lt (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
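
/* Illustrative sketch (not part of GCC): with -ftrapping-math, NaN < 1.0
   is deliberately *not* folded (the comparison must raise INVALID at run
   time), while NaN != 1.0 folds to true.  Hypothetical check:  */
#if 0
static bool
example_nan_lt_folds (tree nan_cst, tree one_cst)
{
  tree r = fold_relational_const (LT_EXPR, boolean_type_node,
                                  nan_cst, one_cst);
  return r != NULL_TREE;  /* False when flag_trapping_math is set.  */
}
#endif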
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check to see if the expression inside the
     return has no side effects, or if the right hand side of the modify
     expression inside the return has none.  If either doesn't have side
     effects set, we don't need to wrap the expression in a cleanup point
     expression.  Note we don't check the left hand side of the modify
     because it should always be a return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
        return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
        return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

static tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
        return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
        {
          tree fop = fold_read_from_constant_string (op);
          if (fop)
            return fop;
          else
            return op;
        }
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype)
               && (!in_gimple_form
                   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          if (in_gimple_form
              && TREE_CODE (min_val) != INTEGER_CST)
            return NULL_TREE;
          return build4_loc (loc, ARRAY_REF, type, op, min_val,
                             NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && type == TREE_TYPE (optype))
        return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && type == TREE_TYPE (optype))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3_loc (loc, BIT_FIELD_REF, type, op,
                                  part_width, index);
        }
    }

  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
        {
          tree op00type;
          op00 = TREE_OPERAND (op00, 0);
          op00type = TREE_TYPE (op00);

          /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
          if (TREE_CODE (op00type) == VECTOR_TYPE
              && type == TREE_TYPE (op00type))
            {
              HOST_WIDE_INT offset = tree_to_shwi (op01);
              tree part_width = TYPE_SIZE (type);
              unsigned HOST_WIDE_INT part_widthi
                = tree_to_shwi (part_width) / BITS_PER_UNIT;
              unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
              tree index = bitsize_int (indexi);

              if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
                return fold_build3_loc (loc,
                                        BIT_FIELD_REF, type, op00,
                                        part_width, index);
            }
          /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
          else if (TREE_CODE (op00type) == COMPLEX_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree size = TYPE_SIZE_UNIT (type);
              if (tree_int_cst_equal (size, op01))
                return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
            }
          /* ((foo *)&fooarray)[1] => fooarray[1] */
          else if (TREE_CODE (op00type) == ARRAY_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree type_domain = TYPE_DOMAIN (op00type);
              tree min_val = size_zero_node;
              if (type_domain && TYPE_MIN_VALUE (type_domain))
                min_val = TYPE_MIN_VALUE (type_domain);
              op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
                                     TYPE_SIZE_UNIT (type));
              op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
              return build4_loc (loc, ARRAY_REF, type, op00, op01,
                                 NULL_TREE, NULL_TREE);
            }
        }
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
          || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
          && TREE_CODE (min_val) != INTEGER_CST)
        return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
                         NULL_TREE);
    }

  return NULL_TREE;
}
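/* Worked example for the vector case above (illustrative): with TYPE int
   (32 bits) and OP00TYPE a 4-element vector of int, an OP01 byte offset
   of 8 gives part_widthi == 4, indexi == 64, and 8 / 4 == 2 < 4, so the
   result is BIT_FIELD_REF <vectorfoo, 32, 64>, i.e. element 2.  */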
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}
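/* E.g. (illustrative): for T of type int *, this returns whatever
   simplification fold_indirect_ref_1 finds (such as fooarray[0] when
   T is &fooarray), and otherwise a plain INDIRECT_REF <T> of type int.  */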
/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
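/* Illustrative trace: for an ignored T = COMPOUND_EXPR <f (), x>,
   operand 1 (x) has no side effects, so the loop steps to operand 0
   and returns f () alone; an ignored expression with no side effects
   at all, such as x + 1, is replaced outright by integer_zero_node.  */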
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.  */

tree
round_up_loc (location_t loc, tree value, unsigned int divisor)
{
  tree div = NULL_TREE;

  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
        {
          wide_int val = value;
          bool overflow_p;

          if ((val & (divisor - 1)) == 0)
            return value;

          overflow_p = TREE_OVERFLOW (value);
          val += divisor - 1;
          val &= - (int) divisor;

          return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
        }
      else
        {
          tree t;

          t = build_int_cst (TREE_TYPE (value), divisor - 1);
          value = size_binop_loc (loc, PLUS_EXPR, value, t);
          t = build_int_cst (TREE_TYPE (value), - (int) divisor);
          value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
        }
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
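/* Worked example (illustrative): rounding VALUE = 13 up to DIVISOR = 8.
   8 is a power of two (8 == (8 & -8)), so the constant path computes
   (13 + 7) & -8 == 20 & ~7 == 16.  For a non-power-of-two divisor such
   as 12, the CEIL_DIV_EXPR/MULT_EXPR path gives ceil (13 / 12) * 12
   == 2 * 12 == 24.  */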
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
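/* Worked example (illustrative): rounding VALUE = 13 down to DIVISOR = 8
   computes 13 & -8 == 8; for DIVISOR = 12 the FLOOR_DIV_EXPR/MULT_EXPR
   path gives (13 / 12) * 12 == 12.  */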
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &reversep,
                                  &volatilep, false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
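/* E.g. (illustrative): for EXP = &s.f where field F sits 4 bytes into S,
   get_inner_reference returns S as the base with a bit position of 32,
   so the result is &S with *PBITPOS == 32 and *POFFSET == NULL_TREE.
   A pointer that is not an ADDR_EXPR is returned as-is with a zero
   offset.  */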
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
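/* Worked example (illustrative): for E1 = &a[3] and E2 = &a[1] with
   4-byte elements, both cores are &a, the bit positions are 96 and 32,
   and *DIFF becomes (96 - 32) / BITS_PER_UNIT == 8, i.e. two elements.  */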
/* Return OFF converted to a pointer offset type suitable as offset for
   POINTER_PLUS_EXPR.  Use location LOC for this conversion.  */

tree
convert_to_ptrofftype_loc (location_t loc, tree off)
{
  return fold_convert_loc (loc, sizetype, off);
}
/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */

tree
fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
{
  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
                          ptr, convert_to_ptrofftype_loc (loc, off));
}
/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */

tree
fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
{
  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
                          ptr, size_int (off));
}
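/* Usage sketch (illustrative): for a pointer PTR and a hypothetical
   location LOC, the two calls

     fold_build_pointer_plus_hwi_loc (loc, ptr, 4);
     fold_build_pointer_plus_loc (loc, ptr, size_int (4));

   build the same POINTER_PLUS_EXPR, since size_int already produces a
   sizetype constant and the conversion is then a no-op.  */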
/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!tree_fits_uhwi_p (offset_node)
           || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
}
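/* E.g. (illustrative): for SRC representing "hello" + 2, string_constant
   yields the STRING_CST with an offset of 2 and the result is the char
   pointer "llo"; an offset past the trailing NUL, such as "hello" + 6,
   exceeds TREE_STRING_LENGTH (src) - 1 and yields 0.  */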