/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
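/* A minimal usage sketch of the sizetype helpers (hypothetical
   variable names): to build the sizetype constant for "2 * 4" one
   could write

     tree two = size_int (2);
     tree eight = size_binop (MULT_EXPR, two, size_int (4));

   size_binop folds the multiplication at compile time because both
   operands are INTEGER_CSTs.  */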
#include "coretypes.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "insn-config.h"
#include "diagnostic-core.h"
#include "langhooks.h"
#include "internal-fn.h"
#include "generic-match.h"
#ifndef LOAD_EXTEND_OP
#define LOAD_EXTEND_OP(M) UNKNOWN
#endif
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
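/* The encoding assigns one bit per primitive outcome: 1 for "less",
   2 for "equal", 4 for "greater" and 8 for "unordered".  Composite
   codes are bitwise ORs of these; e.g. COMPCODE_LE == COMPCODE_LT
   | COMPCODE_EQ == 3, and COMPCODE_NE == 13 covers less, greater and
   unordered.  ANDing or ORing two comparisons of the same operands
   then reduces to ANDing or ORing their compcodes.  */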
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    machine_mode *, int *, int *,
                                    tree *, tree *);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static bool vec_cst_ctor_to_array (tree, tree *);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
/* Similar to protected_set_expr_location, but never modify x in place,
   if location can and needs to be set, unshare it.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
                         SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
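/* For example, with INTEGER_CST arguments 12 and 4 this returns the
   constant 3; with 13 and 4 the remainder is nonzero and NULL_TREE is
   returned instead.  */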
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used in a way which requires a diagnostic.  */

static int fold_deferring_overflow_warnings;
/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
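/* A typical (hypothetical) caller brackets a fold whose warnings
   should only be issued if the result is actually used:

     fold_defer_overflow_warnings ();
     tree res = fold_binary (PLUS_EXPR, type, op0, op1);
     fold_undefer_overflow_warnings (res != NULL_TREE, stmt, 0);

   Passing 0 for CODE means "use whatever warning level was
   deferred".  */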
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}
/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }

  return false;
}
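/* E.g. sin is odd, so -sin(x) may be folded to sin(-x).  The rint
   family is only odd when the dynamic rounding mode is not
   observable: rounding toward +Inf maps 0.5 to 1 but -0.5 to -0, so
   the identity fails under -frounding-math, hence the
   !flag_rounding_math guard above.  */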
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}
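/* The only value rejected for a signed type is its minimum: in two's
   complement -INT_MIN == INT_MIN, i.e. exactly the constant whose
   sign bit is the only bit set.  For an 8-bit signed type, -(-128)
   is not representable while every other value negates safely.  */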
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
        if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
          return true;

        int count = TYPE_VECTOR_SUBPARTS (type), i;

        for (i = 0; i < count; i++)
          if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
            return false;

        return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          || HONOR_SIGNED_ZEROS (element_mode (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
             && !HONOR_SIGNED_ZEROS (element_mode (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
        {
          if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
            break;
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
              || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
                  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
            return true;
        }
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
        return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || (ANY_INTEGRAL_TYPE_P (type)
              && !TYPE_OVERFLOW_TRAPS (type)
              && TYPE_OVERFLOW_WRAPS (type))
          || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
        tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
        if (rpart && ipart)
          return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
        int count = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts = XALLOCAVEC (tree, count);

        for (i = 0; i < count; i++)
          {
            elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
        return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          && !HONOR_SIGNED_ZEROS (element_mode (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          && !HONOR_SIGNED_ZEROS (element_mode (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          tem = TREE_OPERAND (t, 0);
          if ((INTEGRAL_TYPE_P (type)
               && (TREE_CODE (tem) == NEGATE_EXPR
                   || (TREE_CODE (tem) == INTEGER_CST
                       && may_negate_without_overflow_p (tem))))
              || !INTEGRAL_TYPE_P (type))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
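/* The RSHIFT_EXPR case deserves a worked example: for a 32-bit int x,
   x >> 31 is 0 or -1, so -(x >> 31) is 0 or 1, which is exactly
   (unsigned) x >> 31.  The rewrite is only valid for shifts by
   precision - 1, where the result is known to be 0 or all-ones.  */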
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */
static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
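/* For instance, splitting IN = a + 5 with CODE == PLUS_EXPR stores the
   INTEGER_CST 5 in *LITP, leaves *CONP null and returns the variable
   part a; for IN = a - 5 the literal goes to *MINUS_LITP instead.  */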
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */
static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
                   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
                                  TYPE_SIGN (TREE_TYPE (parg2)));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RSHIFT_EXPR)
            code = LSHIFT_EXPR;
          else
            code = RSHIFT_EXPR;
        }

      if (code == RSHIFT_EXPR)
        /* It's unclear from the C standard whether shifts can overflow.
           The following code ignores overflow; perhaps a C standard
           interpretation ruling is needed.  */
        res = wi::rshift (arg1, arg2, sign);
      else
        res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RROTATE_EXPR)
            code = LROTATE_EXPR;
          else
            code = RROTATE_EXPR;
        }

      if (code == RROTATE_EXPR)
        res = wi::rrotate (arg1, arg2);
      else
        res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
                      (((sign == SIGNED || overflowable == -1)
                        && overflow)
                       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
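/* E.g. int_const_binop (PLUS_EXPR, ...) on INTEGER_CSTs 3 and 4 yields
   the constant 7.  The default OVERFLOWABLE of 1 only marks signed
   results as TREE_OVERFLOW; sizetype arithmetic passes -1 instead (see
   size_binop_loc below) so that unsigned wraparound is recorded too.  */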
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */
static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      if (code == POINTER_PLUS_EXPR)
        return int_const_binop (PLUS_EXPR,
                                arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may dependent upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          if (TREE_CODE (arg2) != FIXED_CST)
            return NULL_TREE;
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          {
            if (TREE_CODE (arg2) != INTEGER_CST)
              return NULL_TREE;
            wide_int w2 = arg2;
            f2.data.high = w2.elt (1);
            f2.data.low = w2.elt (0);
            f2.mode = SImode;
          }
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ... */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to minimize
                 overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and return NULL_TREE */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);

          elts[i] = const_binop (code, elem1, arg2);

          /* It is possible that const_binop cannot handle the given
             code and return NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}
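/* Worked example for the wide-range branch above, in exact arithmetic:
   dividing 1 + 2i by 3 + 4i has |br| = 3 < |bi| = 4, so the TRUE
   branch computes ratio = 3/4 and div = 4 + 3*(3/4) = 25/4, giving
   tr = (1*(3/4) + 2) / (25/4) = 11/25 and ti = (2*(3/4) - 1) / (25/4)
   = 2/25, i.e. (11 + 2i)/25.  The ratio form never squares the larger
   component, which avoids intermediate overflow when the components
   are near the type's limits.  */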
/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */
tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ???  Until we make the const_binop worker take the type of the
     result as argument put those cases that need it here.  */
  switch (code)
    {
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
           && TREE_CODE (arg2) == REAL_CST)
          || (TREE_CODE (arg1) == INTEGER_CST
              && TREE_CODE (arg2) == INTEGER_CST))
        return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
                    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
        if (TREE_CODE (arg1) != VECTOR_CST
            || TREE_CODE (arg2) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts);
        if (!vec_cst_ctor_to_array (arg1, elts)
            || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
          return NULL_TREE;

        for (i = 0; i < nelts; i++)
          {
            elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
                                          ? NOP_EXPR : FIX_TRUNC_EXPR,
                                          TREE_TYPE (type), elts[i]);
            if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
        unsigned int out, ofs, scale;
        tree *elts;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
                    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
        if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts * 4);
        if (!vec_cst_ctor_to_array (arg1, elts)
            || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
          return NULL_TREE;

        if (code == VEC_WIDEN_MULT_LO_EXPR)
          scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
        else if (code == VEC_WIDEN_MULT_HI_EXPR)
          scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
        else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
          scale = 1, ofs = 0;
        else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
          scale = 1, ofs = 1;

        for (out = 0; out < nelts; out++)
          {
            unsigned int in1 = (out << scale) + ofs;
            unsigned int in2 = in1 + nelts * 2;
            tree t1, t2;

            t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
            t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

            if (t1 == NULL_TREE || t2 == NULL_TREE)
              return NULL_TREE;
            elts[out] = const_binop (MULT_EXPR, t1, t2);
            if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
                       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}
/* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
   Return zero if computing the constants is not possible.  */
tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      if (integer_zerop (arg0))
        return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
        /* Can't call fold_negate_const directly here as that doesn't
           handle all cases and we might not be able to negate some
           constants.  */
        tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
        if (tem && CONSTANT_CLASS_P (tem))
          return tem;
        break;
      }

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
        return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        {
          tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
                                          TREE_TYPE (type));
          return build_complex (type, TREE_REALPART (arg0), ipart);
        }
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return fold_not_const (arg0, type);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
        {
          tree *elements;
          tree elem;
          unsigned count = VECTOR_CST_NELTS (arg0), i;

          elements = XALLOCAVEC (tree, count);
          for (i = 0; i < count; i++)
            {
              elem = VECTOR_CST_ELT (arg0, i);
              elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
              if (elem == NULL_TREE)
                break;
              elements[i] = elem;
            }
          if (i == count)
            return build_vector (type, elements);
        }
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts;
        enum tree_code subcode;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
        if (TREE_CODE (arg0) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts * 2);
        if (!vec_cst_ctor_to_array (arg0, elts))
          return NULL_TREE;

        if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
                                   || code == VEC_UNPACK_FLOAT_LO_EXPR))
          elts += nelts;

        if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
          subcode = NOP_EXPR;
        else
          subcode = FLOAT_EXPR;

        for (i = 0; i < nelts; i++)
          {
            elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
            if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
        unsigned int nelts, i;
        tree *elts;
        enum tree_code subcode;

        if (TREE_CODE (arg0) != VECTOR_CST)
          return NULL_TREE;
        nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

        elts = XALLOCAVEC (tree, nelts);
        if (!vec_cst_ctor_to_array (arg0, elts))
          return NULL_TREE;

        switch (code)
          {
          case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
          case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
          case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
          default: gcc_unreachable ();
          }

        for (i = 1; i < nelts; i++)
          {
            elts[0] = const_binop (subcode, elts[0], elts[i]);
            if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
              return NULL_TREE;
          }

        return elts[0];
      }

    default:
      break;
    }

  return NULL_TREE;
}
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
                         !POINTER_TYPE_P (TREE_TYPE (arg1)),
                         TREE_OVERFLOW (arg1));
}
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = true;
          val = lt;
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = true;
              val = ut;
            }
        }
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
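/* So folding (int) 3.7 truncates to 3 and (int) -3.7 to -3, while
   (int) 1e30 saturates to INT_MAX and (int) NaN becomes 0, both with
   TREE_OVERFLOW set so callers can diagnose the conversion.  */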
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  machine_mode mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
                          HOST_BITS_PER_DOUBLE_INT,
                          SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     By checking if the fractional bits are not zero to add 1 to temp.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type (type, temp, -1,
                      (temp.is_negative ()
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;
  double_int di;

  gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);

  di.low = TREE_INT_CST_ELT (arg1, 0);
  if (TREE_INT_CST_NUNITS (arg1) == 1)
    di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
  else
    di.high = TREE_INT_CST_ELT (arg1, 1);

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}
/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
          && (TYPE_ADDR_SPACE (TREE_TYPE (type))
              != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
        return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, REALPART_EXPR,
                                                  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == FIXED_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1_loc (loc, NOP_EXPR, type, arg);

        case FIXED_POINT_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
          || TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            goto fold_convert_exit;
        }

      switch (TREE_CODE (orig))
        {
        case FIXED_POINT_TYPE:
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case BOOLEAN_TYPE:
        case REAL_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
        case FIXED_POINT_TYPE:
          return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                  fold_convert_loc (loc, TREE_TYPE (type), arg),
                                  fold_convert_loc (loc, TREE_TYPE (type),
                                                    integer_zero_node));
        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 0));
                ipart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 1));
                return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
              }

            arg = save_expr (arg);
            rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
            ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
            rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
            ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
            return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
          }

        default:
          gcc_unreachable ();
        }

    case VECTOR_TYPE:
      if (integer_zerop (arg))
        return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
                  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      gcc_unreachable ();
    }
 fold_convert_exit:
  protected_set_expr_location_unshare (tem, loc);
  return tem;
}
2341 /* Return false if expr can be assumed not to be an lvalue, true
2345 maybe_lvalue_p (const_tree x
)
2347 /* We only need to wrap lvalue tree codes. */
2348 switch (TREE_CODE (x
))
2361 case ARRAY_RANGE_REF
:
2367 case PREINCREMENT_EXPR
:
2368 case PREDECREMENT_EXPR
:
2370 case TRY_CATCH_EXPR
:
2371 case WITH_CLEANUP_EXPR
:
2380 /* Assume the worst for front-end tree codes. */
2381 if ((int)TREE_CODE (x
) >= NUM_TREE_CODES
)
2389 /* Return an expr equal to X but certainly not valid as an lvalue. */
2392 non_lvalue_loc (location_t loc
, tree x
)
2394 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2399 if (! maybe_lvalue_p (x
))
2401 return build1_loc (loc
, NON_LVALUE_EXPR
, TREE_TYPE (x
), x
);
2404 /* When pedantic, return an expr equal to X but certainly not valid as a
2405 pedantic lvalue. Otherwise, return X. */
2408 pedantic_non_lvalue_loc (location_t loc
, tree x
)
2410 return protected_set_expr_location_unshare (x
, loc
);
2413 /* Given a tree comparison code, return the code that is the logical inverse.
2414 It is generally not safe to do this for floating-point comparisons, except
2415 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2416 ERROR_MARK in this case. */
2419 invert_tree_comparison (enum tree_code code
, bool honor_nans
)
2421 if (honor_nans
&& flag_trapping_math
&& code
!= EQ_EXPR
&& code
!= NE_EXPR
2422 && code
!= ORDERED_EXPR
&& code
!= UNORDERED_EXPR
)
2432 return honor_nans
? UNLE_EXPR
: LE_EXPR
;
2434 return honor_nans
? UNLT_EXPR
: LT_EXPR
;
2436 return honor_nans
? UNGE_EXPR
: GE_EXPR
;
2438 return honor_nans
? UNGT_EXPR
: GT_EXPR
;
2452 return UNORDERED_EXPR
;
2453 case UNORDERED_EXPR
:
2454 return ORDERED_EXPR
;
2460 /* Similar, but return the comparison that results if the operands are
2461 swapped. This is safe for floating-point. */
2464 swap_tree_comparison (enum tree_code code
)
2471 case UNORDERED_EXPR
:
2497 /* Convert a comparison tree code from an enum tree_code representation
2498 into a compcode bit-based encoding. This function is the inverse of
2499 compcode_to_comparison. */
2501 static enum comparison_code
2502 comparison_to_compcode (enum tree_code code
)
2519 return COMPCODE_ORD
;
2520 case UNORDERED_EXPR
:
2521 return COMPCODE_UNORD
;
2523 return COMPCODE_UNLT
;
2525 return COMPCODE_UNEQ
;
2527 return COMPCODE_UNLE
;
2529 return COMPCODE_UNGT
;
2531 return COMPCODE_LTGT
;
2533 return COMPCODE_UNGE
;
2539 /* Convert a compcode bit-based encoding of a comparison operator back
2540 to GCC's enum tree_code representation. This function is the
2541 inverse of comparison_to_compcode. */
2543 static enum tree_code
2544 compcode_to_comparison (enum comparison_code code
)
2561 return ORDERED_EXPR
;
2562 case COMPCODE_UNORD
:
2563 return UNORDERED_EXPR
;
2581 /* Return a tree for the comparison which is the combination of
2582 doing the AND or OR (depending on CODE) of the two operations LCODE
2583 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2584 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2585 if this makes the transformation invalid. */
2588 combine_comparisons (location_t loc
,
2589 enum tree_code code
, enum tree_code lcode
,
2590 enum tree_code rcode
, tree truth_type
,
2591 tree ll_arg
, tree lr_arg
)
2593 bool honor_nans
= HONOR_NANS (ll_arg
);
2594 enum comparison_code lcompcode
= comparison_to_compcode (lcode
);
2595 enum comparison_code rcompcode
= comparison_to_compcode (rcode
);
2600 case TRUTH_AND_EXPR
: case TRUTH_ANDIF_EXPR
:
2601 compcode
= lcompcode
& rcompcode
;
2604 case TRUTH_OR_EXPR
: case TRUTH_ORIF_EXPR
:
2605 compcode
= lcompcode
| rcompcode
;
2614 /* Eliminate unordered comparisons, as well as LTGT and ORD
2615 which are not used unless the mode has NaNs. */
2616 compcode
&= ~COMPCODE_UNORD
;
2617 if (compcode
== COMPCODE_LTGT
)
2618 compcode
= COMPCODE_NE
;
2619 else if (compcode
== COMPCODE_ORD
)
2620 compcode
= COMPCODE_TRUE
;
2622 else if (flag_trapping_math
)
2624 /* Check that the original operation and the optimized ones will trap
2625 under the same condition. */
2626 bool ltrap
= (lcompcode
& COMPCODE_UNORD
) == 0
2627 && (lcompcode
!= COMPCODE_EQ
)
2628 && (lcompcode
!= COMPCODE_ORD
);
2629 bool rtrap
= (rcompcode
& COMPCODE_UNORD
) == 0
2630 && (rcompcode
!= COMPCODE_EQ
)
2631 && (rcompcode
!= COMPCODE_ORD
);
2632 bool trap
= (compcode
& COMPCODE_UNORD
) == 0
2633 && (compcode
!= COMPCODE_EQ
)
2634 && (compcode
!= COMPCODE_ORD
);
2636 /* In a short-circuited boolean expression the LHS might be
2637 such that the RHS, if evaluated, will never trap. For
2638 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2639 if neither x nor y is NaN. (This is a mixed blessing: for
2640 example, the expression above will never trap, hence
2641 optimizing it to x < y would be invalid). */
2642 if ((code
== TRUTH_ORIF_EXPR
&& (lcompcode
& COMPCODE_UNORD
))
2643 || (code
== TRUTH_ANDIF_EXPR
&& !(lcompcode
& COMPCODE_UNORD
)))
2646 /* If the comparison was short-circuited, and only the RHS
2647 trapped, we may now generate a spurious trap. */
2649 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
2652 /* If we changed the conditions that cause a trap, we lose. */
2653 if ((ltrap
|| rtrap
) != trap
)
2657 if (compcode
== COMPCODE_TRUE
)
2658 return constant_boolean_node (true, truth_type
);
2659 else if (compcode
== COMPCODE_FALSE
)
2660 return constant_boolean_node (false, truth_type
);
2663 enum tree_code tcode
;
2665 tcode
= compcode_to_comparison ((enum comparison_code
) compcode
);
2666 return fold_build2_loc (loc
, tcode
, truth_type
, ll_arg
, lr_arg
);
2670 /* Return nonzero if two operands (typically of the same tree node)
2671 are necessarily equal. If either argument has side-effects this
2672 function returns zero. FLAGS modifies behavior as follows:
2674 If OEP_ONLY_CONST is set, only return nonzero for constants.
2675 This function tests whether the operands are indistinguishable;
2676 it does not test whether they are equal using C's == operation.
2677 The distinction is important for IEEE floating point, because
2678 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2679 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2681 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2682 even though it may hold multiple values during a function.
2683 This is because a GCC tree node guarantees that nothing else is
2684 executed between the evaluation of its "operands" (which may often
2685 be evaluated in arbitrary order). Hence if the operands themselves
2686 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2687 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2688 unset means assuming isochronic (or instantaneous) tree equivalence.
2689 Unless comparing arbitrary expression trees, such as from different
2690 statements, this flag can usually be left unset.
2692 If OEP_PURE_SAME is set, then pure functions with identical arguments
2693 are considered the same. It is used when the caller has other ways
2694 to ensure that global memory is unchanged in between. */
2697 operand_equal_p (const_tree arg0
, const_tree arg1
, unsigned int flags
)
2699 /* If either is ERROR_MARK, they aren't equal. */
2700 if (TREE_CODE (arg0
) == ERROR_MARK
|| TREE_CODE (arg1
) == ERROR_MARK
2701 || TREE_TYPE (arg0
) == error_mark_node
2702 || TREE_TYPE (arg1
) == error_mark_node
)
2705 /* Similar, if either does not have a type (like a released SSA name),
2706 they aren't equal. */
2707 if (!TREE_TYPE (arg0
) || !TREE_TYPE (arg1
))
2710 /* Check equality of integer constants before bailing out due to
2711 precision differences. */
2712 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
2713 return tree_int_cst_equal (arg0
, arg1
);
2715 /* If both types don't have the same signedness, then we can't consider
2716 them equal. We must check this before the STRIP_NOPS calls
2717 because they may change the signedness of the arguments. As pointers
2718 strictly don't have a signedness, require either two pointers or
2719 two non-pointers as well. */
2720 if (TYPE_UNSIGNED (TREE_TYPE (arg0
)) != TYPE_UNSIGNED (TREE_TYPE (arg1
))
2721 || POINTER_TYPE_P (TREE_TYPE (arg0
)) != POINTER_TYPE_P (TREE_TYPE (arg1
)))
2724 /* We cannot consider pointers to different address space equal. */
2725 if (POINTER_TYPE_P (TREE_TYPE (arg0
)) && POINTER_TYPE_P (TREE_TYPE (arg1
))
2726 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0
)))
2727 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1
)))))
2730 /* If both types don't have the same precision, then it is not safe
2732 if (element_precision (TREE_TYPE (arg0
))
2733 != element_precision (TREE_TYPE (arg1
)))
2739 /* In case both args are comparisons but with different comparison
2740 code, try to swap the comparison operands of one arg to produce
2741 a match and compare that variant. */
2742 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
2743 && COMPARISON_CLASS_P (arg0
)
2744 && COMPARISON_CLASS_P (arg1
))
2746 enum tree_code swap_code
= swap_tree_comparison (TREE_CODE (arg1
));
2748 if (TREE_CODE (arg0
) == swap_code
)
2749 return operand_equal_p (TREE_OPERAND (arg0
, 0),
2750 TREE_OPERAND (arg1
, 1), flags
)
2751 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2752 TREE_OPERAND (arg1
, 0), flags
);
2755 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
2756 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2757 && !(CONVERT_EXPR_P (arg0
) && CONVERT_EXPR_P (arg1
)))
2760 /* This is needed for conversions and for COMPONENT_REF.
2761 Might as well play it safe and always test this. */
2762 if (TREE_CODE (TREE_TYPE (arg0
)) == ERROR_MARK
2763 || TREE_CODE (TREE_TYPE (arg1
)) == ERROR_MARK
2764 || TYPE_MODE (TREE_TYPE (arg0
)) != TYPE_MODE (TREE_TYPE (arg1
)))
2767 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2768 We don't care about side effects in that case because the SAVE_EXPR
2769 takes care of that for us. In all other cases, two expressions are
2770 equal if they have no side effects. If we have two identical
2771 expressions with side effects that should be treated the same due
2772 to the only side effects being identical SAVE_EXPR's, that will
2773 be detected in the recursive calls below.
2774 If we are taking an invariant address of two identical objects
2775 they are necessarily equal as well. */
2776 if (arg0
== arg1
&& ! (flags
& OEP_ONLY_CONST
)
2777 && (TREE_CODE (arg0
) == SAVE_EXPR
2778 || (flags
& OEP_CONSTANT_ADDRESS_OF
)
2779 || (! TREE_SIDE_EFFECTS (arg0
) && ! TREE_SIDE_EFFECTS (arg1
))))
2782 /* Next handle constant cases, those for which we can return 1 even
2783 if ONLY_CONST is set. */
2784 if (TREE_CONSTANT (arg0
) && TREE_CONSTANT (arg1
))
2785 switch (TREE_CODE (arg0
))
2788 return tree_int_cst_equal (arg0
, arg1
);
2791 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0
),
2792 TREE_FIXED_CST (arg1
));
2795 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0
),
2796 TREE_REAL_CST (arg1
)))
2800 if (!HONOR_SIGNED_ZEROS (arg0
))
2802 /* If we do not distinguish between signed and unsigned zero,
2803 consider them equal. */
2804 if (real_zerop (arg0
) && real_zerop (arg1
))
2813 if (VECTOR_CST_NELTS (arg0
) != VECTOR_CST_NELTS (arg1
))
2816 for (i
= 0; i
< VECTOR_CST_NELTS (arg0
); ++i
)
2818 if (!operand_equal_p (VECTOR_CST_ELT (arg0
, i
),
2819 VECTOR_CST_ELT (arg1
, i
), flags
))
2826 return (operand_equal_p (TREE_REALPART (arg0
), TREE_REALPART (arg1
),
2828 && operand_equal_p (TREE_IMAGPART (arg0
), TREE_IMAGPART (arg1
),
2832 return (TREE_STRING_LENGTH (arg0
) == TREE_STRING_LENGTH (arg1
)
2833 && ! memcmp (TREE_STRING_POINTER (arg0
),
2834 TREE_STRING_POINTER (arg1
),
2835 TREE_STRING_LENGTH (arg0
)));
2838 return operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0),
2839 TREE_CONSTANT (arg0
) && TREE_CONSTANT (arg1
)
2840 ? OEP_CONSTANT_ADDRESS_OF
| OEP_ADDRESS_OF
: 0);
2845 if (flags
& OEP_ONLY_CONST
)
2848 /* Define macros to test an operand from arg0 and arg1 for equality and a
2849 variant that allows null and views null as being different from any
2850 non-null value. In the latter case, if either is null, the both
2851 must be; otherwise, do the normal comparison. */
2852 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2853 TREE_OPERAND (arg1, N), flags)
2855 #define OP_SAME_WITH_NULL(N) \
2856 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2857 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2859 switch (TREE_CODE_CLASS (TREE_CODE (arg0
)))
2862 /* Two conversions are equal only if signedness and modes match. */
2863 switch (TREE_CODE (arg0
))
2866 case FIX_TRUNC_EXPR
:
2867 if (TYPE_UNSIGNED (TREE_TYPE (arg0
))
2868 != TYPE_UNSIGNED (TREE_TYPE (arg1
)))
2878 case tcc_comparison
:
2880 if (OP_SAME (0) && OP_SAME (1))
2883 /* For commutative ops, allow the other order. */
2884 return (commutative_tree_code (TREE_CODE (arg0
))
2885 && operand_equal_p (TREE_OPERAND (arg0
, 0),
2886 TREE_OPERAND (arg1
, 1), flags
)
2887 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2888 TREE_OPERAND (arg1
, 0), flags
));
2891 /* If either of the pointer (or reference) expressions we are
2892 dereferencing contain a side effect, these cannot be equal,
2893 but their addresses can be. */
2894 if ((flags
& OEP_CONSTANT_ADDRESS_OF
) == 0
2895 && (TREE_SIDE_EFFECTS (arg0
)
2896 || TREE_SIDE_EFFECTS (arg1
)))
2899 switch (TREE_CODE (arg0
))
2902 if (!(flags
& OEP_ADDRESS_OF
)
2903 && (TYPE_ALIGN (TREE_TYPE (arg0
))
2904 != TYPE_ALIGN (TREE_TYPE (arg1
))))
2906 flags
&= ~(OEP_CONSTANT_ADDRESS_OF
|OEP_ADDRESS_OF
);
2913 case TARGET_MEM_REF
:
2915 /* Require equal access sizes, and similar pointer types.
2916 We can have incomplete types for array references of
2917 variable-sized arrays from the Fortran frontend
2918 though. Also verify the types are compatible. */
2919 if (!((TYPE_SIZE (TREE_TYPE (arg0
)) == TYPE_SIZE (TREE_TYPE (arg1
))
2920 || (TYPE_SIZE (TREE_TYPE (arg0
))
2921 && TYPE_SIZE (TREE_TYPE (arg1
))
2922 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0
)),
2923 TYPE_SIZE (TREE_TYPE (arg1
)), flags
)))
2924 && types_compatible_p (TREE_TYPE (arg0
), TREE_TYPE (arg1
))
2925 && ((flags
& OEP_ADDRESS_OF
)
2926 || (alias_ptr_types_compatible_p
2927 (TREE_TYPE (TREE_OPERAND (arg0
, 1)),
2928 TREE_TYPE (TREE_OPERAND (arg1
, 1)))
2929 && (MR_DEPENDENCE_CLIQUE (arg0
)
2930 == MR_DEPENDENCE_CLIQUE (arg1
))
2931 && (MR_DEPENDENCE_BASE (arg0
)
2932 == MR_DEPENDENCE_BASE (arg1
))
2933 && (TYPE_ALIGN (TREE_TYPE (arg0
))
2934 == TYPE_ALIGN (TREE_TYPE (arg1
)))))))
2936 flags
&= ~(OEP_CONSTANT_ADDRESS_OF
|OEP_ADDRESS_OF
);
2937 return (OP_SAME (0) && OP_SAME (1)
2938 /* TARGET_MEM_REF require equal extra operands. */
2939 && (TREE_CODE (arg0
) != TARGET_MEM_REF
2940 || (OP_SAME_WITH_NULL (2)
2941 && OP_SAME_WITH_NULL (3)
2942 && OP_SAME_WITH_NULL (4))));
2945 case ARRAY_RANGE_REF
:
2946 /* Operands 2 and 3 may be null.
2947 Compare the array index by value if it is constant first as we
2948 may have different types but same value here. */
2951 flags
&= ~(OEP_CONSTANT_ADDRESS_OF
|OEP_ADDRESS_OF
);
2952 return ((tree_int_cst_equal (TREE_OPERAND (arg0
, 1),
2953 TREE_OPERAND (arg1
, 1))
2955 && OP_SAME_WITH_NULL (2)
2956 && OP_SAME_WITH_NULL (3));
2959 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2960 may be NULL when we're called to compare MEM_EXPRs. */
2961 if (!OP_SAME_WITH_NULL (0)
2964 flags
&= ~(OEP_CONSTANT_ADDRESS_OF
|OEP_ADDRESS_OF
);
2965 return OP_SAME_WITH_NULL (2);
2970 flags
&= ~(OEP_CONSTANT_ADDRESS_OF
|OEP_ADDRESS_OF
);
2971 return OP_SAME (1) && OP_SAME (2);
2977 case tcc_expression
:
2978 switch (TREE_CODE (arg0
))
2981 return operand_equal_p (TREE_OPERAND (arg0
, 0),
2982 TREE_OPERAND (arg1
, 0),
2983 flags
| OEP_ADDRESS_OF
);
2985 case TRUTH_NOT_EXPR
:
2988 case TRUTH_ANDIF_EXPR
:
2989 case TRUTH_ORIF_EXPR
:
2990 return OP_SAME (0) && OP_SAME (1);
2993 case WIDEN_MULT_PLUS_EXPR
:
2994 case WIDEN_MULT_MINUS_EXPR
:
2997 /* The multiplcation operands are commutative. */
3000 case TRUTH_AND_EXPR
:
3002 case TRUTH_XOR_EXPR
:
3003 if (OP_SAME (0) && OP_SAME (1))
3006 /* Otherwise take into account this is a commutative operation. */
3007 return (operand_equal_p (TREE_OPERAND (arg0
, 0),
3008 TREE_OPERAND (arg1
, 1), flags
)
3009 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3010 TREE_OPERAND (arg1
, 0), flags
));
3015 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3022 switch (TREE_CODE (arg0
))
3025 if ((CALL_EXPR_FN (arg0
) == NULL_TREE
)
3026 != (CALL_EXPR_FN (arg1
) == NULL_TREE
))
3027 /* If not both CALL_EXPRs are either internal or normal function
3028 functions, then they are not equal. */
3030 else if (CALL_EXPR_FN (arg0
) == NULL_TREE
)
3032 /* If the CALL_EXPRs call different internal functions, then they
3034 if (CALL_EXPR_IFN (arg0
) != CALL_EXPR_IFN (arg1
))
3039 /* If the CALL_EXPRs call different functions, then they are not
3041 if (! operand_equal_p (CALL_EXPR_FN (arg0
), CALL_EXPR_FN (arg1
),
3047 unsigned int cef
= call_expr_flags (arg0
);
3048 if (flags
& OEP_PURE_SAME
)
3049 cef
&= ECF_CONST
| ECF_PURE
;
3056 /* Now see if all the arguments are the same. */
3058 const_call_expr_arg_iterator iter0
, iter1
;
3060 for (a0
= first_const_call_expr_arg (arg0
, &iter0
),
3061 a1
= first_const_call_expr_arg (arg1
, &iter1
);
3063 a0
= next_const_call_expr_arg (&iter0
),
3064 a1
= next_const_call_expr_arg (&iter1
))
3065 if (! operand_equal_p (a0
, a1
, flags
))
3068 /* If we get here and both argument lists are exhausted
3069 then the CALL_EXPRs are equal. */
3070 return ! (a0
|| a1
);
3076 case tcc_declaration
:
3077 /* Consider __builtin_sqrt equal to sqrt. */
3078 return (TREE_CODE (arg0
) == FUNCTION_DECL
3079 && DECL_BUILT_IN (arg0
) && DECL_BUILT_IN (arg1
)
3080 && DECL_BUILT_IN_CLASS (arg0
) == DECL_BUILT_IN_CLASS (arg1
)
3081 && DECL_FUNCTION_CODE (arg0
) == DECL_FUNCTION_CODE (arg1
));
3088 #undef OP_SAME_WITH_NULL
3091 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3092 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3094 When in doubt, return 0. */
3097 operand_equal_for_comparison_p (tree arg0
, tree arg1
, tree other
)
3099 int unsignedp1
, unsignedpo
;
3100 tree primarg0
, primarg1
, primother
;
3101 unsigned int correct_width
;
3103 if (operand_equal_p (arg0
, arg1
, 0))
3106 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
3107 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1
)))
3110 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3111 and see if the inner values are the same. This removes any
3112 signedness comparison, which doesn't matter here. */
3113 primarg0
= arg0
, primarg1
= arg1
;
3114 STRIP_NOPS (primarg0
);
3115 STRIP_NOPS (primarg1
);
3116 if (operand_equal_p (primarg0
, primarg1
, 0))
3119 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3120 actual comparison operand, ARG0.
3122 First throw away any conversions to wider types
3123 already present in the operands. */
3125 primarg1
= get_narrower (arg1
, &unsignedp1
);
3126 primother
= get_narrower (other
, &unsignedpo
);
3128 correct_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
3129 if (unsignedp1
== unsignedpo
3130 && TYPE_PRECISION (TREE_TYPE (primarg1
)) < correct_width
3131 && TYPE_PRECISION (TREE_TYPE (primother
)) < correct_width
)
3133 tree type
= TREE_TYPE (arg0
);
3135 /* Make sure shorter operand is extended the right way
3136 to match the longer operand. */
3137 primarg1
= fold_convert (signed_or_unsigned_type_for
3138 (unsignedp1
, TREE_TYPE (primarg1
)), primarg1
);
3140 if (operand_equal_p (arg0
, fold_convert (type
, primarg1
), 0))
3147 /* See if ARG is an expression that is either a comparison or is performing
3148 arithmetic on comparisons. The comparisons must only be comparing
3149 two different values, which will be stored in *CVAL1 and *CVAL2; if
3150 they are nonzero it means that some operands have already been found.
3151 No variables may be used anywhere else in the expression except in the
3152 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3153 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3155 If this is true, return 1. Otherwise, return zero. */
3158 twoval_comparison_p (tree arg
, tree
*cval1
, tree
*cval2
, int *save_p
)
3160 enum tree_code code
= TREE_CODE (arg
);
3161 enum tree_code_class tclass
= TREE_CODE_CLASS (code
);
3163 /* We can handle some of the tcc_expression cases here. */
3164 if (tclass
== tcc_expression
&& code
== TRUTH_NOT_EXPR
)
3166 else if (tclass
== tcc_expression
3167 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
3168 || code
== COMPOUND_EXPR
))
3169 tclass
= tcc_binary
;
3171 else if (tclass
== tcc_expression
&& code
== SAVE_EXPR
3172 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg
, 0)))
3174 /* If we've already found a CVAL1 or CVAL2, this expression is
3175 two complex to handle. */
3176 if (*cval1
|| *cval2
)
3186 return twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
);
3189 return (twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
)
3190 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
3191 cval1
, cval2
, save_p
));
3196 case tcc_expression
:
3197 if (code
== COND_EXPR
)
3198 return (twoval_comparison_p (TREE_OPERAND (arg
, 0),
3199 cval1
, cval2
, save_p
)
3200 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
3201 cval1
, cval2
, save_p
)
3202 && twoval_comparison_p (TREE_OPERAND (arg
, 2),
3203 cval1
, cval2
, save_p
));
3206 case tcc_comparison
:
3207 /* First see if we can handle the first operand, then the second. For
3208 the second operand, we know *CVAL1 can't be zero. It must be that
3209 one side of the comparison is each of the values; test for the
3210 case where this isn't true by failing if the two operands
3213 if (operand_equal_p (TREE_OPERAND (arg
, 0),
3214 TREE_OPERAND (arg
, 1), 0))
3218 *cval1
= TREE_OPERAND (arg
, 0);
3219 else if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 0), 0))
3221 else if (*cval2
== 0)
3222 *cval2
= TREE_OPERAND (arg
, 0);
3223 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 0), 0))
3228 if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 1), 0))
3230 else if (*cval2
== 0)
3231 *cval2
= TREE_OPERAND (arg
, 1);
3232 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 1), 0))
3244 /* ARG is a tree that is known to contain just arithmetic operations and
3245 comparisons. Evaluate the operations in the tree substituting NEW0 for
3246 any occurrence of OLD0 as an operand of a comparison and likewise for
3250 eval_subst (location_t loc
, tree arg
, tree old0
, tree new0
,
3251 tree old1
, tree new1
)
3253 tree type
= TREE_TYPE (arg
);
3254 enum tree_code code
= TREE_CODE (arg
);
3255 enum tree_code_class tclass
= TREE_CODE_CLASS (code
);
3257 /* We can handle some of the tcc_expression cases here. */
3258 if (tclass
== tcc_expression
&& code
== TRUTH_NOT_EXPR
)
3260 else if (tclass
== tcc_expression
3261 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
3262 tclass
= tcc_binary
;
3267 return fold_build1_loc (loc
, code
, type
,
3268 eval_subst (loc
, TREE_OPERAND (arg
, 0),
3269 old0
, new0
, old1
, new1
));
3272 return fold_build2_loc (loc
, code
, type
,
3273 eval_subst (loc
, TREE_OPERAND (arg
, 0),
3274 old0
, new0
, old1
, new1
),
3275 eval_subst (loc
, TREE_OPERAND (arg
, 1),
3276 old0
, new0
, old1
, new1
));
3278 case tcc_expression
:
3282 return eval_subst (loc
, TREE_OPERAND (arg
, 0), old0
, new0
,
3286 return eval_subst (loc
, TREE_OPERAND (arg
, 1), old0
, new0
,
3290 return fold_build3_loc (loc
, code
, type
,
3291 eval_subst (loc
, TREE_OPERAND (arg
, 0),
3292 old0
, new0
, old1
, new1
),
3293 eval_subst (loc
, TREE_OPERAND (arg
, 1),
3294 old0
, new0
, old1
, new1
),
3295 eval_subst (loc
, TREE_OPERAND (arg
, 2),
3296 old0
, new0
, old1
, new1
));
3300 /* Fall through - ??? */
3302 case tcc_comparison
:
3304 tree arg0
= TREE_OPERAND (arg
, 0);
3305 tree arg1
= TREE_OPERAND (arg
, 1);
3307 /* We need to check both for exact equality and tree equality. The
3308 former will be true if the operand has a side-effect. In that
3309 case, we know the operand occurred exactly once. */
3311 if (arg0
== old0
|| operand_equal_p (arg0
, old0
, 0))
3313 else if (arg0
== old1
|| operand_equal_p (arg0
, old1
, 0))
3316 if (arg1
== old0
|| operand_equal_p (arg1
, old0
, 0))
3318 else if (arg1
== old1
|| operand_equal_p (arg1
, old1
, 0))
3321 return fold_build2_loc (loc
, code
, type
, arg0
, arg1
);
3329 /* Return a tree for the case when the result of an expression is RESULT
3330 converted to TYPE and OMITTED was previously an operand of the expression
3331 but is now not needed (e.g., we folded OMITTED * 0).
3333 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3334 the conversion of RESULT to TYPE. */
3337 omit_one_operand_loc (location_t loc
, tree type
, tree result
, tree omitted
)
3339 tree t
= fold_convert_loc (loc
, type
, result
);
3341 /* If the resulting operand is an empty statement, just return the omitted
3342 statement casted to void. */
3343 if (IS_EMPTY_STMT (t
) && TREE_SIDE_EFFECTS (omitted
))
3344 return build1_loc (loc
, NOP_EXPR
, void_type_node
,
3345 fold_ignored_result (omitted
));
3347 if (TREE_SIDE_EFFECTS (omitted
))
3348 return build2_loc (loc
, COMPOUND_EXPR
, type
,
3349 fold_ignored_result (omitted
), t
);
3351 return non_lvalue_loc (loc
, t
);
3354 /* Return a tree for the case when the result of an expression is RESULT
3355 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3356 of the expression but are now not needed.
3358 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3359 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3360 evaluated before OMITTED2. Otherwise, if neither has side effects,
3361 just do the conversion of RESULT to TYPE. */
3364 omit_two_operands_loc (location_t loc
, tree type
, tree result
,
3365 tree omitted1
, tree omitted2
)
3367 tree t
= fold_convert_loc (loc
, type
, result
);
3369 if (TREE_SIDE_EFFECTS (omitted2
))
3370 t
= build2_loc (loc
, COMPOUND_EXPR
, type
, omitted2
, t
);
3371 if (TREE_SIDE_EFFECTS (omitted1
))
3372 t
= build2_loc (loc
, COMPOUND_EXPR
, type
, omitted1
, t
);
3374 return TREE_CODE (t
) != COMPOUND_EXPR
? non_lvalue_loc (loc
, t
) : t
;
3378 /* Return a simplified tree node for the truth-negation of ARG. This
3379 never alters ARG itself. We assume that ARG is an operation that
3380 returns a truth value (0 or 1).
3382 FIXME: one would think we would fold the result, but it causes
3383 problems with the dominator optimizer. */
3386 fold_truth_not_expr (location_t loc
, tree arg
)
3388 tree type
= TREE_TYPE (arg
);
3389 enum tree_code code
= TREE_CODE (arg
);
3390 location_t loc1
, loc2
;
3392 /* If this is a comparison, we can simply invert it, except for
3393 floating-point non-equality comparisons, in which case we just
3394 enclose a TRUTH_NOT_EXPR around what we have. */
3396 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
3398 tree op_type
= TREE_TYPE (TREE_OPERAND (arg
, 0));
3399 if (FLOAT_TYPE_P (op_type
)
3400 && flag_trapping_math
3401 && code
!= ORDERED_EXPR
&& code
!= UNORDERED_EXPR
3402 && code
!= NE_EXPR
&& code
!= EQ_EXPR
)
3405 code
= invert_tree_comparison (code
, HONOR_NANS (op_type
));
3406 if (code
== ERROR_MARK
)
3409 return build2_loc (loc
, code
, type
, TREE_OPERAND (arg
, 0),
3410 TREE_OPERAND (arg
, 1));
3416 return constant_boolean_node (integer_zerop (arg
), type
);
3418 case TRUTH_AND_EXPR
:
3419 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3420 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3421 return build2_loc (loc
, TRUTH_OR_EXPR
, type
,
3422 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3423 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3426 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3427 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3428 return build2_loc (loc
, TRUTH_AND_EXPR
, type
,
3429 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3430 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3432 case TRUTH_XOR_EXPR
:
3433 /* Here we can invert either operand. We invert the first operand
3434 unless the second operand is a TRUTH_NOT_EXPR in which case our
3435 result is the XOR of the first operand with the inside of the
3436 negation of the second operand. */
3438 if (TREE_CODE (TREE_OPERAND (arg
, 1)) == TRUTH_NOT_EXPR
)
3439 return build2_loc (loc
, TRUTH_XOR_EXPR
, type
, TREE_OPERAND (arg
, 0),
3440 TREE_OPERAND (TREE_OPERAND (arg
, 1), 0));
3442 return build2_loc (loc
, TRUTH_XOR_EXPR
, type
,
3443 invert_truthvalue_loc (loc
, TREE_OPERAND (arg
, 0)),
3444 TREE_OPERAND (arg
, 1));
3446 case TRUTH_ANDIF_EXPR
:
3447 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3448 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3449 return build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
3450 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3451 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3453 case TRUTH_ORIF_EXPR
:
3454 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3455 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3456 return build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
3457 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3458 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3460 case TRUTH_NOT_EXPR
:
3461 return TREE_OPERAND (arg
, 0);
3465 tree arg1
= TREE_OPERAND (arg
, 1);
3466 tree arg2
= TREE_OPERAND (arg
, 2);
3468 loc1
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3469 loc2
= expr_location_or (TREE_OPERAND (arg
, 2), loc
);
3471 /* A COND_EXPR may have a throw as one operand, which
3472 then has void type. Just leave void operands
3474 return build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg
, 0),
3475 VOID_TYPE_P (TREE_TYPE (arg1
))
3476 ? arg1
: invert_truthvalue_loc (loc1
, arg1
),
3477 VOID_TYPE_P (TREE_TYPE (arg2
))
3478 ? arg2
: invert_truthvalue_loc (loc2
, arg2
));
3482 loc1
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3483 return build2_loc (loc
, COMPOUND_EXPR
, type
,
3484 TREE_OPERAND (arg
, 0),
3485 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 1)));
3487 case NON_LVALUE_EXPR
:
3488 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3489 return invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0));
3492 if (TREE_CODE (TREE_TYPE (arg
)) == BOOLEAN_TYPE
)
3493 return build1_loc (loc
, TRUTH_NOT_EXPR
, type
, arg
);
3495 /* ... fall through ... */
3498 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3499 return build1_loc (loc
, TREE_CODE (arg
), type
,
3500 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)));
3503 if (!integer_onep (TREE_OPERAND (arg
, 1)))
3505 return build2_loc (loc
, EQ_EXPR
, type
, arg
, build_int_cst (type
, 0));
3508 return build1_loc (loc
, TRUTH_NOT_EXPR
, type
, arg
);
3510 case CLEANUP_POINT_EXPR
:
3511 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3512 return build1_loc (loc
, CLEANUP_POINT_EXPR
, type
,
3513 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)));
3520 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3521 assume that ARG is an operation that returns a truth value (0 or 1
3522 for scalars, 0 or -1 for vectors). Return the folded expression if
3523 folding is successful. Otherwise, return NULL_TREE. */
3526 fold_invert_truthvalue (location_t loc
, tree arg
)
3528 tree type
= TREE_TYPE (arg
);
3529 return fold_unary_loc (loc
, VECTOR_TYPE_P (type
)
3535 /* Return a simplified tree node for the truth-negation of ARG. This
3536 never alters ARG itself. We assume that ARG is an operation that
3537 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3540 invert_truthvalue_loc (location_t loc
, tree arg
)
3542 if (TREE_CODE (arg
) == ERROR_MARK
)
3545 tree type
= TREE_TYPE (arg
);
3546 return fold_build1_loc (loc
, VECTOR_TYPE_P (type
)
3552 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3553 with code CODE. This optimization is unsafe. */
3555 distribute_real_division (location_t loc
, enum tree_code code
, tree type
,
3556 tree arg0
, tree arg1
)
3558 bool mul0
= TREE_CODE (arg0
) == MULT_EXPR
;
3559 bool mul1
= TREE_CODE (arg1
) == MULT_EXPR
;
3561 /* (A / C) +- (B / C) -> (A +- B) / C. */
3563 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3564 TREE_OPERAND (arg1
, 1), 0))
3565 return fold_build2_loc (loc
, mul0
? MULT_EXPR
: RDIV_EXPR
, type
,
3566 fold_build2_loc (loc
, code
, type
,
3567 TREE_OPERAND (arg0
, 0),
3568 TREE_OPERAND (arg1
, 0)),
3569 TREE_OPERAND (arg0
, 1));
3571 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3572 if (operand_equal_p (TREE_OPERAND (arg0
, 0),
3573 TREE_OPERAND (arg1
, 0), 0)
3574 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
3575 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == REAL_CST
)
3577 REAL_VALUE_TYPE r0
, r1
;
3578 r0
= TREE_REAL_CST (TREE_OPERAND (arg0
, 1));
3579 r1
= TREE_REAL_CST (TREE_OPERAND (arg1
, 1));
3581 real_arithmetic (&r0
, RDIV_EXPR
, &dconst1
, &r0
);
3583 real_arithmetic (&r1
, RDIV_EXPR
, &dconst1
, &r1
);
3584 real_arithmetic (&r0
, code
, &r0
, &r1
);
3585 return fold_build2_loc (loc
, MULT_EXPR
, type
,
3586 TREE_OPERAND (arg0
, 0),
3587 build_real (type
, r0
));
3593 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3594 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3597 make_bit_field_ref (location_t loc
, tree inner
, tree type
,
3598 HOST_WIDE_INT bitsize
, HOST_WIDE_INT bitpos
, int unsignedp
)
3600 tree result
, bftype
;
3604 tree size
= TYPE_SIZE (TREE_TYPE (inner
));
3605 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner
))
3606 || POINTER_TYPE_P (TREE_TYPE (inner
)))
3607 && tree_fits_shwi_p (size
)
3608 && tree_to_shwi (size
) == bitsize
)
3609 return fold_convert_loc (loc
, type
, inner
);
3613 if (TYPE_PRECISION (bftype
) != bitsize
3614 || TYPE_UNSIGNED (bftype
) == !unsignedp
)
3615 bftype
= build_nonstandard_integer_type (bitsize
, 0);
3617 result
= build3_loc (loc
, BIT_FIELD_REF
, bftype
, inner
,
3618 size_int (bitsize
), bitsize_int (bitpos
));
3621 result
= fold_convert_loc (loc
, type
, result
);
3626 /* Optimize a bit-field compare.
3628 There are two cases: First is a compare against a constant and the
3629 second is a comparison of two items where the fields are at the same
3630 bit position relative to the start of a chunk (byte, halfword, word)
3631 large enough to contain it. In these cases we can avoid the shift
3632 implicit in bitfield extractions.
3634 For constants, we emit a compare of the shifted constant with the
3635 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3636 compared. For two fields at the same position, we do the ANDs with the
3637 similar mask and compare the result of the ANDs.
3639 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3640 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3641 are the left and right operands of the comparison, respectively.
3643 If the optimization described above can be done, we return the resulting
3644 tree. Otherwise we return zero. */
3647 optimize_bit_field_compare (location_t loc
, enum tree_code code
,
3648 tree compare_type
, tree lhs
, tree rhs
)
3650 HOST_WIDE_INT lbitpos
, lbitsize
, rbitpos
, rbitsize
, nbitpos
, nbitsize
;
3651 tree type
= TREE_TYPE (lhs
);
3653 int const_p
= TREE_CODE (rhs
) == INTEGER_CST
;
3654 machine_mode lmode
, rmode
, nmode
;
3655 int lunsignedp
, runsignedp
;
3656 int lvolatilep
= 0, rvolatilep
= 0;
3657 tree linner
, rinner
= NULL_TREE
;
3661 /* Get all the information about the extractions being done. If the bit size
3662 if the same as the size of the underlying object, we aren't doing an
3663 extraction at all and so can do nothing. We also don't want to
3664 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3665 then will no longer be able to replace it. */
3666 linner
= get_inner_reference (lhs
, &lbitsize
, &lbitpos
, &offset
, &lmode
,
3667 &lunsignedp
, &lvolatilep
, false);
3668 if (linner
== lhs
|| lbitsize
== GET_MODE_BITSIZE (lmode
) || lbitsize
< 0
3669 || offset
!= 0 || TREE_CODE (linner
) == PLACEHOLDER_EXPR
|| lvolatilep
)
3674 /* If this is not a constant, we can only do something if bit positions,
3675 sizes, and signedness are the same. */
3676 rinner
= get_inner_reference (rhs
, &rbitsize
, &rbitpos
, &offset
, &rmode
,
3677 &runsignedp
, &rvolatilep
, false);
3679 if (rinner
== rhs
|| lbitpos
!= rbitpos
|| lbitsize
!= rbitsize
3680 || lunsignedp
!= runsignedp
|| offset
!= 0
3681 || TREE_CODE (rinner
) == PLACEHOLDER_EXPR
|| rvolatilep
)
3685 /* See if we can find a mode to refer to this field. We should be able to,
3686 but fail if we can't. */
3687 nmode
= get_best_mode (lbitsize
, lbitpos
, 0, 0,
3688 const_p
? TYPE_ALIGN (TREE_TYPE (linner
))
3689 : MIN (TYPE_ALIGN (TREE_TYPE (linner
)),
3690 TYPE_ALIGN (TREE_TYPE (rinner
))),
3692 if (nmode
== VOIDmode
)
3695 /* Set signed and unsigned types of the precision of this mode for the
3697 unsigned_type
= lang_hooks
.types
.type_for_mode (nmode
, 1);
3699 /* Compute the bit position and size for the new reference and our offset
3700 within it. If the new reference is the same size as the original, we
3701 won't optimize anything, so return zero. */
3702 nbitsize
= GET_MODE_BITSIZE (nmode
);
3703 nbitpos
= lbitpos
& ~ (nbitsize
- 1);
3705 if (nbitsize
== lbitsize
)
3708 if (BYTES_BIG_ENDIAN
)
3709 lbitpos
= nbitsize
- lbitsize
- lbitpos
;
3711 /* Make the mask to be used against the extracted field. */
3712 mask
= build_int_cst_type (unsigned_type
, -1);
3713 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (nbitsize
- lbitsize
));
3714 mask
= const_binop (RSHIFT_EXPR
, mask
,
3715 size_int (nbitsize
- lbitsize
- lbitpos
));
3718 /* If not comparing with constant, just rework the comparison
3720 return fold_build2_loc (loc
, code
, compare_type
,
3721 fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
3722 make_bit_field_ref (loc
, linner
,
3727 fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
3728 make_bit_field_ref (loc
, rinner
,
3734 /* Otherwise, we are handling the constant case. See if the constant is too
3735 big for the field. Warn and return a tree of for 0 (false) if so. We do
3736 this not only for its own sake, but to avoid having to test for this
3737 error case below. If we didn't, we might generate wrong code.
3739 For unsigned fields, the constant shifted right by the field length should
3740 be all zero. For signed fields, the high-order bits should agree with
3745 if (wi::lrshift (rhs
, lbitsize
) != 0)
3747 warning (0, "comparison is always %d due to width of bit-field",
3749 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
3754 wide_int tem
= wi::arshift (rhs
, lbitsize
- 1);
3755 if (tem
!= 0 && tem
!= -1)
3757 warning (0, "comparison is always %d due to width of bit-field",
3759 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
3763 /* Single-bit compares should always be against zero. */
3764 if (lbitsize
== 1 && ! integer_zerop (rhs
))
3766 code
= code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
;
3767 rhs
= build_int_cst (type
, 0);
3770 /* Make a new bitfield reference, shift the constant over the
3771 appropriate number of bits and mask it with the computed mask
3772 (in case this was a signed field). If we changed it, make a new one. */
3773 lhs
= make_bit_field_ref (loc
, linner
, unsigned_type
, nbitsize
, nbitpos
, 1);
3775 rhs
= const_binop (BIT_AND_EXPR
,
3776 const_binop (LSHIFT_EXPR
,
3777 fold_convert_loc (loc
, unsigned_type
, rhs
),
3778 size_int (lbitpos
)),
3781 lhs
= build2_loc (loc
, code
, compare_type
,
3782 build2 (BIT_AND_EXPR
, unsigned_type
, lhs
, mask
), rhs
);
3786 /* Subroutine for fold_truth_andor_1: decode a field reference.
3788 If EXP is a comparison reference, we return the innermost reference.
3790 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3791 set to the starting bit number.
3793 If the innermost field can be completely contained in a mode-sized
3794 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3796 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3797 otherwise it is not changed.
3799 *PUNSIGNEDP is set to the signedness of the field.
3801 *PMASK is set to the mask used. This is either contained in a
3802 BIT_AND_EXPR or derived from the width of the field.
3804 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3806 Return 0 if this is not a component reference or is one that we can't
3807 do anything with. */
3810 decode_field_reference (location_t loc
, tree exp
, HOST_WIDE_INT
*pbitsize
,
3811 HOST_WIDE_INT
*pbitpos
, machine_mode
*pmode
,
3812 int *punsignedp
, int *pvolatilep
,
3813 tree
*pmask
, tree
*pand_mask
)
3815 tree outer_type
= 0;
3817 tree mask
, inner
, offset
;
3819 unsigned int precision
;
3821 /* All the optimizations using this function assume integer fields.
3822 There are problems with FP fields since the type_for_size call
3823 below can fail for, e.g., XFmode. */
3824 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp
)))
3827 /* We are interested in the bare arrangement of bits, so strip everything
3828 that doesn't affect the machine mode. However, record the type of the
3829 outermost expression if it may matter below. */
3830 if (CONVERT_EXPR_P (exp
)
3831 || TREE_CODE (exp
) == NON_LVALUE_EXPR
)
3832 outer_type
= TREE_TYPE (exp
);
3835 if (TREE_CODE (exp
) == BIT_AND_EXPR
)
3837 and_mask
= TREE_OPERAND (exp
, 1);
3838 exp
= TREE_OPERAND (exp
, 0);
3839 STRIP_NOPS (exp
); STRIP_NOPS (and_mask
);
3840 if (TREE_CODE (and_mask
) != INTEGER_CST
)
3844 inner
= get_inner_reference (exp
, pbitsize
, pbitpos
, &offset
, pmode
,
3845 punsignedp
, pvolatilep
, false);
3846 if ((inner
== exp
&& and_mask
== 0)
3847 || *pbitsize
< 0 || offset
!= 0
3848 || TREE_CODE (inner
) == PLACEHOLDER_EXPR
)
3851 /* If the number of bits in the reference is the same as the bitsize of
3852 the outer type, then the outer type gives the signedness. Otherwise
3853 (in case of a small bitfield) the signedness is unchanged. */
3854 if (outer_type
&& *pbitsize
== TYPE_PRECISION (outer_type
))
3855 *punsignedp
= TYPE_UNSIGNED (outer_type
);
3857 /* Compute the mask to access the bitfield. */
3858 unsigned_type
= lang_hooks
.types
.type_for_size (*pbitsize
, 1);
3859 precision
= TYPE_PRECISION (unsigned_type
);
3861 mask
= build_int_cst_type (unsigned_type
, -1);
3863 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
));
3864 mask
= const_binop (RSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
));
3866 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3868 mask
= fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
3869 fold_convert_loc (loc
, unsigned_type
, and_mask
), mask
);
3872 *pand_mask
= and_mask
;
3876 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3877 bit positions and MASK is SIGNED. */
3880 all_ones_mask_p (const_tree mask
, unsigned int size
)
3882 tree type
= TREE_TYPE (mask
);
3883 unsigned int precision
= TYPE_PRECISION (type
);
3885 /* If this function returns true when the type of the mask is
3886 UNSIGNED, then there will be errors. In particular see
3887 gcc.c-torture/execute/990326-1.c. There does not appear to be
3888 any documentation paper trail as to why this is so. But the pre
3889 wide-int worked with that restriction and it has been preserved
3891 if (size
> precision
|| TYPE_SIGN (type
) == UNSIGNED
)
3894 return wi::mask (size
, false, precision
) == mask
;
3897 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3898 represents the sign bit of EXP's type. If EXP represents a sign
3899 or zero extension, also test VAL against the unextended type.
3900 The return value is the (sub)expression whose sign bit is VAL,
3901 or NULL_TREE otherwise. */
3904 sign_bit_p (tree exp
, const_tree val
)
3909 /* Tree EXP must have an integral type. */
3910 t
= TREE_TYPE (exp
);
3911 if (! INTEGRAL_TYPE_P (t
))
3914 /* Tree VAL must be an integer constant. */
3915 if (TREE_CODE (val
) != INTEGER_CST
3916 || TREE_OVERFLOW (val
))
3919 width
= TYPE_PRECISION (t
);
3920 if (wi::only_sign_bit_p (val
, width
))
3923 /* Handle extension from a narrower type. */
3924 if (TREE_CODE (exp
) == NOP_EXPR
3925 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))) < width
)
3926 return sign_bit_p (TREE_OPERAND (exp
, 0), val
);
3931 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3932 to be evaluated unconditionally. */
3935 simple_operand_p (const_tree exp
)
3937 /* Strip any conversions that don't change the machine mode. */
3940 return (CONSTANT_CLASS_P (exp
)
3941 || TREE_CODE (exp
) == SSA_NAME
3943 && ! TREE_ADDRESSABLE (exp
)
3944 && ! TREE_THIS_VOLATILE (exp
)
3945 && ! DECL_NONLOCAL (exp
)
3946 /* Don't regard global variables as simple. They may be
3947 allocated in ways unknown to the compiler (shared memory,
3948 #pragma weak, etc). */
3949 && ! TREE_PUBLIC (exp
)
3950 && ! DECL_EXTERNAL (exp
)
3951 /* Weakrefs are not safe to be read, since they can be NULL.
3952 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3953 have DECL_WEAK flag set. */
3954 && (! VAR_OR_FUNCTION_DECL_P (exp
) || ! DECL_WEAK (exp
))
3955 /* Loading a static variable is unduly expensive, but global
3956 registers aren't expensive. */
3957 && (! TREE_STATIC (exp
) || DECL_REGISTER (exp
))));
3960 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3961 to be evaluated unconditionally.
3962 I addition to simple_operand_p, we assume that comparisons, conversions,
3963 and logic-not operations are simple, if their operands are simple, too. */
3966 simple_operand_p_2 (tree exp
)
3968 enum tree_code code
;
3970 if (TREE_SIDE_EFFECTS (exp
)
3971 || tree_could_trap_p (exp
))
3974 while (CONVERT_EXPR_P (exp
))
3975 exp
= TREE_OPERAND (exp
, 0);
3977 code
= TREE_CODE (exp
);
3979 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
3980 return (simple_operand_p (TREE_OPERAND (exp
, 0))
3981 && simple_operand_p (TREE_OPERAND (exp
, 1)));
3983 if (code
== TRUTH_NOT_EXPR
)
3984 return simple_operand_p_2 (TREE_OPERAND (exp
, 0));
3986 return simple_operand_p (exp
);
3990 /* The following functions are subroutines to fold_range_test and allow it to
3991 try to change a logical combination of comparisons into a range test.
3994 X == 2 || X == 3 || X == 4 || X == 5
3998 (unsigned) (X - 2) <= 3
4000 We describe each set of comparisons as being either inside or outside
4001 a range, using a variable named like IN_P, and then describe the
4002 range with a lower and upper bound. If one of the bounds is omitted,
4003 it represents either the highest or lowest value of the type.
4005 In the comments below, we represent a range by two numbers in brackets
4006 preceded by a "+" to designate being inside that range, or a "-" to
4007 designate being outside that range, so the condition can be inverted by
4008 flipping the prefix. An omitted bound is represented by a "-". For
4009 example, "- [-, 10]" means being outside the range starting at the lowest
4010 possible value and ending at 10, in other words, being greater than 10.
4011 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4014 We set up things so that the missing bounds are handled in a consistent
4015 manner so neither a missing bound nor "true" and "false" need to be
4016 handled using a special case. */
4018 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4019 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4020 and UPPER1_P are nonzero if the respective argument is an upper bound
4021 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4022 must be specified for a comparison. ARG1 will be converted to ARG0's
4023 type if both are specified. */
4026 range_binop (enum tree_code code
, tree type
, tree arg0
, int upper0_p
,
4027 tree arg1
, int upper1_p
)
4033 /* If neither arg represents infinity, do the normal operation.
4034 Else, if not a comparison, return infinity. Else handle the special
4035 comparison rules. Note that most of the cases below won't occur, but
4036 are handled for consistency. */
4038 if (arg0
!= 0 && arg1
!= 0)
4040 tem
= fold_build2 (code
, type
!= 0 ? type
: TREE_TYPE (arg0
),
4041 arg0
, fold_convert (TREE_TYPE (arg0
), arg1
));
4043 return TREE_CODE (tem
) == INTEGER_CST
? tem
: 0;
4046 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
4049 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4050 for neither. In real maths, we cannot assume open ended ranges are
4051 the same. But, this is computer arithmetic, where numbers are finite.
4052 We can therefore make the transformation of any unbounded range with
4053 the value Z, Z being greater than any representable number. This permits
4054 us to treat unbounded ranges as equal. */
4055 sgn0
= arg0
!= 0 ? 0 : (upper0_p
? 1 : -1);
4056 sgn1
= arg1
!= 0 ? 0 : (upper1_p
? 1 : -1);
4060 result
= sgn0
== sgn1
;
4063 result
= sgn0
!= sgn1
;
4066 result
= sgn0
< sgn1
;
4069 result
= sgn0
<= sgn1
;
4072 result
= sgn0
> sgn1
;
4075 result
= sgn0
>= sgn1
;
4081 return constant_boolean_node (result
, type
);
4084 /* Helper routine for make_range. Perform one step for it, return
4085 new expression if the loop should continue or NULL_TREE if it should
4089 make_range_step (location_t loc
, enum tree_code code
, tree arg0
, tree arg1
,
4090 tree exp_type
, tree
*p_low
, tree
*p_high
, int *p_in_p
,
4091 bool *strict_overflow_p
)
4093 tree arg0_type
= TREE_TYPE (arg0
);
4094 tree n_low
, n_high
, low
= *p_low
, high
= *p_high
;
4095 int in_p
= *p_in_p
, n_in_p
;
4099 case TRUTH_NOT_EXPR
:
4100 /* We can only do something if the range is testing for zero. */
4101 if (low
== NULL_TREE
|| high
== NULL_TREE
4102 || ! integer_zerop (low
) || ! integer_zerop (high
))
4107 case EQ_EXPR
: case NE_EXPR
:
4108 case LT_EXPR
: case LE_EXPR
: case GE_EXPR
: case GT_EXPR
:
4109 /* We can only do something if the range is testing for zero
4110 and if the second operand is an integer constant. Note that
4111 saying something is "in" the range we make is done by
4112 complementing IN_P since it will set in the initial case of
4113 being not equal to zero; "out" is leaving it alone. */
4114 if (low
== NULL_TREE
|| high
== NULL_TREE
4115 || ! integer_zerop (low
) || ! integer_zerop (high
)
4116 || TREE_CODE (arg1
) != INTEGER_CST
)
4121 case NE_EXPR
: /* - [c, c] */
4124 case EQ_EXPR
: /* + [c, c] */
4125 in_p
= ! in_p
, low
= high
= arg1
;
4127 case GT_EXPR
: /* - [-, c] */
4128 low
= 0, high
= arg1
;
4130 case GE_EXPR
: /* + [c, -] */
4131 in_p
= ! in_p
, low
= arg1
, high
= 0;
4133 case LT_EXPR
: /* - [c, -] */
4134 low
= arg1
, high
= 0;
4136 case LE_EXPR
: /* + [-, c] */
4137 in_p
= ! in_p
, low
= 0, high
= arg1
;
4143 /* If this is an unsigned comparison, we also know that EXP is
4144 greater than or equal to zero. We base the range tests we make
4145 on that fact, so we record it here so we can parse existing
4146 range tests. We test arg0_type since often the return type
4147 of, e.g. EQ_EXPR, is boolean. */
4148 if (TYPE_UNSIGNED (arg0_type
) && (low
== 0 || high
== 0))
4150 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
,
4152 build_int_cst (arg0_type
, 0),
4156 in_p
= n_in_p
, low
= n_low
, high
= n_high
;
4158 /* If the high bound is missing, but we have a nonzero low
4159 bound, reverse the range so it goes from zero to the low bound
4161 if (high
== 0 && low
&& ! integer_zerop (low
))
4164 high
= range_binop (MINUS_EXPR
, NULL_TREE
, low
, 0,
4165 build_int_cst (TREE_TYPE (low
), 1), 0);
4166 low
= build_int_cst (arg0_type
, 0);
4176 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4177 low and high are non-NULL, then normalize will DTRT. */
4178 if (!TYPE_UNSIGNED (arg0_type
)
4179 && !TYPE_OVERFLOW_UNDEFINED (arg0_type
))
4181 if (low
== NULL_TREE
)
4182 low
= TYPE_MIN_VALUE (arg0_type
);
4183 if (high
== NULL_TREE
)
4184 high
= TYPE_MAX_VALUE (arg0_type
);
4187 /* (-x) IN [a,b] -> x in [-b, -a] */
4188 n_low
= range_binop (MINUS_EXPR
, exp_type
,
4189 build_int_cst (exp_type
, 0),
4191 n_high
= range_binop (MINUS_EXPR
, exp_type
,
4192 build_int_cst (exp_type
, 0),
4194 if (n_high
!= 0 && TREE_OVERFLOW (n_high
))
4200 return build2_loc (loc
, MINUS_EXPR
, exp_type
, negate_expr (arg0
),
4201 build_int_cst (exp_type
, 1));
4205 if (TREE_CODE (arg1
) != INTEGER_CST
)
4208 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4209 move a constant to the other side. */
4210 if (!TYPE_UNSIGNED (arg0_type
)
4211 && !TYPE_OVERFLOW_UNDEFINED (arg0_type
))
4214 /* If EXP is signed, any overflow in the computation is undefined,
4215 so we don't worry about it so long as our computations on
4216 the bounds don't overflow. For unsigned, overflow is defined
4217 and this is exactly the right thing. */
4218 n_low
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
4219 arg0_type
, low
, 0, arg1
, 0);
4220 n_high
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
4221 arg0_type
, high
, 1, arg1
, 0);
4222 if ((n_low
!= 0 && TREE_OVERFLOW (n_low
))
4223 || (n_high
!= 0 && TREE_OVERFLOW (n_high
)))
4226 if (TYPE_OVERFLOW_UNDEFINED (arg0_type
))
4227 *strict_overflow_p
= true;
4230 /* Check for an unsigned range which has wrapped around the maximum
4231 value thus making n_high < n_low, and normalize it. */
4232 if (n_low
&& n_high
&& tree_int_cst_lt (n_high
, n_low
))
4234 low
= range_binop (PLUS_EXPR
, arg0_type
, n_high
, 0,
4235 build_int_cst (TREE_TYPE (n_high
), 1), 0);
4236 high
= range_binop (MINUS_EXPR
, arg0_type
, n_low
, 0,
4237 build_int_cst (TREE_TYPE (n_low
), 1), 0);
4239 /* If the range is of the form +/- [ x+1, x ], we won't
4240 be able to normalize it. But then, it represents the
4241 whole range or the empty set, so make it
4243 if (tree_int_cst_equal (n_low
, low
)
4244 && tree_int_cst_equal (n_high
, high
))
4250 low
= n_low
, high
= n_high
;
4258 case NON_LVALUE_EXPR
:
4259 if (TYPE_PRECISION (arg0_type
) > TYPE_PRECISION (exp_type
))
4262 if (! INTEGRAL_TYPE_P (arg0_type
)
4263 || (low
!= 0 && ! int_fits_type_p (low
, arg0_type
))
4264 || (high
!= 0 && ! int_fits_type_p (high
, arg0_type
)))
4267 n_low
= low
, n_high
= high
;
4270 n_low
= fold_convert_loc (loc
, arg0_type
, n_low
);
4273 n_high
= fold_convert_loc (loc
, arg0_type
, n_high
);
      /* If we're converting arg0 from an unsigned type, to exp,
	 a signed type, we will be doing the comparison as unsigned.
	 The tests above have already verified that LOW and HIGH
	 are both positive.

	 So we have to ensure that we will handle large unsigned
	 values the same way that the current signed bounds treat
	 negative values.  */

      if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
	{
	  tree high_positive;
	  tree equiv_type;
	  /* For fixed-point modes, we need to pass the saturating flag
	     as the 2nd parameter.  */
	  if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
	    equiv_type
	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
						TYPE_SATURATING (arg0_type));
	  else
	    equiv_type
	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);

	  /* A range without an upper bound is, naturally, unbounded.
	     Since convert would have cropped a very large value, use
	     the max value for the destination type.  */
	  high_positive
	    = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
	      : TYPE_MAX_VALUE (arg0_type);

	  if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
	    high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
					     fold_convert_loc (loc, arg0_type,
							       high_positive),
					     build_int_cst (arg0_type, 1));
	  /* If the low bound is specified, "and" the range with the
	     range for which the original unsigned value will be
	     positive.  */
	  if (low != 0)
	    {
	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
				  1, fold_convert_loc (loc, arg0_type,
						       integer_zero_node),
				  high_positive))
		return NULL_TREE;

	      in_p = (n_in_p == in_p);
	    }
	  else
	    {
	      /* Otherwise, "or" the range with the range of the input
		 that will be interpreted as negative.  */
	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
				  1, fold_convert_loc (loc, arg0_type,
						       integer_zero_node),
				  high_positive))
		return NULL_TREE;

	      in_p = (in_p != n_in_p);
	    }
	}

      *p_low = n_low;
      *p_high = n_high;
      *p_in_p = in_p;
      return arg0;

    default:
      return NULL_TREE;
    }
}
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same
   type as the returned expression.  If EXP is not a comparison, we
   will most likely not be returning a useful value and range.  Set
   *STRICT_OVERFLOW_P to true if the return value is only valid
   because signed overflow is undefined; otherwise, do not change
   *STRICT_OVERFLOW_P.  */

tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
	    bool *strict_overflow_p)
{
  enum tree_code code;
  tree arg0, arg1 = NULL_TREE;
  tree exp_type, nexp;
  int in_p;
  tree low, high;
  location_t loc = EXPR_LOCATION (exp);

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = build_int_cst (TREE_TYPE (exp), 0);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);
      arg0 = NULL_TREE;

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  if (TREE_OPERAND_LENGTH (exp) > 0)
	    arg0 = TREE_OPERAND (exp, 0);
	  if (TREE_CODE_CLASS (code) == tcc_binary
	      || TREE_CODE_CLASS (code) == tcc_comparison
	      || (TREE_CODE_CLASS (code) == tcc_expression
		  && TREE_OPERAND_LENGTH (exp) > 1))
	    arg1 = TREE_OPERAND (exp, 1);
	}
      if (arg0 == NULL_TREE)
	break;

      nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
			      &high, &in_p, strict_overflow_p);
      if (nexp == NULL_TREE)
	break;
      exp = nexp;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
						 exp, 0, low, 0))
		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
						    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of,
   depending on IN_P) the range.  Return 0 if the test couldn't be
   created.  */

tree
build_range_check (location_t loc, tree type, tree exp, int in_p,
		   tree low, tree high)
{
  tree etype = TREE_TYPE (exp), value;

  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (targetm.have_canonicalize_funcptr_for_compare ()
      && TREE_CODE (etype) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
    return NULL_TREE;

  if (! in_p)
    {
      value = build_range_check (loc, type, exp, 1, low, high);
      if (value != 0)
	return invert_truthvalue_loc (loc, value);

      return 0;
    }

  if (low == 0 && high == 0)
    return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);

  if (low == 0)
    return fold_build2_loc (loc, LE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, high));

  if (high == 0)
    return fold_build2_loc (loc, GE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  if (operand_equal_p (low, high, 0))
    return fold_build2_loc (loc, EQ_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
	{
	  etype = unsigned_type_for (etype);
	  high = fold_convert_loc (loc, etype, high);
	  exp = fold_convert_loc (loc, etype, exp);
	}
      return build_range_check (loc, type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      int prec = TYPE_PRECISION (etype);

      if (wi::mask (prec - 1, false, prec) == high)
	{
	  if (TYPE_UNSIGNED (etype))
	    {
	      tree signed_etype = signed_type_for (etype);
	      if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
		etype
		  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
	      else
		etype = signed_etype;
	      exp = fold_convert_loc (loc, etype, exp);
	    }
	  return fold_build2_loc (loc, GT_EXPR, type, exp,
				  build_int_cst (etype, 0));
	}
    }

  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetics for the type of the expression.
     First make sure that arithmetics in this type is valid, then make sure
     that it wraps around.  */
  if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
    etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
					    TYPE_UNSIGNED (etype));

  if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
	 for the type in question, as we rely on this here.  */
      utype = unsigned_type_for (etype);
      maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
			  build_int_cst (TREE_TYPE (maxv), 1), 1);
      minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
				      minv, 1, maxv, 1)))
	etype = utype;
      else
	return 0;
    }

  high = fold_convert_loc (loc, etype, high);
  low = fold_convert_loc (loc, etype, low);
  exp = fold_convert_loc (loc, etype, exp);

  value = const_binop (MINUS_EXPR, high, low);

  if (POINTER_TYPE_P (etype))
    {
      if (value != 0 && !TREE_OVERFLOW (value))
	{
	  low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
	  return build_range_check (loc, type,
				    fold_build_pointer_plus_loc (loc, exp, low),
				    1, build_int_cst (etype, 0), value);
	}
      return 0;
    }

  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (loc, type,
			      fold_build2_loc (loc, MINUS_EXPR, etype,
					       exp, low),
			      1, build_int_cst (etype, 0), value);

  return 0;
}
/* Return the predecessor of VAL in its type, handling the infinite case.  */

static tree
range_predecessor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
    return 0;
  else
    return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
			build_int_cst (TREE_TYPE (val), 1), 0);
}

/* Return the successor of VAL in its type, handling the infinite case.  */

static tree
range_successor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
    return 0;
  else
    return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
			build_int_cst (TREE_TYPE (val), 1), 0);
}
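
/* Editor's illustrative sketch -- not part of the original file.  The two
   helpers above treat the type's extreme values as "infinite": the
   successor of TYPE_MAX_VALUE (and the predecessor of TYPE_MIN_VALUE) is
   the null tree, which callers read as an unbounded range end.  */

static bool ATTRIBUTE_UNUSED
example_successor_saturates (tree type)
{
  return range_successor (TYPE_MAX_VALUE (type)) == NULL_TREE;
}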
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified
   parameters.  */

bool
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
	      tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (lowequal
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the second range to the end of the first.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)
	{
	  low = range_successor (high1);
	  high = high0;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* We are in the weird situation where high0 > high1 but
		 high1 has no successor.  Punt.  */
	      return 0;
	    }
	}
      else if (! subset || highequal)
	{
	  low = low0;
	  high = range_predecessor (low1);
	  in_p = 1;
	  if (high == 0)
	    {
	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
	      return 0;
	    }
	}
      else
	return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
	in_p = 0, low = high = 0;
      else
	{
	  low = range_successor (high0);
	  high = high1;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* high1 > high0 but high0 has no successor.  Punt.  */
	      return 0;
	    }
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of
	 the second.  */
      if (no_overlap)
	{
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_successor (high0),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    {
	      /* Canonicalize - [min, x] into - [-, x].  */
	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (low0)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (low0))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (low0,
					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
		      low0 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
			&& integer_zerop (low0))
		      low0 = 0;
		    break;
		  default:
		    break;
		  }

	      /* Canonicalize - [x, max] into - [x, -].  */
	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (high1)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (high1))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (high1,
					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
		      high1 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
						       high1, 1,
						       build_int_cst (TREE_TYPE (high1), 1),
						       1)))
		      high1 = 0;
		    break;
		  default:
		    break;
		  }

	      /* The ranges might be also adjacent between the maximum and
		 minimum values of the given type.  For
		 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
		 return + [x + 1, y - 1].  */
	      if (low0 == 0 && high1 == 0)
		{
		  low = range_successor (high0);
		  high = range_predecessor (low1);
		  if (low == 0 || high == 0)
		    return 0;

		  in_p = 1;
		}
	      else
		return 0;
	    }
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is being used also to optimize
   A op B ? C : A, by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (location_t loc, tree type,
				tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if (!HONOR_SIGNED_ZEROS (element_mode (type))
      && (FLOAT_TYPE_P (TREE_TYPE (arg01))
	  ? real_zerop (arg01)
	  : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
	     /* In the case that A is of the form X-Y, '-A' (arg2) may
	        have already been folded to Y-X, check for that.  */
	  || (TREE_CODE (arg1) == MINUS_EXPR
	      && TREE_CODE (arg2) == MINUS_EXPR
	      && operand_equal_p (TREE_OPERAND (arg1, 0),
				  TREE_OPERAND (arg2, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg1, 1),
				  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	tem = fold_convert_loc (loc, arg1_type, arg1);
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
							  negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert_loc (loc, signed_type_for
				   (TREE_TYPE (arg1)), arg1);
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case LE_EXPR:
      case LT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert_loc (loc, signed_type_for
				   (TREE_TYPE (arg1)), arg1);
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return negate_expr (fold_convert_loc (loc, type, tem));
      default:
	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (!HONOR_SIGNED_ZEROS (element_mode (type))
      && integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      else if (comp_code == EQ_EXPR)
	return build_zero_cst (type);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (!HONOR_SIGNED_ZEROS (element_mode (type))
      && operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
	 as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
	  || VECTOR_TYPE_P (type)
	  || (! lang_GNU_CXX ()
	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
	  || ! maybe_lvalue_p (arg1)
	  || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
	{
	  comp_type = type;
	  comp_op0 = arg1;
	  comp_op1 = arg2;
	}

      switch (comp_code)
	{
	case EQ_EXPR:
	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
	case NE_EXPR:
	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
	case LE_EXPR:
	case LT_EXPR:
	case UNLE_EXPR:
	case UNLT_EXPR:
	  /* In C++ a ?: expression can be an lvalue, so put the
	     operand which will be used if they are equal first
	     so that we can convert this back to the
	     corresponding COND_EXPR.  */
	  if (!HONOR_NANS (arg1))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
		    ? fold_build2_loc (loc, MIN_EXPR, comp_type,
				       comp_op0, comp_op1)
		    : fold_build2_loc (loc, MIN_EXPR, comp_type,
				       comp_op1, comp_op0);
	      return pedantic_non_lvalue_loc (loc,
					      fold_convert_loc (loc, type, tem));
	    }
	  break;
	case GE_EXPR:
	case GT_EXPR:
	case UNGE_EXPR:
	case UNGT_EXPR:
	  if (!HONOR_NANS (arg1))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
		    ? fold_build2_loc (loc, MAX_EXPR, comp_type,
				       comp_op0, comp_op1)
		    : fold_build2_loc (loc, MAX_EXPR, comp_type,
				       comp_op1, comp_op0);
	      return pedantic_non_lvalue_loc (loc,
					      fold_convert_loc (loc, type, tem));
	    }
	  break;
	case UNEQ_EXPR:
	  if (!HONOR_NANS (arg1))
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, arg2));
	  break;
	case LTGT_EXPR:
	  if (!HONOR_NANS (arg1))
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, arg1));
	  break;
	default:
	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	  break;
	}
    }

  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
	if (TREE_CODE (arg1) == INTEGER_CST)
	  break;
	/* We can replace A with C1 in this case.  */
	arg1 = fold_convert_loc (loc, type, arg01);
	return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);

      case LT_EXPR:
	/* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
	   MIN_EXPR, to preserve the signedness of the comparison.  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, tem));
	  }
	break;

      case LE_EXPR:
	/* If C1 is C2 - 1, this is min(A, C2), with the same care
	   as above.  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, tem));
	  }
	break;

      case GT_EXPR:
	/* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
	   MAX_EXPR, to preserve the signedness of the comparison.  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	  }
	break;

      case GE_EXPR:
	/* If C1 is C2 + 1, this is max(A, C2), with the same care as above.  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	  }
	break;

      case NE_EXPR:
	break;

      default:
	gcc_unreachable ();
      }

  return NULL_TREE;
}
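
/* Editor's illustrative sketch -- not part of the original file.  The
   canonical client of the function above: "x >= 0 ? x : -x" folds to
   ABS_EXPR <x> (and "x <= 0 ? x : -x" to its negation) when signed zeros
   need not be honored.  The helper name is hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_fold_abs_cond (location_t loc, tree x)
{
  tree zero = build_int_cst (TREE_TYPE (x), 0);
  tree cmp = fold_build2_loc (loc, GE_EXPR, boolean_type_node, x, zero);
  return fold_cond_expr_with_comparison (loc, TREE_TYPE (x), cmp, x,
					 negate_expr (x));
}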
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
		false) >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
	       || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  bool strict_overflow_p = false;
  tree tem, lhs, rhs;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when simplifying range test");

  if (!INTEGRAL_TYPE_P (type))
    return 0;

  lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
  rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (loc, type,
					 lhs != 0 ? lhs
					 : rhs != 0 ? rhs : integer_zero_node,
					 in_p, low, high))))
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
      return or_op ? invert_truthvalue_loc (loc, tem) : tem;
    }

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
	   && lhs != 0 && rhs != 0
	   && (code == TRUTH_ANDIF_EXPR
	       || code == TRUTH_ORIF_EXPR)
	   && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build2_loc (loc, code == TRUTH_ANDIF_EXPR
			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			   type, op0, op1);

      else if (!lang_hooks.decls.global_bindings_p ()
	       && !CONTAINS_PLACEHOLDER_P (lhs))
	{
	  tree common = save_expr (lhs);

	  if (0 != (lhs = build_range_check (loc, type, common,
					     or_op ? ! in0_p : in0_p,
					     low0, high0))
	      && (0 != (rhs = build_range_check (loc, type, common,
						 or_op ? ! in1_p : in1_p,
						 low1, high1))))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (warnmsg,
				       WARN_STRICT_OVERFLOW_COMPARISON);
	      return build2_loc (loc, code == TRUTH_ANDIF_EXPR
				 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
				 type, lhs, rhs);
	    }
	}
    }

  return 0;
}
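
/* Editor's illustrative sketch -- not part of the original file.  Feeding
   "ch >= '0' && ch <= '9'" through the function above yields the single
   range test built by build_range_check; for "||" the two ranges are
   inverted first and the merged result inverted back.  The helper name is
   hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_fold_digit_test (location_t loc, tree ch)
{
  tree lo = build_int_cst (TREE_TYPE (ch), '0');
  tree hi = build_int_cst (TREE_TYPE (ch), '9');
  tree lhs = fold_build2_loc (loc, GE_EXPR, boolean_type_node, ch, lo);
  tree rhs = fold_build2_loc (loc, LE_EXPR, boolean_type_node, ch, hi);
  return fold_range_test (loc, TRUTH_ANDIF_EXPR, boolean_type_node,
			  lhs, rhs);
}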
/* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is signed-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (signed_type_for (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
			fold_convert (TREE_TYPE (c), mask));
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
}
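
/* Editor's illustrative sketch -- not part of the original file.  A worked
   example of unextend: for an 8-bit signed field inside a 32-bit word,
   P = 8, so the field value 0xff (i.e. -1) is sign-extended to
   0xffffffff, making the extra 24 bits match what loading the wider word
   would produce.  The helper name is hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_unextend_byte (tree c)
{
  /* Treat C as an 8-bit signed value inside its (wider) type.  */
  return unextend (c, 8, 0, NULL_TREE);
}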
/* For an expression that has the form
     (A && B) || ~B
   or
     (A || B) && ~B,
   we can drop one of the inner expressions and simplify to
     A || ~B
   or
     A && ~B.

   LOC is the location of the resulting expression.  OP is the inner
   logical operation; the left-hand side in the examples above, while CMPOP
   is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
   removing a condition that guards another, as in
     (A != NULL && A->...) || A == NULL
   which we must not transform.  If RHS_ONLY is true, only eliminate the
   right-most operand of the inner logical operation.  */

static tree
merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
				 bool rhs_only)
{
  tree type = TREE_TYPE (cmpop);
  enum tree_code code = TREE_CODE (cmpop);
  enum tree_code truthop_code = TREE_CODE (op);
  tree lhs = TREE_OPERAND (op, 0);
  tree rhs = TREE_OPERAND (op, 1);
  tree orig_lhs = lhs, orig_rhs = rhs;
  enum tree_code rhs_code = TREE_CODE (rhs);
  enum tree_code lhs_code = TREE_CODE (lhs);
  enum tree_code inv_code;

  if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
    return NULL_TREE;

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return NULL_TREE;

  if (rhs_code == truthop_code)
    {
      tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
      if (newrhs != NULL_TREE)
	{
	  rhs = newrhs;
	  rhs_code = TREE_CODE (rhs);
	}
    }
  if (lhs_code == truthop_code && !rhs_only)
    {
      tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
      if (newlhs != NULL_TREE)
	{
	  lhs = newlhs;
	  lhs_code = TREE_CODE (lhs);
	}
    }

  inv_code = invert_tree_comparison (code, HONOR_NANS (type));
  if (inv_code == rhs_code
      && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return lhs;
  if (!rhs_only && inv_code == lhs_code
      && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return rhs;
  if (rhs != orig_rhs || lhs != orig_lhs)
    return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
			    lhs, rhs);
  return NULL_TREE;
}
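
/* Editor's illustrative sketch -- not part of the original file.  The
   transformation above drops a redundant arm: for OP = (x < y && z) and
   CMPOP = (x >= y), the inner "x < y" is the inverse of CMPOP and is
   removed, leaving just "z".  The helper name is hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_drop_opposite_arm (location_t loc, tree inner_and, tree opposite_cmp)
{
  return merge_truthop_with_opposite_arm (loc, inner_and, opposite_cmp,
					  false);
}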
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with the a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made this by
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
		    tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  HOST_WIDE_INT first_bit, end_bit;
  int volatilep;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
		    build_int_cst (TREE_TYPE (lhs), 0));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
		    build_int_cst (TREE_TYPE (rhs), 0));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      if (operand_equal_p (ll_arg, rl_arg, 0)
	  && operand_equal_p (lr_arg, rr_arg, 0))
	{
	  result = combine_comparisons (loc, code, lcode, rcode,
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
      else if (operand_equal_p (ll_arg, rr_arg, 0)
	       && operand_equal_p (lr_arg, rl_arg, 0))
	{
	  result = combine_comparisons (loc, code, lcode,
					swap_tree_comparison (rcode),
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  */

  if (BRANCH_COST (optimize_function_for_speed_p (cfun),
		   false) >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
	  && lcode == NE_EXPR && integer_zerop (lr_arg)
	  && rcode == NE_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2_loc (loc, NE_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2_loc (loc, EQ_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (loc, ll_arg,
				     &ll_bitsize, &ll_bitpos, &ll_mode,
				     &ll_unsignedp, &volatilep, &ll_mask,
				     &ll_and_mask);
  lr_inner = decode_field_reference (loc, lr_arg,
				     &lr_bitsize, &lr_bitpos, &lr_mode,
				     &lr_unsignedp, &volatilep, &lr_mask,
				     &lr_and_mask);
  rl_inner = decode_field_reference (loc, rl_arg,
				     &rl_bitsize, &rl_bitpos, &rl_mode,
				     &rl_unsignedp, &volatilep, &rl_mask,
				     &rl_and_mask);
  rr_inner = decode_field_reference (loc, rr_arg,
				     &rr_bitsize, &rr_bitpos, &rr_mode,
				     &rr_unsignedp, &volatilep, &rr_mask,
				     &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
	   || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
	{
	  /* Make the left operand unsigned, since we are only interested
	     in the value of one bit.  Otherwise we are doing the wrong
	     thing below.  */
	  ll_unsignedp = 1;
	  l_const = ll_mask;
	}
      else
	return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
	{
	  rl_unsignedp = 1;
	  r_const = rl_mask;
	}
      else
	return 0;
    }

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
			  volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
			 size_int (xll_bitpos));
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
			 size_int (xrl_bitpos));

  if (l_const)
    {
      l_const = fold_convert_loc (loc, lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,
							 lntype, ll_mask))))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }
  if (r_const)
    {
      r_const = fold_convert_loc (loc, lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,
							 lntype, rl_mask))))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
	  /* Make sure the two fields on the right
	     correspond to the left without being swapped.  */
	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
	return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
			      volatilep);
      if (rnmode == VOIDmode)
	return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
	{
	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
	}

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
							    rntype, lr_mask),
			     size_int (xlr_bitpos));
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
							    rntype, rr_mask),
			     size_int (xrr_bitpos));

      /* Make a mask that corresponds to both fields being compared.
	 Do this for both items being compared.  If the operands are the
	 same size and the bits being compared are in the same position
	 then we can do this by masking both and comparing the masked
	 results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
	{
	  lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
				    ll_unsignedp || rl_unsignedp);
	  if (! all_ones_mask_p (ll_mask, lnbitsize))
	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

	  rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
				    lr_unsignedp || rr_unsignedp);
	  if (! all_ones_mask_p (lr_mask, rnbitsize))
	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
	}

      /* There is still another way we can do something:  If both pairs of
	 fields being compared are adjacent, we may be able to make a wider
	 field containing them both.

	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
	 the mask must be shifted to account for the shift done by
	 make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
	   && lr_bitsize + lr_bitpos == rr_bitpos)
	  || (ll_bitpos == rl_bitpos + rl_bitsize
	      && lr_bitpos == rr_bitpos + rr_bitsize))
	{
	  tree type;

	  lhs = make_bit_field_ref (loc, ll_inner, lntype,
				    ll_bitsize + rl_bitsize,
				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
	  rhs = make_bit_field_ref (loc, lr_inner, rntype,
				    lr_bitsize + rr_bitsize,
				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
				 size_int (MIN (xll_bitpos, xrl_bitpos)));
	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
				 size_int (MIN (xlr_bitpos, xrr_bitpos)));

	  /* Convert to the smaller type before masking out unwanted bits.  */
	  type = lntype;
	  if (lntype != rntype)
	    {
	      if (lnbitsize > rnbitsize)
		{
		  lhs = fold_convert_loc (loc, rntype, lhs);
		  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
		  type = rntype;
		}
	      else if (lnbitsize < rnbitsize)
		{
		  rhs = fold_convert_loc (loc, lntype, rhs);
		  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
		  type = lntype;
		}
	    }

	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
	}

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
			   const_binop (BIT_AND_EXPR, result, r_const)) != 1)
    {
      if (wanted_code == NE_EXPR)
	{
	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
	  return constant_boolean_node (true, truth_type);
	}
      else
	{
	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
	  return constant_boolean_node (false, truth_type);
	}
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
			       ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);

  return build2_loc (loc, wanted_code, truth_type, result,
		     const_binop (BIT_IOR_EXPR, l_const, r_const));
}
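
/* Editor's illustrative sketch -- not part of the original file.  One of
   the rewrites performed above: on targets with expensive branches,
   "(a != 0) || (b != 0)" with simple integral operands becomes the single
   test "(a | b) != 0".  The helper name is hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_merge_nonzero_tests (location_t loc, tree a, tree b)
{
  tree zero = build_int_cst (TREE_TYPE (a), 0);
  tree lhs = fold_build2_loc (loc, NE_EXPR, boolean_type_node, a, zero);
  tree rhs = fold_build2_loc (loc, NE_EXPR, boolean_type_node, b, zero);
  return fold_truth_andor_1 (loc, TRUTH_ORIF_EXPR, boolean_type_node,
			     lhs, rhs);
}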
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
			    tree op0, tree op1)
{
  tree arg0 = op0;
  enum tree_code op_code;
  tree comp_const;
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_OVERFLOW (minmax_const))
    return NULL_TREE;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (code)
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      {
	tree tem
	  = optimize_minmax_comparison (loc,
					invert_tree_comparison (code, false),
					type, op0, op1);
	if (tem)
	  return invert_truthvalue_loc (loc, tem);
	return NULL_TREE;
      }

    case GE_EXPR:
      return
	fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
			 optimize_minmax_comparison
			 (loc, EQ_EXPR, type, arg0, comp_const),
			 optimize_minmax_comparison
			 (loc, GT_EXPR, type, arg0, comp_const));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
	/* MAX (X, 0) == 0  ->  X <= 0  */
	return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR && consts_lt)
	/* MAX (X, 0) == 5  ->  X == 5   */
	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) == -1  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else if (consts_equal)
	/* MIN (X, 0) == 0  ->  X >= 0  */
	return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);

      else if (consts_lt)
	/* MIN (X, 0) == 5  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
	/* MIN (X, 0) == -1  ->  X == -1  */
	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
	/* MAX (X, 0) > 0  ->  X > 0
	   MAX (X, 0) > 5  ->  X > 5  */
	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) > -1  ->  true  */
	return omit_one_operand_loc (loc, type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
	/* MIN (X, 0) > 0  ->  false
	   MIN (X, 0) > 5  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
	/* MIN (X, 0) > -1  ->  X > -1  */
	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

    default:
      return NULL_TREE;
    }
}
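
/* Editor's illustrative sketch -- not part of the original file.  The
   EQ_EXPR arm above in action: "MAX (x, 0) == 0" simplifies to "x <= 0",
   since the max can only equal the constant when x does not exceed it.
   The helper name is hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_fold_max_eq_zero (location_t loc, tree x)
{
  tree zero = build_int_cst (TREE_TYPE (x), 0);
  tree maxx = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (x), x, zero);
  return optimize_minmax_comparison (loc, EQ_EXPR, boolean_type_node,
				     maxx, zero);
}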
5836 /* T is an integer expression that is being multiplied, divided, or taken a
5837 modulus (CODE says which and what kind of divide or modulus) by a
5838 constant C. See if we can eliminate that operation by folding it with
5839 other operations already in T. WIDE_TYPE, if non-null, is a type that
5840 should be used for the computation if wider than our type.
5842 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5843 (X * 2) + (Y * 4). We must, however, be assured that either the original
5844 expression would not overflow or that overflow is undefined for the type
5845 in the language in question.
5847 If we return a non-null expression, it is an equivalent form of the
5848 original computation, but need not be in the original type.
5850 We set *STRICT_OVERFLOW_P to true if the return values depends on
5851 signed overflow being undefined. Otherwise we do not change
5852 *STRICT_OVERFLOW_P. */
5855 extract_muldiv (tree t
, tree c
, enum tree_code code
, tree wide_type
,
5856 bool *strict_overflow_p
)
5858 /* To avoid exponential search depth, refuse to allow recursion past
5859 three levels. Beyond that (1) it's highly unlikely that we'll find
5860 something interesting and (2) we've probably processed it before
5861 when we built the inner expression. */
5870 ret
= extract_muldiv_1 (t
, c
, code
, wide_type
, strict_overflow_p
);
5877 extract_muldiv_1 (tree t
, tree c
, enum tree_code code
, tree wide_type
,
5878 bool *strict_overflow_p
)
5880 tree type
= TREE_TYPE (t
);
5881 enum tree_code tcode
= TREE_CODE (t
);
5882 tree ctype
= (wide_type
!= 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type
))
5883 > GET_MODE_SIZE (TYPE_MODE (type
)))
5884 ? wide_type
: type
);
5886 int same_p
= tcode
== code
;
5887 tree op0
= NULL_TREE
, op1
= NULL_TREE
;
5888 bool sub_strict_overflow_p
;
5890 /* Don't deal with constants of zero here; they confuse the code below. */
5891 if (integer_zerop (c
))
5894 if (TREE_CODE_CLASS (tcode
) == tcc_unary
)
5895 op0
= TREE_OPERAND (t
, 0);
5897 if (TREE_CODE_CLASS (tcode
) == tcc_binary
)
5898 op0
= TREE_OPERAND (t
, 0), op1
= TREE_OPERAND (t
, 1);
5900 /* Note that we need not handle conditional operations here since fold
5901 already handles those cases. So just do arithmetic here. */
5905 /* For a constant, we can always simplify if we are a multiply
5906 or (for divide and modulus) if it is a multiple of our constant. */
5907 if (code
== MULT_EXPR
5908 || wi::multiple_of_p (t
, c
, TYPE_SIGN (type
)))
5909 return const_binop (code
, fold_convert (ctype
, t
),
5910 fold_convert (ctype
, c
));
5913 CASE_CONVERT
: case NON_LVALUE_EXPR
:
5914 /* If op0 is an expression ... */
5915 if ((COMPARISON_CLASS_P (op0
)
5916 || UNARY_CLASS_P (op0
)
5917 || BINARY_CLASS_P (op0
)
5918 || VL_EXP_CLASS_P (op0
)
5919 || EXPRESSION_CLASS_P (op0
))
5920 /* ... and has wrapping overflow, and its type is smaller
5921 than ctype, then we cannot pass through as widening. */
5922 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0
))
5923 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0
)))
5924 && (TYPE_PRECISION (ctype
)
5925 > TYPE_PRECISION (TREE_TYPE (op0
))))
5926 /* ... or this is a truncation (t is narrower than op0),
5927 then we cannot pass through this narrowing. */
5928 || (TYPE_PRECISION (type
)
5929 < TYPE_PRECISION (TREE_TYPE (op0
)))
5930 /* ... or signedness changes for division or modulus,
5931 then we cannot pass through this conversion. */
5932 || (code
!= MULT_EXPR
5933 && (TYPE_UNSIGNED (ctype
)
5934 != TYPE_UNSIGNED (TREE_TYPE (op0
))))
5935 /* ... or has undefined overflow while the converted to
5936 type has not, we cannot do the operation in the inner type
5937 as that would introduce undefined overflow. */
5938 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0
))
5939 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0
)))
5940 && !TYPE_OVERFLOW_UNDEFINED (type
))))
5943 /* Pass the constant down and see if we can make a simplification. If
5944 we can, replace this expression with the inner simplification for
5945 possible later conversion to our or some other type. */
5946 if ((t2
= fold_convert (TREE_TYPE (op0
), c
)) != 0
5947 && TREE_CODE (t2
) == INTEGER_CST
5948 && !TREE_OVERFLOW (t2
)
5949 && (0 != (t1
= extract_muldiv (op0
, t2
, code
,
5951 ? ctype
: NULL_TREE
,
5952 strict_overflow_p
))))
5957 /* If widening the type changes it from signed to unsigned, then we
5958 must avoid building ABS_EXPR itself as unsigned. */
5959 if (TYPE_UNSIGNED (ctype
) && !TYPE_UNSIGNED (type
))
5961 tree cstype
= (*signed_type_for
) (ctype
);
5962 if ((t1
= extract_muldiv (op0
, c
, code
, cstype
, strict_overflow_p
))
5965 t1
= fold_build1 (tcode
, cstype
, fold_convert (cstype
, t1
));
5966 return fold_convert (ctype
, t1
);
5970 /* If the constant is negative, we cannot simplify this. */
5971 if (tree_int_cst_sgn (c
) == -1)
5975 /* For division and modulus, type can't be unsigned, as e.g.
5976 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5977 For signed types, even with wrapping overflow, this is fine. */
5978 if (code
!= MULT_EXPR
&& TYPE_UNSIGNED (type
))
5980 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
, strict_overflow_p
))
5982 return fold_build1 (tcode
, ctype
, fold_convert (ctype
, t1
));
5985 case MIN_EXPR
: case MAX_EXPR
:
5986 /* If widening the type changes the signedness, then we can't perform
5987 this optimization as that changes the result. */
5988 if (TYPE_UNSIGNED (ctype
) != TYPE_UNSIGNED (type
))
5991 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5992 sub_strict_overflow_p
= false;
5993 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
,
5994 &sub_strict_overflow_p
)) != 0
5995 && (t2
= extract_muldiv (op1
, c
, code
, wide_type
,
5996 &sub_strict_overflow_p
)) != 0)
5998 if (tree_int_cst_sgn (c
) < 0)
5999 tcode
= (tcode
== MIN_EXPR
? MAX_EXPR
: MIN_EXPR
);
6000 if (sub_strict_overflow_p
)
6001 *strict_overflow_p
= true;
6002 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
6003 fold_convert (ctype
, t2
));
6007 case LSHIFT_EXPR
: case RSHIFT_EXPR
:
6008 /* If the second operand is constant, this is a multiplication
6009 or floor division, by a power of two, so we can treat it that
6010 way unless the multiplier or divisor overflows. Signed
6011 left-shift overflow is implementation-defined rather than
6012 undefined in C90, so do not convert signed left shift into
6014 if (TREE_CODE (op1
) == INTEGER_CST
6015 && (tcode
== RSHIFT_EXPR
|| TYPE_UNSIGNED (TREE_TYPE (op0
)))
6016 /* const_binop may not detect overflow correctly,
6017 so check for it explicitly here. */
6018 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node
)), op1
)
6019 && 0 != (t1
= fold_convert (ctype
,
6020 const_binop (LSHIFT_EXPR
,
6023 && !TREE_OVERFLOW (t1
))
6024 return extract_muldiv (build2 (tcode
== LSHIFT_EXPR
6025 ? MULT_EXPR
: FLOOR_DIV_EXPR
,
6027 fold_convert (ctype
, op0
),
6029 c
, code
, wide_type
, strict_overflow_p
);
    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
         can return a new PLUS or MINUS.  If we can't, the only remaining
         cases where we can do anything are if the second operand is a
         constant.  */
      sub_strict_overflow_p = false;
      t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
      t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
      if (t1 != 0 && t2 != 0
          && (code == MULT_EXPR
              /* If not multiplication, we can only do this if both operands
                 are divisible by c.  */
              || (multiple_of_p (ctype, op0, c)
                  && multiple_of_p (ctype, op1, c))))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
                              fold_convert (ctype, t2));
        }

      /* If this was a subtraction, negate OP1 and set it to be an addition.
         This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
        {
          tcode = PLUS_EXPR, op1 = negate_expr (op1);
          /* If OP1 was not easily negatable, the constant may be OP0.  */
          if (TREE_CODE (op0) == INTEGER_CST)
            {
              std::swap (op0, op1);
              std::swap (t1, t2);
            }
        }

      if (TREE_CODE (op1) != INTEGER_CST)
        break;

      /* If either OP1 or C are negative, this optimization is not safe for
         some of the division and remainder types while for others we need
         to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
        {
          if (code == CEIL_DIV_EXPR)
            code = FLOOR_DIV_EXPR;
          else if (code == FLOOR_DIV_EXPR)
            code = CEIL_DIV_EXPR;
          else if (code != MULT_EXPR
                   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
            break;
        }

      /* If it's a multiply or a division/modulus operation of a multiple
         of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
          || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
        {
          op1 = const_binop (code, fold_convert (ctype, op1),
                             fold_convert (ctype, c));
          /* We allow the constant to overflow with wrapping semantics.  */
          if (op1 == 0
              || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
            break;
        }
      else
        break;

      /* If we have an unsigned type, we cannot widen the operation since it
         will change the result if the original computation overflowed.  */
      if (TYPE_UNSIGNED (ctype) && ctype != type)
        break;

      /* If we were able to eliminate our operation from the first side,
         apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
        return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
         apply the distributive law to commute the multiply and addition
         if the multiplication of the constants doesn't overflow
         and overflow is defined.  With undefined overflow
         op0 * c might overflow, while (op0 + orig_op1) * c doesn't.  */
      if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
        return fold_build2 (tcode, ctype,
                            fold_build2 (code, ctype,
                                         fold_convert (ctype, op0),
                                         fold_convert (ctype, c)),
                            op1);

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
         (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
           || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
          /* If the multiplication can overflow we cannot optimize this.  */
          && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
          && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
          && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
        {
          *strict_overflow_p = true;
          return omit_one_operand (type, integer_zero_node, op0);
        }

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
         new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
         do something only if the second operand is a constant.  */
      if (same_p
          && (t1 = extract_muldiv (op0, c, code, wide_type,
                                   strict_overflow_p)) != 0)
        return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
                            fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
               && (t1 = extract_muldiv (op1, c, code, wide_type,
                                        strict_overflow_p)) != 0)
        return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
                            fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
        return 0;

      /* If these are the same operation types, we can associate them
         assuming no overflow.  */
      if (tcode == code)
        {
          bool overflow_p = false;
          bool overflow_mul_p;
          signop sign = TYPE_SIGN (ctype);
          wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
          overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
          if (overflow_mul_p
              && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
            overflow_p = true;
          if (!overflow_p)
            return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
                                wide_int_to_tree (ctype, mul));
        }

      /* If these operations "cancel" each other, we have the main
         optimizations of this pass, which occur when either constant is a
         multiple of the other, in which case we replace this with an
         operation of either CODE or TCODE.

         If we have an unsigned type, we cannot do this since it will change
         the result if the original computation overflowed.  */
      if (TYPE_OVERFLOW_UNDEFINED (ctype)
          && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
              || (tcode == MULT_EXPR
                  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
                  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
                  && code != MULT_EXPR)))
        {
          if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
            {
              if (TYPE_OVERFLOW_UNDEFINED (ctype))
                *strict_overflow_p = true;
              return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
                                  fold_convert (ctype,
                                                const_binop (TRUNC_DIV_EXPR,
                                                             op1, c)));
            }
          else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
            {
              if (TYPE_OVERFLOW_UNDEFINED (ctype))
                *strict_overflow_p = true;
              return fold_build2 (code, ctype, fold_convert (ctype, op0),
                                  fold_convert (ctype,
                                                const_binop (TRUNC_DIV_EXPR,
                                                             c, op1)));
            }
        }
      break;

    default:
      break;
    }

  return 0;
}
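
/* Illustrative note (editor's addition, not in the original source):
   for signed int X with undefined overflow, the "cancel" branch above
   rewrites (X * 8) / 4 as X * 2, computing the quotient with
   const_binop (TRUNC_DIV_EXPR, 8, 4); *strict_overflow_p is set
   because the rewrite would be invalid if X * 8 wrapped.  */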
/* Return a node which has the indicated constant VALUE (either 0 or
   1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
   and is of the indicated TYPE.  */

tree
constant_boolean_node (bool value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else if (TREE_CODE (type) == VECTOR_TYPE)
    return build_vector_from_val (type,
                                  build_int_cst (TREE_TYPE (type),
                                                 value ? -1 : 0));
  else
    return fold_convert (type, value ? integer_one_node : integer_zero_node);
}
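
/* Illustrative note (editor's addition): constant_boolean_node (true,
   boolean_type_node) yields boolean_true_node, while for a vector
   comparison type it yields a vector with all-ones elements, matching
   the {-1,-1,...} convention documented above.  */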
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (location_t loc,
                                     enum tree_code code,
                                     tree type, tree op0, tree op1,
                                     tree cond, tree arg, int cond_first_p)
{
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;
  enum tree_code cond_code = COND_EXPR;

  if (TREE_CODE (cond) == COND_EXPR
      || TREE_CODE (cond) == VEC_COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an expression, then it does not make
         sense to try to perform a logical or arithmetic operation
         involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
        lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
        rhs = false_value;
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
    cond_code = VEC_COND_EXPR;

  /* This transformation is only worthwhile if we don't have to wrap ARG
     in a SAVE_EXPR and the operation can be simplified without recursing
     on at least one of the branches once it's pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg)
      && (TREE_SIDE_EFFECTS (arg)
          || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
          || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
    return NULL_TREE;

  arg = fold_convert_loc (loc, arg_type, arg);
  if (lhs == 0)
    {
      true_value = fold_convert_loc (loc, cond_type, true_value);
      if (cond_first_p)
        lhs = fold_build2_loc (loc, code, type, true_value, arg);
      else
        lhs = fold_build2_loc (loc, code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert_loc (loc, cond_type, false_value);
      if (cond_first_p)
        rhs = fold_build2_loc (loc, code, type, false_value, arg);
      else
        rhs = fold_build2_loc (loc, code, type, arg, false_value);
    }

  /* Check that we have simplified at least one of the branches.  */
  if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
    return NULL_TREE;

  return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
}
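
/* Illustrative note (editor's addition): with CODE == PLUS_EXPR,
   ARG == 1 and COND == (b ? 2 : 3), both arms fold to constants and
   the result is b ? 3 : 4.  For a non-constant ARG, the guards above
   and below require that ARG is cheap to duplicate and that at least
   one pushed-down arm actually simplified.  */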
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

bool
fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (element_mode (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (element_mode (type)))
    return true;

  /* In a vector or complex, we would need to check the sign of all zeros.  */
  if (TREE_CODE (addend) != REAL_CST)
    return false;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
}
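
/* Illustrative note (editor's addition): under the default
   -fsigned-zeros, X + 0.0 must not fold (because -0.0 + 0.0 is +0.0),
   but the NEGATE form X - 0.0 still folds to X unless sign-dependent
   rounding must be honored; that is exactly the final
   "negate && ..." condition above.  */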
/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_div_compare (location_t loc,
                  enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  signop sign = TYPE_SIGN (TREE_TYPE (arg0));
  bool neg_overflow = false;
  bool overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1);  */
  wide_int val = wi::mul (arg01, arg1, sign, &overflow);
  prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
  neg_overflow = false;

  if (sign == UNSIGNED)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
                             build_int_cst (TREE_TYPE (arg01), 1));
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp).  */
      val = wi::add (prod, tmp, sign, &overflow);
      hi = force_fit_type (TREE_TYPE (arg00), val,
                           -1, overflow | TREE_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
                             build_int_cst (TREE_TYPE (arg01), 1));
      switch (tree_int_cst_sgn (arg1))
        {
        case -1:
          neg_overflow = true;
          lo = int_const_binop (MINUS_EXPR, prod, tmp);
          hi = prod;
          break;

        case 0:
          lo = fold_negate_const (tmp, TREE_TYPE (arg0));
          hi = tmp;
          break;

        case 1:
          hi = int_const_binop (PLUS_EXPR, prod, tmp);
          lo = prod;
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01,
                             build_int_cst (TREE_TYPE (arg01), 1));
      switch (tree_int_cst_sgn (arg1))
        {
        case -1:
          hi = int_const_binop (MINUS_EXPR, prod, tmp);
          lo = prod;
          break;

        case 0:
          hi = fold_negate_const (tmp, TREE_TYPE (arg0));
          lo = tmp;
          break;

        case 1:
          neg_overflow = true;
          lo = int_const_binop (PLUS_EXPR, prod, tmp);
          hi = prod;
          break;

        default:
          gcc_unreachable ();
        }
    }

  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
        return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
        return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
        return omit_one_operand_loc (loc, type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
        return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
        return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
        {
          tmp = neg_overflow ? integer_zero_node : integer_one_node;
          return omit_one_operand_loc (loc, type, tmp, arg00);
        }
      return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
        {
          tmp = neg_overflow ? integer_zero_node : integer_one_node;
          return omit_one_operand_loc (loc, type, tmp, arg00);
        }
      return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
        {
          tmp = neg_overflow ? integer_one_node : integer_zero_node;
          return omit_one_operand_loc (loc, type, tmp, arg00);
        }
      return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
        {
          tmp = neg_overflow ? integer_one_node : integer_zero_node;
          return omit_one_operand_loc (loc, type, tmp, arg00);
        }
      return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}
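
/* Illustrative note (editor's addition): for unsigned X, the
   comparison X / 4 == 2 has prod == 8 and tmp == 3, so lo == 8 and
   hi == 11, and the EQ_EXPR case emits the range check
   8 <= X && X <= 11 via build_range_check.  */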
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of the test
   using a sign test.  Otherwise return NULL.  TYPE is the desired
   result type.  */

static tree
fold_single_bit_test_into_sign_test (location_t loc,
                                     enum tree_code code, tree arg0, tree arg1,
                                     tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      /* If we have (A & C) != 0 where C is the sign bit of A, convert
         this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      if (arg00 != NULL_TREE
          /* This is only a win if casting to a signed type is cheap,
             i.e. when arg00's type is not a partial mode.  */
          && TYPE_PRECISION (TREE_TYPE (arg00))
             == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
        {
          tree stype = signed_type_for (TREE_TYPE (arg00));
          return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
                                  result_type,
                                  fold_convert_loc (loc, stype, arg00),
                                  build_int_cst (stype, 0));
        }
    }

  return NULL_TREE;
}
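
/* Illustrative note (editor's addition): for a 32-bit signed X, the
   test (X & 0x80000000) != 0 isolates the sign bit, so sign_bit_p
   succeeds and the result is simply X < 0; likewise the == 0 form
   becomes X >= 0.  */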
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */

tree
fold_single_bit_test (location_t loc, enum tree_code code,
                      tree arg0, tree arg1, tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem, one;

      /* First, see if we can fold the single bit test into a sign-bit
         test.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
                                                 result_type);
      if (tem)
        return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
         convert that into ((A >> C2) & 1).  Where C2 = log2(C).
         Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && bitnum < TYPE_PRECISION (type)
          && wi::ltu_p (TREE_OPERAND (inner, 1),
                        TYPE_PRECISION (type) - bitnum))
        {
          bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
                      && !flag_syntax_only) ? 0 : 1;

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert_loc (loc, intermediate_type, inner);

      if (bitnum != 0)
        inner = build2 (RSHIFT_EXPR, intermediate_type,
                        inner, size_int (bitnum));

      one = build_int_cst (intermediate_type, 1);

      if (code == EQ_EXPR)
        inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type,
                                 inner, one);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);

      /* Make sure to return the proper type.  */
      inner = fold_convert_loc (loc, result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
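
/* Illustrative note (editor's addition): when the sign-bit shortcut
   does not apply, (X & 8) != 0 becomes ((unsigned) X >> 3) & 1 in the
   intermediate type, and (X & 8) == 0 additionally XORs the shifted
   bit with 1 before the final AND.  */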
/* Check whether we are allowed to reorder operands arg0 and arg1,
   such that the evaluation of arg1 occurs before arg0.  */

static bool
reorder_operands_p (const_tree arg0, const_tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
         && ! TREE_SIDE_EFFECTS (arg1);
}
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */

bool
tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
{
  if (CONSTANT_CLASS_P (arg1))
    return 0;
  if (CONSTANT_CLASS_P (arg0))
    return 1;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  /* It is preferable to swap two SSA_NAME to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return 1;

  /* Put SSA_NAMEs last.  */
  if (TREE_CODE (arg1) == SSA_NAME)
    return 0;
  if (TREE_CODE (arg0) == SSA_NAME)
    return 1;

  /* Put variables last.  */
  if (DECL_P (arg1))
    return 0;
  if (DECL_P (arg0))
    return 1;

  return 0;
}
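
/* Illustrative note (editor's addition): callers use this predicate to
   canonicalize commutative operands, e.g. 5 + x is rewritten as x + 5
   because CONSTANT_CLASS_P (arg0) requests a swap, and two SSA names
   are ordered by SSA_NAME_VERSION so that equivalent expressions take
   a single canonical form.  */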
/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */

static tree
fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
{
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;

  if (TREE_CODE (bound) == LT_EXPR)
    a = TREE_OPERAND (bound, 0);
  else if (TREE_CODE (bound) == GT_EXPR)
    a = TREE_OPERAND (bound, 1);
  else
    return NULL_TREE;

  typea = TREE_TYPE (a);
  if (!INTEGRAL_TYPE_P (typea)
      && !POINTER_TYPE_P (typea))
    return NULL_TREE;

  if (TREE_CODE (ineq) == LT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 1);
      y = TREE_OPERAND (ineq, 0);
    }
  else if (TREE_CODE (ineq) == GT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 0);
      y = TREE_OPERAND (ineq, 1);
    }
  else
    return NULL_TREE;

  if (TREE_TYPE (a1) != typea)
    return NULL_TREE;

  if (POINTER_TYPE_P (typea))
    {
      /* Convert the pointer types into integer before taking the difference.  */
      tree ta = fold_convert_loc (loc, ssizetype, a);
      tree ta1 = fold_convert_loc (loc, ssizetype, a1);
      diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
    }
  else
    diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);

  if (!diff || !integer_onep (diff))
    return NULL_TREE;

  return fold_build2_loc (loc, GE_EXPR, type, a, y);
}
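
/* Illustrative note (editor's addition): the fold only fires when the
   difference A1 - A folds to exactly 1, so with BOUND == (a < x) and
   INEQ == (a + 1 > y) it produces a >= y, the non-sharp form promised
   in the comment above; for pointers the difference is computed in
   ssizetype first.  */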
/* Fold a sum or difference of at least one multiplication.
   Returns the folded tree or NULL if no simplification could be made.  */

static tree
fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
                          tree arg0, tree arg1)
{
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else if (TREE_CODE (arg0) == INTEGER_CST)
    {
      arg00 = build_one_cst (type);
      arg01 = arg0;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
        return NULL_TREE;
      arg00 = arg0;
      arg01 = build_one_cst (type);
    }
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else if (TREE_CODE (arg1) == INTEGER_CST)
    {
      arg10 = build_one_cst (type);
      /* As we canonicalize A - 2 to A + -2 get rid of that sign for
         the purpose of this canonicalization.  */
      if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
          && negate_expr_p (arg1)
          && code == PLUS_EXPR)
        {
          arg11 = negate_expr (arg1);
          code = MINUS_EXPR;
        }
      else
        arg11 = arg1;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
        return NULL_TREE;
      arg10 = arg1;
      arg11 = build_one_cst (type);
    }
  same = NULL_TREE;

  if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
  else if (tree_fits_shwi_p (arg01)
           && tree_fits_shwi_p (arg11))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = tree_to_shwi (arg01);
      int11 = tree_to_shwi (arg11);

      /* Move min of absolute values to int11.  */
      if (absu_hwi (int01) < absu_hwi (int11))
        {
          tmp = int01, int01 = int11, int11 = tmp;
          alt0 = arg00, arg00 = arg10, arg10 = alt0;
          maybe_same = arg01;
          swap = true;
        }
      else
        maybe_same = arg11;

      if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
          /* The remainder should not be a constant, otherwise we
             end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
             increased the number of multiplications necessary.  */
          && TREE_CODE (arg10) != INTEGER_CST)
        {
          alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
                                  build_int_cst (TREE_TYPE (arg00),
                                                 int01 / int11));
          alt1 = arg10;
          same = maybe_same;
          if (swap)
            maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
        }
    }

  if (same)
    return fold_build2_loc (loc, MULT_EXPR, type,
                            fold_build2_loc (loc, code, type,
                                             fold_convert_loc (loc, type, alt0),
                                             fold_convert_loc (loc, type, alt1)),
                            fold_convert_loc (loc, type, same));

  return NULL_TREE;
}
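
/* Illustrative note (editor's addition): i * 4 + i * 12 matches the
   "same multiplicand" case and becomes i * 16, while x * 12 + y * 4
   takes the power-of-two path and becomes (x * 3 + y) * 4, factoring
   out the common power of two so the scaling by 4 can combine with
   addressing arithmetic.  */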
/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  if ((off == -1 && total_bytes > len)
      || off >= total_bytes)
    return 0;
  if (off == -1)
    off = 0;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
         number of bytes.  */
      value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);

      if (total_bytes > UNITS_PER_WORD)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      if (offset >= off
          && offset - off < len)
        ptr[offset - off] = value;
    }
  return MIN (len, total_bytes - off);
}
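
/* Illustrative note (editor's addition): on a little-endian target the
   32-bit constant 0x11223344 is emitted as the bytes 44 33 22 11; with
   OFF == 2 and LEN == 2 only the bytes 22 11 are stored and the
   function returns 2.  */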
/* Subroutine of native_encode_expr.  Encode the FIXED_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
{
  tree type = TREE_TYPE (expr);
  machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  FIXED_VALUE_TYPE value;
  tree i_value, i_type;

  if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return 0;

  i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);

  if (NULL_TREE == i_type
      || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
    return 0;

  value = TREE_FIXED_CST (expr);
  i_value = double_int_to_tree (i_type, value.data);

  return native_encode_int (i_value, ptr, len, off);
}
/* Subroutine of native_encode_expr.  Encode the REAL_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words, bitpos;
  unsigned char value;

  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  long tmp[6];

  if ((off == -1 && total_bytes > len)
      || off >= total_bytes)
    return 0;
  if (off == -1)
    off = 0;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));

  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));

      if (UNITS_PER_WORD < 4)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
      if (offset >= off
          && offset - off < len)
        ptr[offset - off] = value;
    }
  return MIN (len, total_bytes - off);
}
/* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
{
  int rsize, isize;
  tree part;

  part = TREE_REALPART (expr);
  rsize = native_encode_expr (part, ptr, len, off);
  if (off == -1
      && rsize == 0)
    return 0;
  part = TREE_IMAGPART (expr);
  if (off != -1)
    off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
  isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
  if (off == -1
      && isize != rsize)
    return 0;
  return rsize + isize;
}
/* Subroutine of native_encode_expr.  Encode the VECTOR_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
{
  unsigned i, count;
  int size, offset;
  tree itype, elem;

  offset = 0;
  count = VECTOR_CST_NELTS (expr);
  itype = TREE_TYPE (TREE_TYPE (expr));
  size = GET_MODE_SIZE (TYPE_MODE (itype));
  for (i = 0; i < count; i++)
    {
      if (off >= size)
        {
          off -= size;
          continue;
        }
      elem = VECTOR_CST_ELT (expr, i);
      int res = native_encode_expr (elem, ptr+offset, len-offset, off);
      if ((off == -1 && res != size)
          || res == 0)
        return 0;
      offset += res;
      if (off != -1)
        off = 0;
    }
  return offset;
}
/* Subroutine of native_encode_expr.  Encode the STRING_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
{
  tree type = TREE_TYPE (expr);
  HOST_WIDE_INT total_bytes;

  if (TREE_CODE (type) != ARRAY_TYPE
      || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
      || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
      || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
    return 0;
  total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
  if ((off == -1 && total_bytes > len)
      || off >= total_bytes)
    return 0;
  if (off == -1)
    off = 0;
  if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
    {
      int written = 0;
      if (off < TREE_STRING_LENGTH (expr))
        {
          written = MIN (len, TREE_STRING_LENGTH (expr) - off);
          memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
        }
      memset (ptr + written, 0,
              MIN (total_bytes - written, len - written));
    }
  else
    memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
  return MIN (total_bytes - off, len);
}
/* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
   REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
   buffer PTR of length LEN bytes.  If OFF is not -1 then start
   the encoding at byte offset OFF and encode at most LEN bytes.
   Return the number of bytes placed in the buffer, or zero upon failure.  */

int
native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
{
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return native_encode_int (expr, ptr, len, off);

    case REAL_CST:
      return native_encode_real (expr, ptr, len, off);

    case FIXED_CST:
      return native_encode_fixed (expr, ptr, len, off);

    case COMPLEX_CST:
      return native_encode_complex (expr, ptr, len, off);

    case VECTOR_CST:
      return native_encode_vector (expr, ptr, len, off);

    case STRING_CST:
      return native_encode_string (expr, ptr, len, off);

    default:
      return 0;
    }
}
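
/* Illustrative note (editor's addition): an OFF of -1 requests the
   whole object, and each worker then fails (returns 0) unless the
   complete encoding fits in LEN bytes; a non-negative OFF allows
   piecewise encoding of large constants.  */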
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_int (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));

  if (total_bytes > len
      || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return NULL_TREE;

  wide_int result = wi::from_buffer (ptr, total_bytes);

  return wide_int_to_tree (type, result);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a FIXED_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_fixed (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  double_int result;
  FIXED_VALUE_TYPE fixed_value;

  if (total_bytes > len
      || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return NULL_TREE;

  result = double_int::from_buffer (ptr, total_bytes);
  fixed_value = fixed_from_double_int (result, TYPE_MODE (type));

  return build_fixed (type, fixed_value);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a REAL_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_real (tree type, const unsigned char *ptr, int len)
{
  machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  int byte, offset, word, words, bitpos;
  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  REAL_VALUE_TYPE r;
  long tmp[6];

  total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  if (total_bytes > len || total_bytes > 24)
    return NULL_TREE;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  memset (tmp, 0, sizeof (tmp));
  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      if (UNITS_PER_WORD < 4)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];

      tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
    }

  real_from_target (&r, tmp, mode);
  return build_real (type, r);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_complex (tree type, const unsigned char *ptr, int len)
{
  tree etype, rpart, ipart;
  int size;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  if (size * 2 > len)
    return NULL_TREE;
  rpart = native_interpret_expr (etype, ptr, size);
  if (!rpart)
    return NULL_TREE;
  ipart = native_interpret_expr (etype, ptr+size, size);
  if (!ipart)
    return NULL_TREE;
  return build_complex (type, rpart, ipart);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_vector (tree type, const unsigned char *ptr, int len)
{
  tree etype, elem;
  int i, size, count;
  tree *elements;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  count = TYPE_VECTOR_SUBPARTS (type);
  if (size * count > len)
    return NULL_TREE;

  elements = XALLOCAVEC (tree, count);
  for (i = count - 1; i >= 0; i--)
    {
      elem = native_interpret_expr (etype, ptr+(i*size), size);
      if (!elem)
        return NULL_TREE;
      elements[i] = elem;
    }
  return build_vector (type, elements);
}
/* Subroutine of fold_view_convert_expr.  Interpret the contents of
   the buffer PTR of length LEN as a constant of type TYPE.  For
   INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
   we return a REAL_CST, etc...  If the buffer cannot be interpreted,
   return NULL_TREE.  */

tree
native_interpret_expr (tree type, const unsigned char *ptr, int len)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      return native_interpret_int (type, ptr, len);

    case REAL_TYPE:
      return native_interpret_real (type, ptr, len);

    case FIXED_POINT_TYPE:
      return native_interpret_fixed (type, ptr, len);

    case COMPLEX_TYPE:
      return native_interpret_complex (type, ptr, len);

    case VECTOR_TYPE:
      return native_interpret_vector (type, ptr, len);

    default:
      return NULL_TREE;
    }
}

/* Returns true if we can interpret the contents of a native encoding
   as TYPE.  */

static bool
can_native_interpret_type_p (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case FIXED_POINT_TYPE:
    case REAL_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
      return true;
    default:
      return false;
    }
}
/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
   TYPE at compile-time.  If we're unable to perform the conversion
   return NULL_TREE.  */

static tree
fold_view_convert_expr (tree type, tree expr)
{
  /* We support up to 512-bit values (for V8DFmode).  */
  unsigned char buffer[64];
  int len;

  /* Check that the host and target are sane.  */
  if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    return NULL_TREE;

  len = native_encode_expr (expr, buffer, sizeof (buffer));
  if (len == 0)
    return NULL_TREE;

  return native_interpret_expr (type, buffer, len);
}
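
/* Illustrative note (editor's addition): this is how a
   VIEW_CONVERT_EXPR reinterpreting the INTEGER_CST 0x3f800000 as
   float constant-folds to 1.0f: the constant is serialized into
   target byte order and the same bytes are re-read as a REAL_CST, so
   the result is correct on either endianness.  */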
/* Build an expression for the address of T.  Folds away INDIRECT_REF
   to avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its
     address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  if (TREE_CODE (t) == INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);

      if (TREE_TYPE (t) != ptrtype)
        t = build1_loc (loc, NOP_EXPR, ptrtype, t);
    }
  else if (TREE_CODE (t) == MEM_REF
           && integer_zerop (TREE_OPERAND (t, 1)))
    return TREE_OPERAND (t, 0);
  else if (TREE_CODE (t) == MEM_REF
           && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
    return fold_binary (POINTER_PLUS_EXPR, ptrtype,
                        TREE_OPERAND (t, 0),
                        convert_to_ptrofftype (TREE_OPERAND (t, 1)));
  else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
    {
      t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));

      if (TREE_TYPE (t) != ptrtype)
        t = fold_convert_loc (loc, ptrtype, t);
    }
  else
    t = build1_loc (loc, ADDR_EXPR, ptrtype, t);

  return t;
}
/* Build an expression for the address of T.  */

tree
build_fold_addr_expr_loc (location_t loc, tree t)
{
  tree ptrtype = build_pointer_type (TREE_TYPE (t));

  return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
}
/* Fold a unary expression of code CODE and type TYPE with operand
   OP0.  Return the folded expression if folding is successful.
   Otherwise, return NULL_TREE.  */

tree
fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
{
  tree tem;
  tree arg0;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 1);

  arg0 = op0;
  if (arg0)
    {
      if (CONVERT_EXPR_CODE_P (code)
          || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
        {
          /* Don't use STRIP_NOPS, because signedness of argument type
             matters.  */
          STRIP_SIGN_NOPS (arg0);
        }
      else
        {
          /* Strip any conversions that don't change the mode.  This
             is safe for every expression, except for a comparison
             expression because its signedness is derived from its
             operands.

             Note that this is done as an internal manipulation within
             the constant folder, in order to find the simplest
             representation of the arguments so that their form can be
             studied.  In any cases, the appropriate type conversions
             should be put back in the tree that will get out of the
             constant folder.  */
          STRIP_NOPS (arg0);
        }

      if (CONSTANT_CLASS_P (arg0))
        {
          tree tem = const_unop (code, type, arg0);
          if (tem)
            {
              if (TREE_TYPE (tem) != type)
                tem = fold_convert_loc (loc, type, tem);
              return tem;
            }
        }
    }

  tem = generic_simplify (loc, code, type, op0);
  if (tem)
    return tem;

  if (TREE_CODE_CLASS (code) == tcc_unary)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
        return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
                       fold_build1_loc (loc, code, type,
                                        fold_convert_loc (loc, TREE_TYPE (op0),
                                                          TREE_OPERAND (arg0, 1))));
      else if (TREE_CODE (arg0) == COND_EXPR)
        {
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg02 = TREE_OPERAND (arg0, 2);
          if (! VOID_TYPE_P (TREE_TYPE (arg01)))
            arg01 = fold_build1_loc (loc, code, type,
                                     fold_convert_loc (loc,
                                                       TREE_TYPE (op0), arg01));
          if (! VOID_TYPE_P (TREE_TYPE (arg02)))
            arg02 = fold_build1_loc (loc, code, type,
                                     fold_convert_loc (loc,
                                                       TREE_TYPE (op0), arg02));
          tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
                                 arg01, arg02);

          /* If this was a conversion, and all we did was to move it
             inside the COND_EXPR, bring it back out.  But leave it if
             it is a conversion from integer to integer and the
             result precision is no wider than a word since such a
             conversion is cheap and may be optimized away by combine,
             while it couldn't if it were outside the COND_EXPR.  Then return
             so we don't get into an infinite recursion loop taking the
             conversion out and then back in.  */

          if ((CONVERT_EXPR_CODE_P (code)
               || code == NON_LVALUE_EXPR)
              && TREE_CODE (tem) == COND_EXPR
              && TREE_CODE (TREE_OPERAND (tem, 1)) == code
              && TREE_CODE (TREE_OPERAND (tem, 2)) == code
              && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
              && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
              && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
                  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
              && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
                     && (INTEGRAL_TYPE_P
                         (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
                     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
                  || flag_syntax_only))
            tem = build1_loc (loc, code, type,
                              build3 (COND_EXPR,
                                      TREE_TYPE (TREE_OPERAND
                                                 (TREE_OPERAND (tem, 1), 0)),
                                      TREE_OPERAND (tem, 0),
                                      TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
                                      TREE_OPERAND (TREE_OPERAND (tem, 2),
                                                    0)));
          return tem;
        }
    }

  switch (code)
    {
    case NON_LVALUE_EXPR:
      if (!maybe_lvalue_p (op0))
        return fold_convert_loc (loc, type, op0);
      return NULL_TREE;

    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      if (COMPARISON_CLASS_P (op0))
        {
          /* If we have (type) (a CMP b) and type is an integral type, return
             new expression involving the new type.  Canonicalize
             (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
             non-integral type.
             Do not fold the result as that would not simplify further, also
             folding again results in recursions.  */
          if (TREE_CODE (type) == BOOLEAN_TYPE)
            return build2_loc (loc, TREE_CODE (op0), type,
                               TREE_OPERAND (op0, 0),
                               TREE_OPERAND (op0, 1));
          else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
                   && TREE_CODE (type) != VECTOR_TYPE)
            return build3_loc (loc, COND_EXPR, type, op0,
                               constant_boolean_node (true, type),
                               constant_boolean_node (false, type));
        }

      /* Handle (T *)&A.B.C for A being of type T and B and C
         living at offset zero.  This occurs frequently in
         C++ upcasting and then accessing the base.  */
      if (TREE_CODE (op0) == ADDR_EXPR
          && POINTER_TYPE_P (type)
          && handled_component_p (TREE_OPERAND (op0, 0)))
        {
          HOST_WIDE_INT bitsize, bitpos;
          tree offset;
          machine_mode mode;
          int unsignedp, volatilep;
          tree base = TREE_OPERAND (op0, 0);
          base = get_inner_reference (base, &bitsize, &bitpos, &offset,
                                      &mode, &unsignedp, &volatilep, false);
          /* If the reference was to a (constant) zero offset, we can use
             the address of the base if it has the same base type
             as the result type and the pointer type is unqualified.  */
          if (! offset && bitpos == 0
              && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
                  == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
              && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
            return fold_convert_loc (loc, type,
                                     build_fold_addr_expr_loc (loc, base));
        }

      if (TREE_CODE (op0) == MODIFY_EXPR
          && TREE_CONSTANT (TREE_OPERAND (op0, 1))
          /* Detect assigning a bitfield.  */
          && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
               && DECL_BIT_FIELD
                  (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
        {
          /* Don't leave an assignment inside a conversion
             unless assigning a bitfield.  */
          tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
          /* First do the assignment, then return converted constant.  */
          tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
          TREE_NO_WARNING (tem) = 1;
          TREE_USED (tem) = 1;
          return tem;
        }

      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
         constants (if x has signed type, the sign bit cannot be set
         in c).  This folds extension into the BIT_AND_EXPR.
         ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
         very likely don't have maximal range for their precision and this
         transformation effectively doesn't preserve non-maximal ranges.  */
      if (TREE_CODE (type) == INTEGER_TYPE
          && TREE_CODE (op0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
        {
          tree and_expr = op0;
          tree and0 = TREE_OPERAND (and_expr, 0);
          tree and1 = TREE_OPERAND (and_expr, 1);
          int change = 0;

          if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
              || (TYPE_PRECISION (type)
                  <= TYPE_PRECISION (TREE_TYPE (and_expr))))
            change = 1;
          else if (TYPE_PRECISION (TREE_TYPE (and1))
                   <= HOST_BITS_PER_WIDE_INT
                   && tree_fits_uhwi_p (and1))
            {
              unsigned HOST_WIDE_INT cst;

              cst = tree_to_uhwi (and1);
              cst &= HOST_WIDE_INT_M1U
                     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
              change = (cst == 0);
              if (change
                  && !flag_syntax_only
                  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
                      == ZERO_EXTEND))
                {
                  tree uns = unsigned_type_for (TREE_TYPE (and0));
                  and0 = fold_convert_loc (loc, uns, and0);
                  and1 = fold_convert_loc (loc, uns, and1);
                }
            }
          if (change)
            {
              tem = force_fit_type (type, wi::to_widest (and1), 0,
                                    TREE_OVERFLOW (and1));
              return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                      fold_convert_loc (loc, type, and0), tem);
            }
        }

      /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
         when one of the new casts will fold away.  Conservatively we assume
         that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST.  */
      if (POINTER_TYPE_P (type)
          && TREE_CODE (arg0) == POINTER_PLUS_EXPR
          && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
          && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
              || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
              || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);

          return fold_build_pointer_plus_loc
                   (loc, fold_convert_loc (loc, type, arg00), arg01);
        }

      /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
         of the same precision, and X is an integer type not narrower than
         types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (op0) == BIT_NOT_EXPR
          && INTEGRAL_TYPE_P (TREE_TYPE (op0))
          && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
          && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
        {
          tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
          if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
              && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
            return fold_build1_loc (loc, BIT_NOT_EXPR, type,
                                    fold_convert_loc (loc, type, tem));
        }

      /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
         type of X and Y (integer types only).  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (op0) == MULT_EXPR
          && INTEGRAL_TYPE_P (TREE_TYPE (op0))
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
        {
          /* Be careful not to introduce new overflows.  */
          tree mult_type;
          if (TYPE_OVERFLOW_WRAPS (type))
            mult_type = type;
          else
            mult_type = unsigned_type_for (type);

          if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
            {
              tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
                                     fold_convert_loc (loc, mult_type,
                                                       TREE_OPERAND (op0, 0)),
                                     fold_convert_loc (loc, mult_type,
                                                       TREE_OPERAND (op0, 1)));
              return fold_convert_loc (loc, type, tem);
            }
        }

      return NULL_TREE;

    case VIEW_CONVERT_EXPR:
      if (TREE_CODE (op0) == MEM_REF)
        return fold_build2_loc (loc, MEM_REF, type,
                                TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));

      return NULL_TREE;

    case NEGATE_EXPR:
      tem = fold_negate_expr (loc, arg0);
      if (tem)
        return fold_convert_loc (loc, type, tem);
      return NULL_TREE;

    case ABS_EXPR:
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      if (TREE_CODE (arg0) == NOP_EXPR
          && TREE_CODE (type) == REAL_TYPE)
        {
          tree targ0 = strip_float_extensions (arg0);
          if (targ0 != arg0)
            return fold_convert_loc (loc, type,
                                     fold_build1_loc (loc, ABS_EXPR,
                                                      TREE_TYPE (targ0),
                                                      targ0));
        }

      /* Strip sign ops from argument.  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = fold_strip_sign_ops (arg0);
          if (tem)
            return fold_build1_loc (loc, ABS_EXPR, type,
                                    fold_convert_loc (loc, type, tem));
        }
      return NULL_TREE;

    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
        {
          tree itype = TREE_TYPE (type);
          tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
          tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
                                  negate_expr (ipart));
        }
      if (TREE_CODE (arg0) == CONJ_EXPR)
        return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      return NULL_TREE;

    case BIT_NOT_EXPR:
      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
                                    fold_convert_loc (loc, type,
                                                      TREE_OPERAND (arg0, 0)))))
        return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
               && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
                                         fold_convert_loc (loc, type,
                                                           TREE_OPERAND (arg0, 1)))))
        return fold_build2_loc (loc, BIT_XOR_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)), tem);

      return NULL_TREE;

    case TRUTH_NOT_EXPR:
      /* Note that the operand of this must be an int
         and its values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language,
         but we don't handle values other than 1 correctly yet.)  */
      tem = fold_truth_not_expr (loc, arg0);
      if (!tem)
        return NULL_TREE;
      return fold_convert_loc (loc, type, tem);

    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
                                 fold_build1_loc (loc, REALPART_EXPR, itype,
                                                  TREE_OPERAND (arg0, 0)),
                                 fold_build1_loc (loc, REALPART_EXPR, itype,
                                                  TREE_OPERAND (arg0, 1)));
          return fold_convert_loc (loc, type, tem);
        }
      if (TREE_CODE (arg0) == CONJ_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build1_loc (loc, REALPART_EXPR, itype,
                                 TREE_OPERAND (arg0, 0));
          return fold_convert_loc (loc, type, tem);
        }
      if (TREE_CODE (arg0) == CALL_EXPR)
        {
          tree fn = get_callee_fndecl (arg0);
          if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
            switch (DECL_FUNCTION_CODE (fn))
              {
              CASE_FLT_FN (BUILT_IN_CEXPI):
                fn = mathfn_built_in (type, BUILT_IN_COS);
                if (fn)
                  return build_call_expr_loc (loc, fn, 1,
                                              CALL_EXPR_ARG (arg0, 0));
                break;

              default:
                break;
              }
        }
      return NULL_TREE;

    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return build_zero_cst (type);
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
                                 fold_build1_loc (loc, IMAGPART_EXPR, itype,
                                                  TREE_OPERAND (arg0, 0)),
                                 fold_build1_loc (loc, IMAGPART_EXPR, itype,
                                                  TREE_OPERAND (arg0, 1)));
          return fold_convert_loc (loc, type, tem);
        }
      if (TREE_CODE (arg0) == CONJ_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build1_loc (loc, IMAGPART_EXPR, itype,
                                 TREE_OPERAND (arg0, 0));
          return fold_convert_loc (loc, type, negate_expr (tem));
        }
      if (TREE_CODE (arg0) == CALL_EXPR)
        {
          tree fn = get_callee_fndecl (arg0);
          if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
            switch (DECL_FUNCTION_CODE (fn))
              {
              CASE_FLT_FN (BUILT_IN_CEXPI):
                fn = mathfn_built_in (type, BUILT_IN_SIN);
                if (fn)
                  return build_call_expr_loc (loc, fn, 1,
                                              CALL_EXPR_ARG (arg0, 0));
                break;

              default:
                break;
              }
        }
      return NULL_TREE;

    case INDIRECT_REF:
      /* Fold *&X to X if X is an lvalue.  */
      if (TREE_CODE (op0) == ADDR_EXPR)
        {
          tree op00 = TREE_OPERAND (op0, 0);
          if ((TREE_CODE (op00) == VAR_DECL
               || TREE_CODE (op00) == PARM_DECL
               || TREE_CODE (op00) == RESULT_DECL)
              && !TREE_READONLY (op00))
            return op00;
        }
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* If the operation was a conversion do _not_ mark a resulting constant
   with TREE_OVERFLOW if the original constant was not.  These conversions
   have implementation defined behavior and retaining the TREE_OVERFLOW
   flag here would confuse later passes such as VRP.  */

tree
fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
                                tree type, tree op0)
{
  tree res = fold_unary_loc (loc, code, type, op0);
  if (res
      && TREE_CODE (res) == INTEGER_CST
      && TREE_CODE (op0) == INTEGER_CST
      && CONVERT_EXPR_CODE_P (code))
    TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);

  return res;
}
/* Fold a binary bitwise/truth expression of code CODE and type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting expression.
   ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_truth_andor (location_t loc, enum tree_code code, tree type,
                  tree arg0, tree arg1, tree op0, tree op1)
{
  tree tem;

  /* We only do these simplifications if we are optimizing.  */
  if (!optimize)
    return NULL_TREE;

  /* Check for things like (A || B) && (A || C).  We can convert this
     to A || (B && C).  Note that either operator can be any of the four
     truth and/or operations and the transformation will still be
     valid.  Also note that we only care about order for the
     ANDIF and ORIF operators.  If B contains side effects, this
     might change the truth-value of A.  */
  if (TREE_CODE (arg0) == TREE_CODE (arg1)
      && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
          || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
          || TREE_CODE (arg0) == TRUTH_AND_EXPR
          || TREE_CODE (arg0) == TRUTH_OR_EXPR)
      && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
    {
      tree a00 = TREE_OPERAND (arg0, 0);
      tree a01 = TREE_OPERAND (arg0, 1);
      tree a10 = TREE_OPERAND (arg1, 0);
      tree a11 = TREE_OPERAND (arg1, 1);
      int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
                          || TREE_CODE (arg0) == TRUTH_AND_EXPR)
                         && (code == TRUTH_AND_EXPR
                             || code == TRUTH_OR_EXPR));

      if (operand_equal_p (a00, a10, 0))
        return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
                                fold_build2_loc (loc, code, type, a01, a11));
      else if (commutative && operand_equal_p (a00, a11, 0))
        return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
                                fold_build2_loc (loc, code, type, a01, a10));
      else if (commutative && operand_equal_p (a01, a10, 0))
        return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
                                fold_build2_loc (loc, code, type, a00, a11));

      /* This case is tricky because we must either have commutative
         operators or else A10 must not have side-effects.  */

      else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
               && operand_equal_p (a01, a11, 0))
        return fold_build2_loc (loc, TREE_CODE (arg0), type,
                                fold_build2_loc (loc, code, type, a00, a10),
                                a01);
    }

  /* See if we can build a range comparison.  */
  if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
    return tem;

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
      if (tem)
        return fold_build2_loc (loc, code, type, tem, arg1);
    }

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
      if (tem)
        return fold_build2_loc (loc, code, type, arg0, tem);
    }

  /* Check for the possibility of merging component references.  If our
     lhs is another similar operation, try to merge its rhs with our
     rhs.  Then try to merge our lhs and rhs.  */
  if (TREE_CODE (arg0) == code
      && 0 != (tem = fold_truth_andor_1 (loc, code, type,
                                         TREE_OPERAND (arg0, 1), arg1)))
    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

  if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
    return tem;

  if (LOGICAL_OP_NON_SHORT_CIRCUIT
      && (code == TRUTH_AND_EXPR
          || code == TRUTH_ANDIF_EXPR
          || code == TRUTH_OR_EXPR
          || code == TRUTH_ORIF_EXPR))
    {
      enum tree_code ncode, icode;

      ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
              ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
      icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;

      /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
         or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
         We don't want to pack more than two leaves to a non-IF AND/OR
         expression.
         If the tree code of the left-hand operand isn't an AND/OR-IF code
         and isn't equal to IF-CODE, then we don't want to add the right-hand
         operand.  If the inner right-hand side of the left-hand operand has
         side-effects, or isn't simple, then we can't add to it, as otherwise
         we might destroy the if-sequence.  */
      if (TREE_CODE (arg0) == icode
          && simple_operand_p_2 (arg1)
          /* Needed for sequence points to handle trappings, and
             side-effects.  */
          && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
        {
          tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
                                 arg1);
          return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
                                  tem);
        }
      /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
         or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
      else if (TREE_CODE (arg1) == icode
               && simple_operand_p_2 (arg0)
               /* Needed for sequence points to handle trappings, and
                  side-effects.  */
               && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
        {
          tem = fold_build2_loc (loc, ncode, type,
                                 arg0, TREE_OPERAND (arg1, 0));
          return fold_build2_loc (loc, icode, type, tem,
                                  TREE_OPERAND (arg1, 1));
        }
      /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
         into (A OR B).
         For sequence point consistency, we need to check for trapping,
         and side-effects.  */
      else if (code == icode && simple_operand_p_2 (arg0)
               && simple_operand_p_2 (arg1))
        return fold_build2_loc (loc, ncode, type, arg0, arg1);
    }

  return NULL_TREE;
}
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code compl_code;

  if (code == MIN_EXPR)
    compl_code = MAX_EXPR;
  else if (code == MAX_EXPR)
    compl_code = MIN_EXPR;
  else
    gcc_unreachable ();

  /* MIN (MAX (a, b), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));

  /* MIN (MAX (b, a), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
      && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));

  /* MIN (a, MAX (a, b)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));

  /* MIN (a, MAX (b, a)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));

  return NULL_TREE;
}
8079 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8080 by changing CODE to reduce the magnitude of constants involved in
8081 ARG0 of the comparison.
8082 Returns a canonicalized comparison tree if a simplification was
8083 possible, otherwise returns NULL_TREE.
8084 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8085 valid if signed overflow is undefined. */
8088 maybe_canonicalize_comparison_1 (location_t loc
, enum tree_code code
, tree type
,
8089 tree arg0
, tree arg1
,
8090 bool *strict_overflow_p
)
8092 enum tree_code code0
= TREE_CODE (arg0
);
8093 tree t
, cst0
= NULL_TREE
;
8096 /* Match A +- CST code arg1. We can change this only if overflow
8098 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
8099 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
)))
8100 /* In principle pointers also have undefined overflow behavior,
8101 but that causes problems elsewhere. */
8102 && !POINTER_TYPE_P (TREE_TYPE (arg0
))
8103 && (code0
== MINUS_EXPR
8104 || code0
== PLUS_EXPR
)
8105 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
))
8108 /* Identify the constant in arg0 and its sign. */
8109 cst0
= TREE_OPERAND (arg0
, 1);
8110 sgn0
= tree_int_cst_sgn (cst0
);
8112 /* Overflowed constants and zero will cause problems. */
8113 if (integer_zerop (cst0
)
8114 || TREE_OVERFLOW (cst0
))
8117 /* See if we can reduce the magnitude of the constant in
8118 arg0 by changing the comparison code. */
8119 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8121 && code0
== ((sgn0
== -1) ? PLUS_EXPR
: MINUS_EXPR
))
8123 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8124 else if (code
== GT_EXPR
8125 && code0
== ((sgn0
== -1) ? MINUS_EXPR
: PLUS_EXPR
))
8127 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8128 else if (code
== LE_EXPR
8129 && code0
== ((sgn0
== -1) ? MINUS_EXPR
: PLUS_EXPR
))
8131 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8132 else if (code
== GE_EXPR
8133 && code0
== ((sgn0
== -1) ? PLUS_EXPR
: MINUS_EXPR
))
8137 *strict_overflow_p
= true;
8139 /* Now build the constant reduced in magnitude. But not if that
8140 would produce one outside of its types range. */
8141 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0
))
8143 && TYPE_MIN_VALUE (TREE_TYPE (cst0
))
8144 && tree_int_cst_equal (cst0
, TYPE_MIN_VALUE (TREE_TYPE (cst0
))))
8146 && TYPE_MAX_VALUE (TREE_TYPE (cst0
))
8147 && tree_int_cst_equal (cst0
, TYPE_MAX_VALUE (TREE_TYPE (cst0
))))))
8150 t
= int_const_binop (sgn0
== -1 ? PLUS_EXPR
: MINUS_EXPR
,
8151 cst0
, build_int_cst (TREE_TYPE (cst0
), 1));
8152 t
= fold_build2_loc (loc
, code0
, TREE_TYPE (arg0
), TREE_OPERAND (arg0
, 0), t
);
8153 t
= fold_convert (TREE_TYPE (arg1
), t
);
8155 return fold_build2_loc (loc
, code
, type
, t
, arg1
);
8158 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8159 overflow further. Try to decrease the magnitude of constants involved
8160 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8161 and put sole constants at the second argument position.
8162 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8165 maybe_canonicalize_comparison (location_t loc
, enum tree_code code
, tree type
,
8166 tree arg0
, tree arg1
)
8169 bool strict_overflow_p
;
8170 const char * const warnmsg
= G_("assuming signed overflow does not occur "
8171 "when reducing constant in comparison");
8173 /* Try canonicalization by simplifying arg0. */
8174 strict_overflow_p
= false;
8175 t
= maybe_canonicalize_comparison_1 (loc
, code
, type
, arg0
, arg1
,
8176 &strict_overflow_p
);
8179 if (strict_overflow_p
)
8180 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MAGNITUDE
);
8184 /* Try canonicalization by simplifying arg1 using the swapped
8186 code
= swap_tree_comparison (code
);
8187 strict_overflow_p
= false;
8188 t
= maybe_canonicalize_comparison_1 (loc
, code
, type
, arg1
, arg0
,
8189 &strict_overflow_p
);
8190 if (t
&& strict_overflow_p
)
8191 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MAGNITUDE
);
8195 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8196 space. This is used to avoid issuing overflow warnings for
8197 expressions like &p->x which can not wrap. */
8200 pointer_may_wrap_p (tree base
, tree offset
, HOST_WIDE_INT bitpos
)
8202 if (!POINTER_TYPE_P (TREE_TYPE (base
)))
8209 int precision
= TYPE_PRECISION (TREE_TYPE (base
));
8210 if (offset
== NULL_TREE
)
8211 wi_offset
= wi::zero (precision
);
8212 else if (TREE_CODE (offset
) != INTEGER_CST
|| TREE_OVERFLOW (offset
))
8218 wide_int units
= wi::shwi (bitpos
/ BITS_PER_UNIT
, precision
);
8219 wide_int total
= wi::add (wi_offset
, units
, UNSIGNED
, &overflow
);
8223 if (!wi::fits_uhwi_p (total
))
8226 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (TREE_TYPE (base
)));
8230 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8232 if (TREE_CODE (base
) == ADDR_EXPR
)
8234 HOST_WIDE_INT base_size
;
8236 base_size
= int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base
, 0)));
8237 if (base_size
> 0 && size
< base_size
)
8241 return total
.to_uhwi () > (unsigned HOST_WIDE_INT
) size
;
8244 /* Return the HOST_WIDE_INT least significant bits of T, a sizetype
8245 kind INTEGER_CST. This makes sure to properly sign-extend the
8248 static HOST_WIDE_INT
8249 size_low_cst (const_tree t
)
8251 HOST_WIDE_INT w
= TREE_INT_CST_ELT (t
, 0);
8252 int prec
= TYPE_PRECISION (TREE_TYPE (t
));
8253 if (prec
< HOST_BITS_PER_WIDE_INT
)
8254 return sext_hwi (w
, prec
);
8258 /* Subroutine of fold_binary. This routine performs all of the
8259 transformations that are common to the equality/inequality
8260 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8261 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8262 fold_binary should call fold_binary. Fold a comparison with
8263 tree code CODE and type TYPE with operands OP0 and OP1. Return
8264 the folded comparison or NULL_TREE. */
8267 fold_comparison (location_t loc
, enum tree_code code
, tree type
,
8270 const bool equality_code
= (code
== EQ_EXPR
|| code
== NE_EXPR
);
8271 tree arg0
, arg1
, tem
;
8276 STRIP_SIGN_NOPS (arg0
);
8277 STRIP_SIGN_NOPS (arg1
);
8279 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8280 if ((TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8282 || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
8283 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))))
8284 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8285 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1))
8286 && TREE_CODE (arg1
) == INTEGER_CST
8287 && !TREE_OVERFLOW (arg1
))
8289 const enum tree_code
8290 reverse_op
= TREE_CODE (arg0
) == PLUS_EXPR
? MINUS_EXPR
: PLUS_EXPR
;
8291 tree const1
= TREE_OPERAND (arg0
, 1);
8292 tree const2
= fold_convert_loc (loc
, TREE_TYPE (const1
), arg1
);
8293 tree variable
= TREE_OPERAND (arg0
, 0);
8294 tree new_const
= int_const_binop (reverse_op
, const2
, const1
);
8296 /* If the constant operation overflowed this can be
8297 simplified as a comparison against INT_MAX/INT_MIN. */
8298 if (TREE_OVERFLOW (new_const
)
8299 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
8301 int const1_sgn
= tree_int_cst_sgn (const1
);
8302 enum tree_code code2
= code
;
8304 /* Get the sign of the constant on the lhs if the
8305 operation were VARIABLE + CONST1. */
8306 if (TREE_CODE (arg0
) == MINUS_EXPR
)
8307 const1_sgn
= -const1_sgn
;
8309 /* The sign of the constant determines if we overflowed
8310 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8311 Canonicalize to the INT_MIN overflow by swapping the comparison
8313 if (const1_sgn
== -1)
8314 code2
= swap_tree_comparison (code
);
8316 /* We now can look at the canonicalized case
8317 VARIABLE + 1 CODE2 INT_MIN
8318 and decide on the result. */
8325 omit_one_operand_loc (loc
, type
, boolean_false_node
, variable
);
8331 omit_one_operand_loc (loc
, type
, boolean_true_node
, variable
);
8340 fold_overflow_warning ("assuming signed overflow does not occur "
8341 "when changing X +- C1 cmp C2 to "
8343 WARN_STRICT_OVERFLOW_COMPARISON
);
8344 return fold_build2_loc (loc
, code
, type
, variable
, new_const
);
8348 /* For comparisons of pointers we can decompose it to a compile time
8349 comparison of the base objects and the offsets into the object.
8350 This requires at least one operand being an ADDR_EXPR or a
8351 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8352 if (POINTER_TYPE_P (TREE_TYPE (arg0
))
8353 && (TREE_CODE (arg0
) == ADDR_EXPR
8354 || TREE_CODE (arg1
) == ADDR_EXPR
8355 || TREE_CODE (arg0
) == POINTER_PLUS_EXPR
8356 || TREE_CODE (arg1
) == POINTER_PLUS_EXPR
))
8358 tree base0
, base1
, offset0
= NULL_TREE
, offset1
= NULL_TREE
;
8359 HOST_WIDE_INT bitsize
, bitpos0
= 0, bitpos1
= 0;
8361 int volatilep
, unsignedp
;
8362 bool indirect_base0
= false, indirect_base1
= false;
8364 /* Get base and offset for the access. Strip ADDR_EXPR for
8365 get_inner_reference, but put it back by stripping INDIRECT_REF
8366 off the base object if possible. indirect_baseN will be true
8367 if baseN is not an address but refers to the object itself. */
8369 if (TREE_CODE (arg0
) == ADDR_EXPR
)
8371 base0
= get_inner_reference (TREE_OPERAND (arg0
, 0),
8372 &bitsize
, &bitpos0
, &offset0
, &mode
,
8373 &unsignedp
, &volatilep
, false);
8374 if (TREE_CODE (base0
) == INDIRECT_REF
)
8375 base0
= TREE_OPERAND (base0
, 0);
8377 indirect_base0
= true;
8379 else if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
)
8381 base0
= TREE_OPERAND (arg0
, 0);
8382 STRIP_SIGN_NOPS (base0
);
8383 if (TREE_CODE (base0
) == ADDR_EXPR
)
8385 base0
= TREE_OPERAND (base0
, 0);
8386 indirect_base0
= true;
8388 offset0
= TREE_OPERAND (arg0
, 1);
8389 if (tree_fits_shwi_p (offset0
))
8391 HOST_WIDE_INT off
= size_low_cst (offset0
);
8392 if ((HOST_WIDE_INT
) (((unsigned HOST_WIDE_INT
) off
)
8394 / BITS_PER_UNIT
== (HOST_WIDE_INT
) off
)
8396 bitpos0
= off
* BITS_PER_UNIT
;
8397 offset0
= NULL_TREE
;
8403 if (TREE_CODE (arg1
) == ADDR_EXPR
)
8405 base1
= get_inner_reference (TREE_OPERAND (arg1
, 0),
8406 &bitsize
, &bitpos1
, &offset1
, &mode
,
8407 &unsignedp
, &volatilep
, false);
8408 if (TREE_CODE (base1
) == INDIRECT_REF
)
8409 base1
= TREE_OPERAND (base1
, 0);
8411 indirect_base1
= true;
8413 else if (TREE_CODE (arg1
) == POINTER_PLUS_EXPR
)
8415 base1
= TREE_OPERAND (arg1
, 0);
8416 STRIP_SIGN_NOPS (base1
);
8417 if (TREE_CODE (base1
) == ADDR_EXPR
)
8419 base1
= TREE_OPERAND (base1
, 0);
8420 indirect_base1
= true;
8422 offset1
= TREE_OPERAND (arg1
, 1);
8423 if (tree_fits_shwi_p (offset1
))
8425 HOST_WIDE_INT off
= size_low_cst (offset1
);
8426 if ((HOST_WIDE_INT
) (((unsigned HOST_WIDE_INT
) off
)
8428 / BITS_PER_UNIT
== (HOST_WIDE_INT
) off
)
8430 bitpos1
= off
* BITS_PER_UNIT
;
8431 offset1
= NULL_TREE
;
8436 /* If we have equivalent bases we might be able to simplify. */
8437 if (indirect_base0
== indirect_base1
8438 && operand_equal_p (base0
, base1
, 0))
8440 /* We can fold this expression to a constant if the non-constant
8441 offset parts are equal. */
8442 if ((offset0
== offset1
8443 || (offset0
&& offset1
8444 && operand_equal_p (offset0
, offset1
, 0)))
8447 || (indirect_base0
&& DECL_P (base0
))
8448 || POINTER_TYPE_OVERFLOW_UNDEFINED
))
8452 && bitpos0
!= bitpos1
8453 && (pointer_may_wrap_p (base0
, offset0
, bitpos0
)
8454 || pointer_may_wrap_p (base1
, offset1
, bitpos1
)))
8455 fold_overflow_warning (("assuming pointer wraparound does not "
8456 "occur when comparing P +- C1 with "
8458 WARN_STRICT_OVERFLOW_CONDITIONAL
);
8463 return constant_boolean_node (bitpos0
== bitpos1
, type
);
8465 return constant_boolean_node (bitpos0
!= bitpos1
, type
);
8467 return constant_boolean_node (bitpos0
< bitpos1
, type
);
8469 return constant_boolean_node (bitpos0
<= bitpos1
, type
);
8471 return constant_boolean_node (bitpos0
>= bitpos1
, type
);
8473 return constant_boolean_node (bitpos0
> bitpos1
, type
);
8477 /* We can simplify the comparison to a comparison of the variable
8478 offset parts if the constant offset parts are equal.
8479 Be careful to use signed sizetype here because otherwise we
8480 mess with array offsets in the wrong way. This is possible
8481 because pointer arithmetic is restricted to retain within an
8482 object and overflow on pointer differences is undefined as of
8483 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8484 else if (bitpos0
== bitpos1
8486 || (indirect_base0
&& DECL_P (base0
))
8487 || POINTER_TYPE_OVERFLOW_UNDEFINED
))
8489 /* By converting to signed sizetype we cover middle-end pointer
8490 arithmetic which operates on unsigned pointer types of size
8491 type size and ARRAY_REF offsets which are properly sign or
8492 zero extended from their type in case it is narrower than
8494 if (offset0
== NULL_TREE
)
8495 offset0
= build_int_cst (ssizetype
, 0);
8497 offset0
= fold_convert_loc (loc
, ssizetype
, offset0
);
8498 if (offset1
== NULL_TREE
)
8499 offset1
= build_int_cst (ssizetype
, 0);
8501 offset1
= fold_convert_loc (loc
, ssizetype
, offset1
);
8504 && (pointer_may_wrap_p (base0
, offset0
, bitpos0
)
8505 || pointer_may_wrap_p (base1
, offset1
, bitpos1
)))
8506 fold_overflow_warning (("assuming pointer wraparound does not "
8507 "occur when comparing P +- C1 with "
8509 WARN_STRICT_OVERFLOW_COMPARISON
);
8511 return fold_build2_loc (loc
, code
, type
, offset0
, offset1
);
8514 /* For equal offsets we can simplify to a comparison of the
8516 else if (bitpos0
== bitpos1
8518 ? base0
!= TREE_OPERAND (arg0
, 0) : base0
!= arg0
)
8520 ? base1
!= TREE_OPERAND (arg1
, 0) : base1
!= arg1
)
8521 && ((offset0
== offset1
)
8522 || (offset0
&& offset1
8523 && operand_equal_p (offset0
, offset1
, 0))))
8526 base0
= build_fold_addr_expr_loc (loc
, base0
);
8528 base1
= build_fold_addr_expr_loc (loc
, base1
);
8529 return fold_build2_loc (loc
, code
, type
, base0
, base1
);
8533 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8534 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8535 the resulting offset is smaller in absolute value than the
8536 original one and has the same sign. */
8537 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
8538 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))
8539 && (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8540 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8541 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1)))
8542 && (TREE_CODE (arg1
) == PLUS_EXPR
|| TREE_CODE (arg1
) == MINUS_EXPR
)
8543 && (TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
8544 && !TREE_OVERFLOW (TREE_OPERAND (arg1
, 1))))
8546 tree const1
= TREE_OPERAND (arg0
, 1);
8547 tree const2
= TREE_OPERAND (arg1
, 1);
8548 tree variable1
= TREE_OPERAND (arg0
, 0);
8549 tree variable2
= TREE_OPERAND (arg1
, 0);
8551 const char * const warnmsg
= G_("assuming signed overflow does not "
8552 "occur when combining constants around "
8555 /* Put the constant on the side where it doesn't overflow and is
8556 of lower absolute value and of same sign than before. */
8557 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
8558 ? MINUS_EXPR
: PLUS_EXPR
,
8560 if (!TREE_OVERFLOW (cst
)
8561 && tree_int_cst_compare (const2
, cst
) == tree_int_cst_sgn (const2
)
8562 && tree_int_cst_sgn (cst
) == tree_int_cst_sgn (const2
))
8564 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
8565 return fold_build2_loc (loc
, code
, type
,
8567 fold_build2_loc (loc
, TREE_CODE (arg1
),
8572 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
8573 ? MINUS_EXPR
: PLUS_EXPR
,
8575 if (!TREE_OVERFLOW (cst
)
8576 && tree_int_cst_compare (const1
, cst
) == tree_int_cst_sgn (const1
)
8577 && tree_int_cst_sgn (cst
) == tree_int_cst_sgn (const1
))
8579 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
8580 return fold_build2_loc (loc
, code
, type
,
8581 fold_build2_loc (loc
, TREE_CODE (arg0
),
8588 tem
= maybe_canonicalize_comparison (loc
, code
, type
, arg0
, arg1
);
8592 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8593 constant, we can simplify it. */
8594 if (TREE_CODE (arg1
) == INTEGER_CST
8595 && (TREE_CODE (arg0
) == MIN_EXPR
8596 || TREE_CODE (arg0
) == MAX_EXPR
)
8597 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
8599 tem
= optimize_minmax_comparison (loc
, code
, type
, op0
, op1
);
8604 /* If we are comparing an expression that just has comparisons
8605 of two integer values, arithmetic expressions of those comparisons,
8606 and constants, we can simplify it. There are only three cases
8607 to check: the two values can either be equal, the first can be
8608 greater, or the second can be greater. Fold the expression for
8609 those three values. Since each value must be 0 or 1, we have
8610 eight possibilities, each of which corresponds to the constant 0
8611 or 1 or one of the six possible comparisons.
8613 This handles common cases like (a > b) == 0 but also handles
8614 expressions like ((x > y) - (y > x)) > 0, which supposedly
8615 occur in macroized code. */
8617 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) != INTEGER_CST
)
8619 tree cval1
= 0, cval2
= 0;
8622 if (twoval_comparison_p (arg0
, &cval1
, &cval2
, &save_p
)
8623 /* Don't handle degenerate cases here; they should already
8624 have been handled anyway. */
8625 && cval1
!= 0 && cval2
!= 0
8626 && ! (TREE_CONSTANT (cval1
) && TREE_CONSTANT (cval2
))
8627 && TREE_TYPE (cval1
) == TREE_TYPE (cval2
)
8628 && INTEGRAL_TYPE_P (TREE_TYPE (cval1
))
8629 && TYPE_MAX_VALUE (TREE_TYPE (cval1
))
8630 && TYPE_MAX_VALUE (TREE_TYPE (cval2
))
8631 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1
)),
8632 TYPE_MAX_VALUE (TREE_TYPE (cval2
)), 0))
8634 tree maxval
= TYPE_MAX_VALUE (TREE_TYPE (cval1
));
8635 tree minval
= TYPE_MIN_VALUE (TREE_TYPE (cval1
));
8637 /* We can't just pass T to eval_subst in case cval1 or cval2
8638 was the same as ARG1. */
8641 = fold_build2_loc (loc
, code
, type
,
8642 eval_subst (loc
, arg0
, cval1
, maxval
,
8646 = fold_build2_loc (loc
, code
, type
,
8647 eval_subst (loc
, arg0
, cval1
, maxval
,
8651 = fold_build2_loc (loc
, code
, type
,
8652 eval_subst (loc
, arg0
, cval1
, minval
,
8656 /* All three of these results should be 0 or 1. Confirm they are.
8657 Then use those values to select the proper code to use. */
8659 if (TREE_CODE (high_result
) == INTEGER_CST
8660 && TREE_CODE (equal_result
) == INTEGER_CST
8661 && TREE_CODE (low_result
) == INTEGER_CST
)
8663 /* Make a 3-bit mask with the high-order bit being the
8664 value for `>', the next for '=', and the low for '<'. */
8665 switch ((integer_onep (high_result
) * 4)
8666 + (integer_onep (equal_result
) * 2)
8667 + integer_onep (low_result
))
8671 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
8692 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
8697 tem
= save_expr (build2 (code
, type
, cval1
, cval2
));
8698 SET_EXPR_LOCATION (tem
, loc
);
8701 return fold_build2_loc (loc
, code
, type
, cval1
, cval2
);
8706 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8707 into a single range test. */
8708 if ((TREE_CODE (arg0
) == TRUNC_DIV_EXPR
8709 || TREE_CODE (arg0
) == EXACT_DIV_EXPR
)
8710 && TREE_CODE (arg1
) == INTEGER_CST
8711 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8712 && !integer_zerop (TREE_OPERAND (arg0
, 1))
8713 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1))
8714 && !TREE_OVERFLOW (arg1
))
8716 tem
= fold_div_compare (loc
, code
, type
, arg0
, arg1
);
8717 if (tem
!= NULL_TREE
)
8725 /* Subroutine of fold_binary. Optimize complex multiplications of the
8726 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8727 argument EXPR represents the expression "z" of type TYPE. */
8730 fold_mult_zconjz (location_t loc
, tree type
, tree expr
)
8732 tree itype
= TREE_TYPE (type
);
8733 tree rpart
, ipart
, tem
;
8735 if (TREE_CODE (expr
) == COMPLEX_EXPR
)
8737 rpart
= TREE_OPERAND (expr
, 0);
8738 ipart
= TREE_OPERAND (expr
, 1);
8740 else if (TREE_CODE (expr
) == COMPLEX_CST
)
8742 rpart
= TREE_REALPART (expr
);
8743 ipart
= TREE_IMAGPART (expr
);
8747 expr
= save_expr (expr
);
8748 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, itype
, expr
);
8749 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, itype
, expr
);
8752 rpart
= save_expr (rpart
);
8753 ipart
= save_expr (ipart
);
8754 tem
= fold_build2_loc (loc
, PLUS_EXPR
, itype
,
8755 fold_build2_loc (loc
, MULT_EXPR
, itype
, rpart
, rpart
),
8756 fold_build2_loc (loc
, MULT_EXPR
, itype
, ipart
, ipart
));
8757 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, tem
,
8758 build_zero_cst (itype
));
8762 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
8763 CONSTRUCTOR ARG into array ELTS and return true if successful. */
8766 vec_cst_ctor_to_array (tree arg
, tree
*elts
)
8768 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg
)), i
;
8770 if (TREE_CODE (arg
) == VECTOR_CST
)
8772 for (i
= 0; i
< VECTOR_CST_NELTS (arg
); ++i
)
8773 elts
[i
] = VECTOR_CST_ELT (arg
, i
);
8775 else if (TREE_CODE (arg
) == CONSTRUCTOR
)
8777 constructor_elt
*elt
;
8779 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg
), i
, elt
)
8780 if (i
>= nelts
|| TREE_CODE (TREE_TYPE (elt
->value
)) == VECTOR_TYPE
)
8783 elts
[i
] = elt
->value
;
8787 for (; i
< nelts
; i
++)
8789 = fold_convert (TREE_TYPE (TREE_TYPE (arg
)), integer_zero_node
);
8793 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8794 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8795 NULL_TREE otherwise. */
8798 fold_vec_perm (tree type
, tree arg0
, tree arg1
, const unsigned char *sel
)
8800 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
8802 bool need_ctor
= false;
8804 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)) == nelts
8805 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)) == nelts
);
8806 if (TREE_TYPE (TREE_TYPE (arg0
)) != TREE_TYPE (type
)
8807 || TREE_TYPE (TREE_TYPE (arg1
)) != TREE_TYPE (type
))
8810 elts
= XALLOCAVEC (tree
, nelts
* 3);
8811 if (!vec_cst_ctor_to_array (arg0
, elts
)
8812 || !vec_cst_ctor_to_array (arg1
, elts
+ nelts
))
8815 for (i
= 0; i
< nelts
; i
++)
8817 if (!CONSTANT_CLASS_P (elts
[sel
[i
]]))
8819 elts
[i
+ 2 * nelts
] = unshare_expr (elts
[sel
[i
]]);
8824 vec
<constructor_elt
, va_gc
> *v
;
8825 vec_alloc (v
, nelts
);
8826 for (i
= 0; i
< nelts
; i
++)
8827 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, elts
[2 * nelts
+ i
]);
8828 return build_constructor (type
, v
);
8831 return build_vector (type
, &elts
[2 * nelts
]);
8834 /* Try to fold a pointer difference of type TYPE two address expressions of
8835 array references AREF0 and AREF1 using location LOC. Return a
8836 simplified expression for the difference or NULL_TREE. */
8839 fold_addr_of_array_ref_difference (location_t loc
, tree type
,
8840 tree aref0
, tree aref1
)
8842 tree base0
= TREE_OPERAND (aref0
, 0);
8843 tree base1
= TREE_OPERAND (aref1
, 0);
8844 tree base_offset
= build_int_cst (type
, 0);
8846 /* If the bases are array references as well, recurse. If the bases
8847 are pointer indirections compute the difference of the pointers.
8848 If the bases are equal, we are set. */
8849 if ((TREE_CODE (base0
) == ARRAY_REF
8850 && TREE_CODE (base1
) == ARRAY_REF
8852 = fold_addr_of_array_ref_difference (loc
, type
, base0
, base1
)))
8853 || (INDIRECT_REF_P (base0
)
8854 && INDIRECT_REF_P (base1
)
8855 && (base_offset
= fold_binary_loc (loc
, MINUS_EXPR
, type
,
8856 TREE_OPERAND (base0
, 0),
8857 TREE_OPERAND (base1
, 0))))
8858 || operand_equal_p (base0
, base1
, 0))
8860 tree op0
= fold_convert_loc (loc
, type
, TREE_OPERAND (aref0
, 1));
8861 tree op1
= fold_convert_loc (loc
, type
, TREE_OPERAND (aref1
, 1));
8862 tree esz
= fold_convert_loc (loc
, type
, array_ref_element_size (aref0
));
8863 tree diff
= build2 (MINUS_EXPR
, type
, op0
, op1
);
8864 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
8866 fold_build2_loc (loc
, MULT_EXPR
, type
,
8872 /* If the real or vector real constant CST of type TYPE has an exact
8873 inverse, return it, else return NULL. */
8876 exact_inverse (tree type
, tree cst
)
8879 tree unit_type
, *elts
;
8881 unsigned vec_nelts
, i
;
8883 switch (TREE_CODE (cst
))
8886 r
= TREE_REAL_CST (cst
);
8888 if (exact_real_inverse (TYPE_MODE (type
), &r
))
8889 return build_real (type
, r
);
8894 vec_nelts
= VECTOR_CST_NELTS (cst
);
8895 elts
= XALLOCAVEC (tree
, vec_nelts
);
8896 unit_type
= TREE_TYPE (type
);
8897 mode
= TYPE_MODE (unit_type
);
8899 for (i
= 0; i
< vec_nelts
; i
++)
8901 r
= TREE_REAL_CST (VECTOR_CST_ELT (cst
, i
));
8902 if (!exact_real_inverse (mode
, &r
))
8904 elts
[i
] = build_real (unit_type
, r
);
8907 return build_vector (type
, elts
);
8914 /* Mask out the tz least significant bits of X of type TYPE where
8915 tz is the number of trailing zeroes in Y. */
8917 mask_with_tz (tree type
, const wide_int
&x
, const wide_int
&y
)
8919 int tz
= wi::ctz (y
);
8921 return wi::mask (tz
, true, TYPE_PRECISION (type
)) & x
;
8925 /* Return true when T is an address and is known to be nonzero.
8926 For floating point we further ensure that T is not denormal.
8927 Similar logic is present in nonzero_address in rtlanal.h.
8929 If the return value is based on the assumption that signed overflow
8930 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
8931 change *STRICT_OVERFLOW_P. */
8934 tree_expr_nonzero_warnv_p (tree t
, bool *strict_overflow_p
)
8936 tree type
= TREE_TYPE (t
);
8937 enum tree_code code
;
8939 /* Doing something useful for floating point would need more work. */
8940 if (!INTEGRAL_TYPE_P (type
) && !POINTER_TYPE_P (type
))
8943 code
= TREE_CODE (t
);
8944 switch (TREE_CODE_CLASS (code
))
8947 return tree_unary_nonzero_warnv_p (code
, type
, TREE_OPERAND (t
, 0),
8950 case tcc_comparison
:
8951 return tree_binary_nonzero_warnv_p (code
, type
,
8952 TREE_OPERAND (t
, 0),
8953 TREE_OPERAND (t
, 1),
8956 case tcc_declaration
:
8958 return tree_single_nonzero_warnv_p (t
, strict_overflow_p
);
8966 case TRUTH_NOT_EXPR
:
8967 return tree_unary_nonzero_warnv_p (code
, type
, TREE_OPERAND (t
, 0),
8970 case TRUTH_AND_EXPR
:
8972 case TRUTH_XOR_EXPR
:
8973 return tree_binary_nonzero_warnv_p (code
, type
,
8974 TREE_OPERAND (t
, 0),
8975 TREE_OPERAND (t
, 1),
8983 case WITH_SIZE_EXPR
:
8985 return tree_single_nonzero_warnv_p (t
, strict_overflow_p
);
8990 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 1),
8994 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 0),
8999 tree fndecl
= get_callee_fndecl (t
);
9000 if (!fndecl
) return false;
9001 if (flag_delete_null_pointer_checks
&& !flag_check_new
9002 && DECL_IS_OPERATOR_NEW (fndecl
)
9003 && !TREE_NOTHROW (fndecl
))
9005 if (flag_delete_null_pointer_checks
9006 && lookup_attribute ("returns_nonnull",
9007 TYPE_ATTRIBUTES (TREE_TYPE (fndecl
))))
9009 return alloca_call_p (t
);
9018 /* Return true when T is an address and is known to be nonzero.
9019 Handle warnings about undefined signed overflow. */
9022 tree_expr_nonzero_p (tree t
)
9024 bool ret
, strict_overflow_p
;
9026 strict_overflow_p
= false;
9027 ret
= tree_expr_nonzero_warnv_p (t
, &strict_overflow_p
);
9028 if (strict_overflow_p
)
9029 fold_overflow_warning (("assuming signed overflow does not occur when "
9030 "determining that expression is always "
9032 WARN_STRICT_OVERFLOW_MISC
);
9036 /* Fold a binary expression of code CODE and type TYPE with operands
9037 OP0 and OP1. LOC is the location of the resulting expression.
9038 Return the folded expression if folding is successful. Otherwise,
9039 return NULL_TREE. */
9042 fold_binary_loc (location_t loc
,
9043 enum tree_code code
, tree type
, tree op0
, tree op1
)
9045 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
9046 tree arg0
, arg1
, tem
;
9047 tree t1
= NULL_TREE
;
9048 bool strict_overflow_p
;
9051 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
9052 && TREE_CODE_LENGTH (code
) == 2
9054 && op1
!= NULL_TREE
);
9059 /* Strip any conversions that don't change the mode. This is
9060 safe for every expression, except for a comparison expression
9061 because its signedness is derived from its operands. So, in
9062 the latter case, only strip conversions that don't change the
9063 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9066 Note that this is done as an internal manipulation within the
9067 constant folder, in order to find the simplest representation
9068 of the arguments so that their form can be studied. In any
9069 cases, the appropriate type conversions should be put back in
9070 the tree that will get out of the constant folder. */
9072 if (kind
== tcc_comparison
|| code
== MIN_EXPR
|| code
== MAX_EXPR
)
9074 STRIP_SIGN_NOPS (arg0
);
9075 STRIP_SIGN_NOPS (arg1
);
9083 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9084 constant but we can't do arithmetic on them. */
9085 if (CONSTANT_CLASS_P (arg0
) && CONSTANT_CLASS_P (arg1
))
9087 tem
= const_binop (code
, type
, arg0
, arg1
);
9088 if (tem
!= NULL_TREE
)
9090 if (TREE_TYPE (tem
) != type
)
9091 tem
= fold_convert_loc (loc
, type
, tem
);
9096 /* If this is a commutative operation, and ARG0 is a constant, move it
9097 to ARG1 to reduce the number of tests below. */
9098 if (commutative_tree_code (code
)
9099 && tree_swap_operands_p (arg0
, arg1
, true))
9100 return fold_build2_loc (loc
, code
, type
, op1
, op0
);
9102 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9103 to ARG1 to reduce the number of tests below. */
9104 if (kind
== tcc_comparison
9105 && tree_swap_operands_p (arg0
, arg1
, true))
9106 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
, op1
, op0
);
9108 tem
= generic_simplify (loc
, code
, type
, op0
, op1
);
9112 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9114 First check for cases where an arithmetic operation is applied to a
9115 compound, conditional, or comparison operation. Push the arithmetic
9116 operation inside the compound or conditional to see if any folding
9117 can then be done. Convert comparison to conditional for this purpose.
9118 The also optimizes non-constant cases that used to be done in
9121 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9122 one of the operands is a comparison and the other is a comparison, a
9123 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9124 code below would make the expression more complex. Change it to a
9125 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9126 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9128 if ((code
== BIT_AND_EXPR
|| code
== BIT_IOR_EXPR
9129 || code
== EQ_EXPR
|| code
== NE_EXPR
)
9130 && TREE_CODE (type
) != VECTOR_TYPE
9131 && ((truth_value_p (TREE_CODE (arg0
))
9132 && (truth_value_p (TREE_CODE (arg1
))
9133 || (TREE_CODE (arg1
) == BIT_AND_EXPR
9134 && integer_onep (TREE_OPERAND (arg1
, 1)))))
9135 || (truth_value_p (TREE_CODE (arg1
))
9136 && (truth_value_p (TREE_CODE (arg0
))
9137 || (TREE_CODE (arg0
) == BIT_AND_EXPR
9138 && integer_onep (TREE_OPERAND (arg0
, 1)))))))
9140 tem
= fold_build2_loc (loc
, code
== BIT_AND_EXPR
? TRUTH_AND_EXPR
9141 : code
== BIT_IOR_EXPR
? TRUTH_OR_EXPR
9144 fold_convert_loc (loc
, boolean_type_node
, arg0
),
9145 fold_convert_loc (loc
, boolean_type_node
, arg1
));
9147 if (code
== EQ_EXPR
)
9148 tem
= invert_truthvalue_loc (loc
, tem
);
9150 return fold_convert_loc (loc
, type
, tem
);
9153 if (TREE_CODE_CLASS (code
) == tcc_binary
9154 || TREE_CODE_CLASS (code
) == tcc_comparison
)
9156 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
9158 tem
= fold_build2_loc (loc
, code
, type
,
9159 fold_convert_loc (loc
, TREE_TYPE (op0
),
9160 TREE_OPERAND (arg0
, 1)), op1
);
9161 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
9164 if (TREE_CODE (arg1
) == COMPOUND_EXPR
9165 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
9167 tem
= fold_build2_loc (loc
, code
, type
, op0
,
9168 fold_convert_loc (loc
, TREE_TYPE (op1
),
9169 TREE_OPERAND (arg1
, 1)));
9170 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg1
, 0),
9174 if (TREE_CODE (arg0
) == COND_EXPR
9175 || TREE_CODE (arg0
) == VEC_COND_EXPR
9176 || COMPARISON_CLASS_P (arg0
))
9178 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
9180 /*cond_first_p=*/1);
9181 if (tem
!= NULL_TREE
)
9185 if (TREE_CODE (arg1
) == COND_EXPR
9186 || TREE_CODE (arg1
) == VEC_COND_EXPR
9187 || COMPARISON_CLASS_P (arg1
))
9189 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
9191 /*cond_first_p=*/0);
9192 if (tem
!= NULL_TREE
)
9200 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9201 if (TREE_CODE (arg0
) == ADDR_EXPR
9202 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == MEM_REF
)
9204 tree iref
= TREE_OPERAND (arg0
, 0);
9205 return fold_build2 (MEM_REF
, type
,
9206 TREE_OPERAND (iref
, 0),
9207 int_const_binop (PLUS_EXPR
, arg1
,
9208 TREE_OPERAND (iref
, 1)));
9211 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9212 if (TREE_CODE (arg0
) == ADDR_EXPR
9213 && handled_component_p (TREE_OPERAND (arg0
, 0)))
9216 HOST_WIDE_INT coffset
;
9217 base
= get_addr_base_and_unit_offset (TREE_OPERAND (arg0
, 0),
9221 return fold_build2 (MEM_REF
, type
,
9222 build_fold_addr_expr (base
),
9223 int_const_binop (PLUS_EXPR
, arg1
,
9224 size_int (coffset
)));
9229 case POINTER_PLUS_EXPR
:
9230 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9231 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
9232 && INTEGRAL_TYPE_P (TREE_TYPE (arg0
)))
9233 return fold_convert_loc (loc
, type
,
9234 fold_build2_loc (loc
, PLUS_EXPR
, sizetype
,
9235 fold_convert_loc (loc
, sizetype
,
9237 fold_convert_loc (loc
, sizetype
,
9243 if (INTEGRAL_TYPE_P (type
) || VECTOR_INTEGER_TYPE_P (type
))
9245 /* X + (X / CST) * -CST is X % CST. */
9246 if (TREE_CODE (arg1
) == MULT_EXPR
9247 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == TRUNC_DIV_EXPR
9248 && operand_equal_p (arg0
,
9249 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0), 0))
9251 tree cst0
= TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1);
9252 tree cst1
= TREE_OPERAND (arg1
, 1);
9253 tree sum
= fold_binary_loc (loc
, PLUS_EXPR
, TREE_TYPE (cst1
),
9255 if (sum
&& integer_zerop (sum
))
9256 return fold_convert_loc (loc
, type
,
9257 fold_build2_loc (loc
, TRUNC_MOD_EXPR
,
9258 TREE_TYPE (arg0
), arg0
,
9263 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9264 one. Make sure the type is not saturating and has the signedness of
9265 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9266 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9267 if ((TREE_CODE (arg0
) == MULT_EXPR
9268 || TREE_CODE (arg1
) == MULT_EXPR
)
9269 && !TYPE_SATURATING (type
)
9270 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg0
))
9271 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg1
))
9272 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
9274 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
9279 if (! FLOAT_TYPE_P (type
))
9281 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9282 (plus (plus (mult) (mult)) (foo)) so that we can
9283 take advantage of the factoring cases below. */
9284 if (ANY_INTEGRAL_TYPE_P (type
)
9285 && TYPE_OVERFLOW_WRAPS (type
)
9286 && (((TREE_CODE (arg0
) == PLUS_EXPR
9287 || TREE_CODE (arg0
) == MINUS_EXPR
)
9288 && TREE_CODE (arg1
) == MULT_EXPR
)
9289 || ((TREE_CODE (arg1
) == PLUS_EXPR
9290 || TREE_CODE (arg1
) == MINUS_EXPR
)
9291 && TREE_CODE (arg0
) == MULT_EXPR
)))
9293 tree parg0
, parg1
, parg
, marg
;
9294 enum tree_code pcode
;
9296 if (TREE_CODE (arg1
) == MULT_EXPR
)
9297 parg
= arg0
, marg
= arg1
;
9299 parg
= arg1
, marg
= arg0
;
9300 pcode
= TREE_CODE (parg
);
9301 parg0
= TREE_OPERAND (parg
, 0);
9302 parg1
= TREE_OPERAND (parg
, 1);
9306 if (TREE_CODE (parg0
) == MULT_EXPR
9307 && TREE_CODE (parg1
) != MULT_EXPR
)
9308 return fold_build2_loc (loc
, pcode
, type
,
9309 fold_build2_loc (loc
, PLUS_EXPR
, type
,
9310 fold_convert_loc (loc
, type
,
9312 fold_convert_loc (loc
, type
,
9314 fold_convert_loc (loc
, type
, parg1
));
9315 if (TREE_CODE (parg0
) != MULT_EXPR
9316 && TREE_CODE (parg1
) == MULT_EXPR
)
9318 fold_build2_loc (loc
, PLUS_EXPR
, type
,
9319 fold_convert_loc (loc
, type
, parg0
),
9320 fold_build2_loc (loc
, pcode
, type
,
9321 fold_convert_loc (loc
, type
, marg
),
9322 fold_convert_loc (loc
, type
,
9328 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9329 to __complex__ ( x, y ). This is not the same for SNaNs or
9330 if signed zeros are involved. */
9331 if (!HONOR_SNANS (element_mode (arg0
))
9332 && !HONOR_SIGNED_ZEROS (element_mode (arg0
))
9333 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
9335 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
9336 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
9337 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
9338 bool arg0rz
= false, arg0iz
= false;
9339 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
9340 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
9342 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
9343 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
9344 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
9346 tree rp
= arg1r
? arg1r
9347 : build1 (REALPART_EXPR
, rtype
, arg1
);
9348 tree ip
= arg0i
? arg0i
9349 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
9350 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
9352 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
9354 tree rp
= arg0r
? arg0r
9355 : build1 (REALPART_EXPR
, rtype
, arg0
);
9356 tree ip
= arg1i
? arg1i
9357 : build1 (IMAGPART_EXPR
, rtype
, arg1
);
9358 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
9363 if (flag_unsafe_math_optimizations
9364 && (TREE_CODE (arg0
) == RDIV_EXPR
|| TREE_CODE (arg0
) == MULT_EXPR
)
9365 && (TREE_CODE (arg1
) == RDIV_EXPR
|| TREE_CODE (arg1
) == MULT_EXPR
)
9366 && (tem
= distribute_real_division (loc
, code
, type
, arg0
, arg1
)))
9369 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9370 We associate floats only if the user has specified
9371 -fassociative-math. */
9372 if (flag_associative_math
9373 && TREE_CODE (arg1
) == PLUS_EXPR
9374 && TREE_CODE (arg0
) != MULT_EXPR
)
9376 tree tree10
= TREE_OPERAND (arg1
, 0);
9377 tree tree11
= TREE_OPERAND (arg1
, 1);
9378 if (TREE_CODE (tree11
) == MULT_EXPR
9379 && TREE_CODE (tree10
) == MULT_EXPR
)
9382 tree0
= fold_build2_loc (loc
, PLUS_EXPR
, type
, arg0
, tree10
);
9383 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tree0
, tree11
);
9386 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9387 We associate floats only if the user has specified
9388 -fassociative-math. */
9389 if (flag_associative_math
9390 && TREE_CODE (arg0
) == PLUS_EXPR
9391 && TREE_CODE (arg1
) != MULT_EXPR
)
9393 tree tree00
= TREE_OPERAND (arg0
, 0);
9394 tree tree01
= TREE_OPERAND (arg0
, 1);
9395 if (TREE_CODE (tree01
) == MULT_EXPR
9396 && TREE_CODE (tree00
) == MULT_EXPR
)
9399 tree0
= fold_build2_loc (loc
, PLUS_EXPR
, type
, tree01
, arg1
);
9400 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tree00
, tree0
);
9406 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9407 is a rotate of A by C1 bits. */
9408 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9409 is a rotate of A by B bits. */
9411 enum tree_code code0
, code1
;
9413 code0
= TREE_CODE (arg0
);
9414 code1
= TREE_CODE (arg1
);
9415 if (((code0
== RSHIFT_EXPR
&& code1
== LSHIFT_EXPR
)
9416 || (code1
== RSHIFT_EXPR
&& code0
== LSHIFT_EXPR
))
9417 && operand_equal_p (TREE_OPERAND (arg0
, 0),
9418 TREE_OPERAND (arg1
, 0), 0)
9419 && (rtype
= TREE_TYPE (TREE_OPERAND (arg0
, 0)),
9420 TYPE_UNSIGNED (rtype
))
9421 /* Only create rotates in complete modes. Other cases are not
9422 expanded properly. */
9423 && (element_precision (rtype
)
9424 == GET_MODE_PRECISION (GET_MODE_INNER (TYPE_MODE (rtype
)))))
9426 tree tree01
, tree11
;
9427 enum tree_code code01
, code11
;
9429 tree01
= TREE_OPERAND (arg0
, 1);
9430 tree11
= TREE_OPERAND (arg1
, 1);
9431 STRIP_NOPS (tree01
);
9432 STRIP_NOPS (tree11
);
9433 code01
= TREE_CODE (tree01
);
9434 code11
= TREE_CODE (tree11
);
9435 if (code01
== INTEGER_CST
9436 && code11
== INTEGER_CST
9437 && (wi::to_widest (tree01
) + wi::to_widest (tree11
)
9438 == element_precision (TREE_TYPE (TREE_OPERAND (arg0
, 0)))))
9440 tem
= build2_loc (loc
, LROTATE_EXPR
,
9441 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
9442 TREE_OPERAND (arg0
, 0),
9443 code0
== LSHIFT_EXPR
9444 ? TREE_OPERAND (arg0
, 1)
9445 : TREE_OPERAND (arg1
, 1));
9446 return fold_convert_loc (loc
, type
, tem
);
9448 else if (code11
== MINUS_EXPR
)
9450 tree tree110
, tree111
;
9451 tree110
= TREE_OPERAND (tree11
, 0);
9452 tree111
= TREE_OPERAND (tree11
, 1);
9453 STRIP_NOPS (tree110
);
9454 STRIP_NOPS (tree111
);
9455 if (TREE_CODE (tree110
) == INTEGER_CST
9456 && 0 == compare_tree_int (tree110
,
9458 (TREE_TYPE (TREE_OPERAND
9460 && operand_equal_p (tree01
, tree111
, 0))
9462 fold_convert_loc (loc
, type
,
9463 build2 ((code0
== LSHIFT_EXPR
9466 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
9467 TREE_OPERAND (arg0
, 0),
9468 TREE_OPERAND (arg0
, 1)));
9470 else if (code01
== MINUS_EXPR
)
9472 tree tree010
, tree011
;
9473 tree010
= TREE_OPERAND (tree01
, 0);
9474 tree011
= TREE_OPERAND (tree01
, 1);
9475 STRIP_NOPS (tree010
);
9476 STRIP_NOPS (tree011
);
9477 if (TREE_CODE (tree010
) == INTEGER_CST
9478 && 0 == compare_tree_int (tree010
,
9480 (TREE_TYPE (TREE_OPERAND
9482 && operand_equal_p (tree11
, tree011
, 0))
9483 return fold_convert_loc
9485 build2 ((code0
!= LSHIFT_EXPR
9488 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
9489 TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 1)));
9495 /* In most languages, can't associate operations on floats through
9496 parentheses. Rather than remember where the parentheses were, we
9497 don't associate floats at all, unless the user has specified
9499 And, we need to make sure type is not saturating. */
9501 if ((! FLOAT_TYPE_P (type
) || flag_associative_math
)
9502 && !TYPE_SATURATING (type
))
9504 tree var0
, con0
, lit0
, minus_lit0
;
9505 tree var1
, con1
, lit1
, minus_lit1
;
9509 /* Split both trees into variables, constants, and literals. Then
9510 associate each group together, the constants with literals,
9511 then the result with variables. This increases the chances of
9512 literals being recombined later and of generating relocatable
9513 expressions for the sum of a constant and literal. */
9514 var0
= split_tree (arg0
, code
, &con0
, &lit0
, &minus_lit0
, 0);
9515 var1
= split_tree (arg1
, code
, &con1
, &lit1
, &minus_lit1
,
9516 code
== MINUS_EXPR
);
9518 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9519 if (code
== MINUS_EXPR
)
9522 /* With undefined overflow prefer doing association in a type
9523 which wraps on overflow, if that is one of the operand types. */
9524 if ((POINTER_TYPE_P (type
) && POINTER_TYPE_OVERFLOW_UNDEFINED
)
9525 || (INTEGRAL_TYPE_P (type
) && !TYPE_OVERFLOW_WRAPS (type
)))
9527 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
9528 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
9529 atype
= TREE_TYPE (arg0
);
9530 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
9531 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1
)))
9532 atype
= TREE_TYPE (arg1
);
9533 gcc_assert (TYPE_PRECISION (atype
) == TYPE_PRECISION (type
));
9536 /* With undefined overflow we can only associate constants with one
9537 variable, and constants whose association doesn't overflow. */
9538 if ((POINTER_TYPE_P (atype
) && POINTER_TYPE_OVERFLOW_UNDEFINED
)
9539 || (INTEGRAL_TYPE_P (atype
) && !TYPE_OVERFLOW_WRAPS (atype
)))
9546 if (TREE_CODE (tmp0
) == NEGATE_EXPR
)
9547 tmp0
= TREE_OPERAND (tmp0
, 0);
9548 if (CONVERT_EXPR_P (tmp0
)
9549 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0
, 0)))
9550 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0
, 0)))
9551 <= TYPE_PRECISION (atype
)))
9552 tmp0
= TREE_OPERAND (tmp0
, 0);
9553 if (TREE_CODE (tmp1
) == NEGATE_EXPR
)
9554 tmp1
= TREE_OPERAND (tmp1
, 0);
9555 if (CONVERT_EXPR_P (tmp1
)
9556 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1
, 0)))
9557 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1
, 0)))
9558 <= TYPE_PRECISION (atype
)))
9559 tmp1
= TREE_OPERAND (tmp1
, 0);
9560 /* The only case we can still associate with two variables
9561 is if they are the same, modulo negation and bit-pattern
9562 preserving conversions. */
9563 if (!operand_equal_p (tmp0
, tmp1
, 0))
9568 /* Only do something if we found more than two objects. Otherwise,
9569 nothing has changed and we risk infinite recursion. */
9571 && (2 < ((var0
!= 0) + (var1
!= 0)
9572 + (con0
!= 0) + (con1
!= 0)
9573 + (lit0
!= 0) + (lit1
!= 0)
9574 + (minus_lit0
!= 0) + (minus_lit1
!= 0))))
9576 bool any_overflows
= false;
9577 if (lit0
) any_overflows
|= TREE_OVERFLOW (lit0
);
9578 if (lit1
) any_overflows
|= TREE_OVERFLOW (lit1
);
9579 if (minus_lit0
) any_overflows
|= TREE_OVERFLOW (minus_lit0
);
9580 if (minus_lit1
) any_overflows
|= TREE_OVERFLOW (minus_lit1
);
9581 var0
= associate_trees (loc
, var0
, var1
, code
, atype
);
9582 con0
= associate_trees (loc
, con0
, con1
, code
, atype
);
9583 lit0
= associate_trees (loc
, lit0
, lit1
, code
, atype
);
9584 minus_lit0
= associate_trees (loc
, minus_lit0
, minus_lit1
,
9587 /* Preserve the MINUS_EXPR if the negative part of the literal is
9588 greater than the positive part. Otherwise, the multiplicative
9589 folding code (i.e extract_muldiv) may be fooled in case
9590 unsigned constants are subtracted, like in the following
9591 example: ((X*2 + 4) - 8U)/2. */
9592 if (minus_lit0
&& lit0
)
9594 if (TREE_CODE (lit0
) == INTEGER_CST
9595 && TREE_CODE (minus_lit0
) == INTEGER_CST
9596 && tree_int_cst_lt (lit0
, minus_lit0
))
9598 minus_lit0
= associate_trees (loc
, minus_lit0
, lit0
,
9604 lit0
= associate_trees (loc
, lit0
, minus_lit0
,
9610 /* Don't introduce overflows through reassociation. */
9612 && ((lit0
&& TREE_OVERFLOW_P (lit0
))
9613 || (minus_lit0
&& TREE_OVERFLOW_P (minus_lit0
))))
9620 fold_convert_loc (loc
, type
,
9621 associate_trees (loc
, var0
, minus_lit0
,
9622 MINUS_EXPR
, atype
));
9625 con0
= associate_trees (loc
, con0
, minus_lit0
,
9628 fold_convert_loc (loc
, type
,
9629 associate_trees (loc
, var0
, con0
,
9634 con0
= associate_trees (loc
, con0
, lit0
, code
, atype
);
9636 fold_convert_loc (loc
, type
, associate_trees (loc
, var0
, con0
,
9644 /* Pointer simplifications for subtraction, simple reassociations. */
9645 if (POINTER_TYPE_P (TREE_TYPE (arg1
)) && POINTER_TYPE_P (TREE_TYPE (arg0
)))
9647 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
9648 if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
9649 && TREE_CODE (arg1
) == POINTER_PLUS_EXPR
)
9651 tree arg00
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
9652 tree arg01
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
9653 tree arg10
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
9654 tree arg11
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
9655 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
9656 fold_build2_loc (loc
, MINUS_EXPR
, type
,
9658 fold_build2_loc (loc
, MINUS_EXPR
, type
,
9661 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
9662 else if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
)
9664 tree arg00
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
9665 tree arg01
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
9666 tree tmp
= fold_binary_loc (loc
, MINUS_EXPR
, type
, arg00
,
9667 fold_convert_loc (loc
, type
, arg1
));
9669 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tmp
, arg01
);
9671 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
9673 else if (TREE_CODE (arg1
) == POINTER_PLUS_EXPR
)
9675 tree arg10
= fold_convert_loc (loc
, type
,
9676 TREE_OPERAND (arg1
, 0));
9677 tree arg11
= fold_convert_loc (loc
, type
,
9678 TREE_OPERAND (arg1
, 1));
9679 tree tmp
= fold_binary_loc (loc
, MINUS_EXPR
, type
,
9680 fold_convert_loc (loc
, type
, arg0
),
9683 return fold_build2_loc (loc
, MINUS_EXPR
, type
, tmp
, arg11
);
9686 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9687 if (TREE_CODE (arg0
) == NEGATE_EXPR
9688 && negate_expr_p (arg1
)
9689 && reorder_operands_p (arg0
, arg1
))
9690 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
9691 fold_convert_loc (loc
, type
,
9692 negate_expr (arg1
)),
9693 fold_convert_loc (loc
, type
,
9694 TREE_OPERAND (arg0
, 0)));
9696 if (! FLOAT_TYPE_P (type
))
9698 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9699 any power of 2 minus 1. */
9700 if (TREE_CODE (arg0
) == BIT_AND_EXPR
9701 && TREE_CODE (arg1
) == BIT_AND_EXPR
9702 && operand_equal_p (TREE_OPERAND (arg0
, 0),
9703 TREE_OPERAND (arg1
, 0), 0))
9705 tree mask0
= TREE_OPERAND (arg0
, 1);
9706 tree mask1
= TREE_OPERAND (arg1
, 1);
9707 tree tem
= fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, mask0
);
9709 if (operand_equal_p (tem
, mask1
, 0))
9711 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, type
,
9712 TREE_OPERAND (arg0
, 0), mask1
);
9713 return fold_build2_loc (loc
, MINUS_EXPR
, type
, tem
, mask1
);
9718 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9719 __complex__ ( x, -y ). This is not the same for SNaNs or if
9720 signed zeros are involved. */
9721 if (!HONOR_SNANS (element_mode (arg0
))
9722 && !HONOR_SIGNED_ZEROS (element_mode (arg0
))
9723 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
9725 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
9726 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
9727 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
9728 bool arg0rz
= false, arg0iz
= false;
9729 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
9730 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
9732 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
9733 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
9734 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
9736 tree rp
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
9738 : build1 (REALPART_EXPR
, rtype
, arg1
));
9739 tree ip
= arg0i
? arg0i
9740 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
9741 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
9743 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
9745 tree rp
= arg0r
? arg0r
9746 : build1 (REALPART_EXPR
, rtype
, arg0
);
9747 tree ip
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
9749 : build1 (IMAGPART_EXPR
, rtype
, arg1
));
9750 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
9755 /* A - B -> A + (-B) if B is easily negatable. */
9756 if (negate_expr_p (arg1
)
9757 && !TYPE_OVERFLOW_SANITIZED (type
)
9758 && ((FLOAT_TYPE_P (type
)
9759 /* Avoid this transformation if B is a positive REAL_CST. */
9760 && (TREE_CODE (arg1
) != REAL_CST
9761 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
))))
9762 || INTEGRAL_TYPE_P (type
)))
9763 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
9764 fold_convert_loc (loc
, type
, arg0
),
9765 fold_convert_loc (loc
, type
,
9766 negate_expr (arg1
)));
9768 /* Fold &a[i] - &a[j] to i-j. */
9769 if (TREE_CODE (arg0
) == ADDR_EXPR
9770 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == ARRAY_REF
9771 && TREE_CODE (arg1
) == ADDR_EXPR
9772 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == ARRAY_REF
)
9774 tree tem
= fold_addr_of_array_ref_difference (loc
, type
,
9775 TREE_OPERAND (arg0
, 0),
9776 TREE_OPERAND (arg1
, 0));
9781 if (FLOAT_TYPE_P (type
)
9782 && flag_unsafe_math_optimizations
9783 && (TREE_CODE (arg0
) == RDIV_EXPR
|| TREE_CODE (arg0
) == MULT_EXPR
)
9784 && (TREE_CODE (arg1
) == RDIV_EXPR
|| TREE_CODE (arg1
) == MULT_EXPR
)
9785 && (tem
= distribute_real_division (loc
, code
, type
, arg0
, arg1
)))
9788 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
9789 one. Make sure the type is not saturating and has the signedness of
9790 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9791 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9792 if ((TREE_CODE (arg0
) == MULT_EXPR
9793 || TREE_CODE (arg1
) == MULT_EXPR
)
9794 && !TYPE_SATURATING (type
)
9795 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg0
))
9796 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg1
))
9797 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
9799 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
9807 /* (-A) * (-B) -> A * B */
9808 if (TREE_CODE (arg0
) == NEGATE_EXPR
&& negate_expr_p (arg1
))
9809 return fold_build2_loc (loc
, MULT_EXPR
, type
,
9810 fold_convert_loc (loc
, type
,
9811 TREE_OPERAND (arg0
, 0)),
9812 fold_convert_loc (loc
, type
,
9813 negate_expr (arg1
)));
9814 if (TREE_CODE (arg1
) == NEGATE_EXPR
&& negate_expr_p (arg0
))
9815 return fold_build2_loc (loc
, MULT_EXPR
, type
,
9816 fold_convert_loc (loc
, type
,
9817 negate_expr (arg0
)),
9818 fold_convert_loc (loc
, type
,
9819 TREE_OPERAND (arg1
, 0)));
9821 if (! FLOAT_TYPE_P (type
))
9823 /* Transform x * -C into -x * C if x is easily negatable. */
9824 if (TREE_CODE (arg1
) == INTEGER_CST
9825 && tree_int_cst_sgn (arg1
) == -1
9826 && negate_expr_p (arg0
)
9827 && (tem
= negate_expr (arg1
)) != arg1
9828 && !TREE_OVERFLOW (tem
))
9829 return fold_build2_loc (loc
, MULT_EXPR
, type
,
9830 fold_convert_loc (loc
, type
,
9831 negate_expr (arg0
)),
9834 /* (a * (1 << b)) is (a << b) */
9835 if (TREE_CODE (arg1
) == LSHIFT_EXPR
9836 && integer_onep (TREE_OPERAND (arg1
, 0)))
9837 return fold_build2_loc (loc
, LSHIFT_EXPR
, type
, op0
,
9838 TREE_OPERAND (arg1
, 1));
9839 if (TREE_CODE (arg0
) == LSHIFT_EXPR
9840 && integer_onep (TREE_OPERAND (arg0
, 0)))
9841 return fold_build2_loc (loc
, LSHIFT_EXPR
, type
, op1
,
9842 TREE_OPERAND (arg0
, 1));
9844 /* (A + A) * C -> A * 2 * C */
9845 if (TREE_CODE (arg0
) == PLUS_EXPR
9846 && TREE_CODE (arg1
) == INTEGER_CST
9847 && operand_equal_p (TREE_OPERAND (arg0
, 0),
9848 TREE_OPERAND (arg0
, 1), 0))
9849 return fold_build2_loc (loc
, MULT_EXPR
, type
,
9850 omit_one_operand_loc (loc
, type
,
9851 TREE_OPERAND (arg0
, 0),
9852 TREE_OPERAND (arg0
, 1)),
9853 fold_build2_loc (loc
, MULT_EXPR
, type
,
9854 build_int_cst (type
, 2) , arg1
));
9856 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
9857 sign-changing only. */
9858 if (TREE_CODE (arg1
) == INTEGER_CST
9859 && TREE_CODE (arg0
) == EXACT_DIV_EXPR
9860 && operand_equal_p (arg1
, TREE_OPERAND (arg0
, 1), 0))
9861 return fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
9863 strict_overflow_p
= false;
9864 if (TREE_CODE (arg1
) == INTEGER_CST
9865 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
9866 &strict_overflow_p
)))
9868 if (strict_overflow_p
)
9869 fold_overflow_warning (("assuming signed overflow does not "
9870 "occur when simplifying "
9872 WARN_STRICT_OVERFLOW_MISC
);
9873 return fold_convert_loc (loc
, type
, tem
);
9876 /* Optimize z * conj(z) for integer complex numbers. */
9877 if (TREE_CODE (arg0
) == CONJ_EXPR
9878 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
9879 return fold_mult_zconjz (loc
, type
, arg1
);
9880 if (TREE_CODE (arg1
) == CONJ_EXPR
9881 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
9882 return fold_mult_zconjz (loc
, type
, arg0
);
9886 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
9887 the result for floating point types due to rounding so it is applied
9888 only if -fassociative-math was specify. */
9889 if (flag_associative_math
9890 && TREE_CODE (arg0
) == RDIV_EXPR
9891 && TREE_CODE (arg1
) == REAL_CST
9892 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == REAL_CST
)
9894 tree tem
= const_binop (MULT_EXPR
, TREE_OPERAND (arg0
, 0),
9897 return fold_build2_loc (loc
, RDIV_EXPR
, type
, tem
,
9898 TREE_OPERAND (arg0
, 1));
9901 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
9902 if (operand_equal_p (arg0
, arg1
, 0))
9904 tree tem
= fold_strip_sign_ops (arg0
);
9905 if (tem
!= NULL_TREE
)
9907 tem
= fold_convert_loc (loc
, type
, tem
);
9908 return fold_build2_loc (loc
, MULT_EXPR
, type
, tem
, tem
);
          /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
             This is not the same for NaNs or if signed zeros are
             involved.  */
          if (!HONOR_NANS (arg0)
              && !HONOR_SIGNED_ZEROS (element_mode (arg0))
              && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
              && TREE_CODE (arg1) == COMPLEX_CST
              && real_zerop (TREE_REALPART (arg1)))
            {
              tree rtype = TREE_TYPE (TREE_TYPE (arg0));
              if (real_onep (TREE_IMAGPART (arg1)))
                return
                  fold_build2_loc (loc, COMPLEX_EXPR, type,
                                   negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
                                                                 rtype, arg0)),
                                   fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
              else if (real_minus_onep (TREE_IMAGPART (arg1)))
                return
                  fold_build2_loc (loc, COMPLEX_EXPR, type,
                                   fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
                                   negate_expr (fold_build1_loc (loc, REALPART_EXPR,
                                                                 rtype, arg0)));
            }

          /* Optimize z * conj(z) for floating point complex numbers.
             Guarded by flag_unsafe_math_optimizations as non-finite
             imaginary components don't produce scalar results.  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg0) == CONJ_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
            return fold_mult_zconjz (loc, type, arg1);
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg1) == CONJ_EXPR
              && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
            return fold_mult_zconjz (loc, type, arg0);
          if (flag_unsafe_math_optimizations)
            {
              enum built_in_function fcode0 = builtin_mathfn_code (arg0);
              enum built_in_function fcode1 = builtin_mathfn_code (arg1);

              /* Optimizations of root(...)*root(...).  */
              if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
                {
                  tree rootfn, arg;
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);

                  /* Optimize sqrt(x)*sqrt(x) as x.  */
                  if (BUILTIN_SQRT_P (fcode0)
                      && operand_equal_p (arg00, arg10, 0)
                      && ! HONOR_SNANS (element_mode (type)))
                    return arg00;

                  /* Optimize root(x)*root(y) as root(x*y).  */
                  rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
                  return build_call_expr_loc (loc, rootfn, 1, arg);
                }

              /* Optimize expN(x)*expN(y) as expN(x+y).  */
              if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
                {
                  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
                                              CALL_EXPR_ARG (arg0, 0),
                                              CALL_EXPR_ARG (arg1, 0));
                  return build_call_expr_loc (loc, expfn, 1, arg);
                }
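              /* Example (requires -funsafe-math-optimizations):
                 "exp (a) * exp (b)" becomes "exp (a + b)", trading one
                 transcendental call for an addition.  */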
              /* Optimizations of pow(...)*pow(...).  */
              if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
                  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
                  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
                {
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg01 = CALL_EXPR_ARG (arg0, 1);
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);
                  tree arg11 = CALL_EXPR_ARG (arg1, 1);

                  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
                  if (operand_equal_p (arg01, arg11, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      tree arg = fold_build2_loc (loc, MULT_EXPR, type,
                                                  arg00, arg10);
                      return build_call_expr_loc (loc, powfn, 2, arg, arg01);
                    }

                  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
                  if (operand_equal_p (arg00, arg10, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
                                                  arg01, arg11);
                      return build_call_expr_loc (loc, powfn, 2, arg00, arg);
                    }
                }

              /* Optimize tan(x)*cos(x) as sin(x).  */
              if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
                   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
                   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
                   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
                   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
                   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
                  && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                      CALL_EXPR_ARG (arg1, 0), 0))
                {
                  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);

                  if (sinfn != NULL_TREE)
                    return build_call_expr_loc (loc, sinfn, 1,
                                                CALL_EXPR_ARG (arg0, 0));
                }
              /* Optimize x*pow(x,c) as pow(x,c+1).  */
              if (fcode1 == BUILT_IN_POW
                  || fcode1 == BUILT_IN_POWF
                  || fcode1 == BUILT_IN_POWL)
                {
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);
                  tree arg11 = CALL_EXPR_ARG (arg1, 1);
                  if (TREE_CODE (arg11) == REAL_CST
                      && !TREE_OVERFLOW (arg11)
                      && operand_equal_p (arg0, arg10, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
                      REAL_VALUE_TYPE c;
                      tree arg;

                      c = TREE_REAL_CST (arg11);
                      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                      arg = build_real (type, c);
                      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
                    }
                }

              /* Optimize pow(x,c)*x as pow(x,c+1).  */
              if (fcode0 == BUILT_IN_POW
                  || fcode0 == BUILT_IN_POWF
                  || fcode0 == BUILT_IN_POWL)
                {
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg01 = CALL_EXPR_ARG (arg0, 1);
                  if (TREE_CODE (arg01) == REAL_CST
                      && !TREE_OVERFLOW (arg01)
                      && operand_equal_p (arg1, arg00, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      REAL_VALUE_TYPE c;
                      tree arg;

                      c = TREE_REAL_CST (arg01);
                      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                      arg = build_real (type, c);
                      return build_call_expr_loc (loc, powfn, 2, arg1, arg);
                    }
                }

              /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x.  */
              if (!in_gimple_form
                  && optimize
                  && operand_equal_p (arg0, arg1, 0))
                {
                  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

                  if (powfn)
                    {
                      tree arg = build_real (type, dconst2);
                      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
                    }
                }
            }
        }
      goto associate;

    case BIT_IOR_EXPR:
      /* Canonicalize (X & C1) | C2.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          int width = TYPE_PRECISION (type), w;
          wide_int c1 = TREE_OPERAND (arg0, 1);
          wide_int c2 = arg1;

          /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
          if ((c1 & c2) == c1)
            return omit_one_operand_loc (loc, type, arg1,
                                         TREE_OPERAND (arg0, 0));

          wide_int msk = wi::mask (width, false,
                                   TYPE_PRECISION (TREE_TYPE (arg1)));

          /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
          if (msk.and_not (c1 | c2) == 0)
            return fold_build2_loc (loc, BIT_IOR_EXPR, type,
                                    TREE_OPERAND (arg0, 0), arg1);

          /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
             unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
             mode which allows further optimizations.  */
          c1 &= msk;
          c2 &= msk;
          wide_int c3 = c1.and_not (c2);
          for (w = BITS_PER_UNIT; w <= width; w <<= 1)
            {
              wide_int mask = wi::mask (w, false,
                                        TYPE_PRECISION (type));
              if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
                {
                  c3 = mask;
                  break;
                }
            }

          if (c3 != c1)
            return fold_build2_loc (loc, BIT_IOR_EXPR, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, type,
                                                     TREE_OPERAND (arg0, 0),
                                                     wide_int_to_tree (type,
                                                                       c3)),
                                    arg1);
        }
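      /* Example: with C1 = 0x03 and C2 = 0x07, (C1 & C2) == C1, so
         "(x & 0x03) | 0x07" folds to 0x07; x is still evaluated for
         side effects via omit_one_operand_loc.  */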
      /* (X & ~Y) | (~X & Y) is X ^ Y */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == BIT_AND_EXPR)
        {
          tree a0, a1, l0, l1, n0, n1;

          a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
          a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));

          l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));

          n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
          n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);

          if ((operand_equal_p (n0, a0, 0)
               && operand_equal_p (n1, a1, 0))
              || (operand_equal_p (n0, a1, 0)
                  && operand_equal_p (n1, a0, 0)))
            return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
        }

      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_XOR_EXPR:
      /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && INTEGRAL_TYPE_P (type)
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_onep (arg1))
        return fold_build2_loc (loc, EQ_EXPR, type, arg0,
                                build_zero_cst (TREE_TYPE (arg0)));

      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_AND_EXPR:
      /* ~X & X, (X == 0) & X, and !X & X are always zero.  */
      if ((TREE_CODE (arg0) == BIT_NOT_EXPR
           || TREE_CODE (arg0) == TRUTH_NOT_EXPR
           || (TREE_CODE (arg0) == EQ_EXPR
               && integer_zerop (TREE_OPERAND (arg0, 1))))
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

      /* X & ~X , X & (X == 0), and X & !X are always zero.  */
      if ((TREE_CODE (arg1) == BIT_NOT_EXPR
           || TREE_CODE (arg1) == TRUTH_NOT_EXPR
           || (TREE_CODE (arg1) == EQ_EXPR
               && integer_zerop (TREE_OPERAND (arg1, 1))))
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && INTEGRAL_TYPE_P (type)
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_onep (arg1))
        {
          tree tem2;
          tem = TREE_OPERAND (arg0, 0);
          tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
          tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
                                  tem, tem2);
          return fold_build2_loc (loc, EQ_EXPR, type, tem2,
                                  build_zero_cst (TREE_TYPE (tem)));
        }
      /* Fold ~X & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && INTEGRAL_TYPE_P (type)
          && integer_onep (arg1))
        {
          tree tem2;
          tem = TREE_OPERAND (arg0, 0);
          tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
          tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
                                  tem, tem2);
          return fold_build2_loc (loc, EQ_EXPR, type, tem2,
                                  build_zero_cst (TREE_TYPE (tem)));
        }
      /* Fold !X & 1 as X == 0.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && integer_onep (arg1))
        {
          tem = TREE_OPERAND (arg0, 0);
          return fold_build2_loc (loc, EQ_EXPR, type, tem,
                                  build_zero_cst (TREE_TYPE (tem)));
        }
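      /* Example: "(x ^ 1) & 1" and "~x & 1" each become the low-bit
         test "(x & 1) == 0", while "!x & 1" becomes "x == 0".  */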
      /* Fold (X ^ Y) & Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg1));
        }
      /* Fold (X ^ Y) & X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg1));
        }
      /* Fold X & (X ^ Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_convert_loc (loc, type, arg0),
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
        }
      /* Fold X & (Y ^ X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg0));
        }
      /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
         multiple of 1 << CST.  */
      if (TREE_CODE (arg1) == INTEGER_CST)
        {
          wide_int cst1 = arg1;
          wide_int ncst1 = -cst1;
          if ((cst1 & ncst1) == ncst1
              && multiple_of_p (type, arg0,
                                wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
            return fold_convert_loc (loc, type, arg0);
        }
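      /* Example: "(x * 8) & -4" folds to "x * 8" -- the product is a
         multiple of 4, so masking off the two low bits cannot clear
         anything.  */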
      /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
         bits from CST2.  */
      if (TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) == MULT_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          wide_int warg1 = arg1;
          wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));

          if (masked == 0)
            return omit_two_operands_loc (loc, type, build_zero_cst (type),
                                          arg0, arg1);
          else if (masked != warg1)
            {
              /* Avoid the transform if arg1 is a mask of some
                 mode which allows further optimizations.  */
              int pop = wi::popcount (warg1);
              if (!(pop >= BITS_PER_UNIT
                    && exact_log2 (pop) != -1
                    && wi::mask (pop, false, warg1.get_precision ()) == warg1))
                return fold_build2_loc (loc, code, type, op0,
                                        wide_int_to_tree (type, masked));
            }
        }
      /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
         ((A & N) + B) & M -> (A + B) & M
         Similarly if (N & M) == 0,
         ((A | N) + B) & M -> (A + B) & M
         and for - instead of + (or unary - instead of +)
         and/or ^ instead of |.
         If B is constant and (B & M) == 0, fold into A & M.  */
      if (TREE_CODE (arg1) == INTEGER_CST)
        {
          wide_int cst1 = arg1;
          if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
              && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
              && (TREE_CODE (arg0) == PLUS_EXPR
                  || TREE_CODE (arg0) == MINUS_EXPR
                  || TREE_CODE (arg0) == NEGATE_EXPR)
              && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
                  || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
            {
              tree pmop[2];
              int which = 0;
              wide_int cst0;

              /* Now we know that arg0 is (C + D) or (C - D) or
                 -C and arg1 (M) is == (1LL << cst) - 1.
                 Store C into PMOP[0] and D into PMOP[1].  */
              pmop[0] = TREE_OPERAND (arg0, 0);
              pmop[1] = NULL;
              if (TREE_CODE (arg0) != NEGATE_EXPR)
                {
                  pmop[1] = TREE_OPERAND (arg0, 1);
                  which = 1;
                }

              if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
                which = -1;

              for (; which >= 0; which--)
                switch (TREE_CODE (pmop[which]))
                  {
                  case BIT_AND_EXPR:
                  case BIT_IOR_EXPR:
                  case BIT_XOR_EXPR:
                    if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
                        != INTEGER_CST)
                      break;
                    cst0 = TREE_OPERAND (pmop[which], 1);
                    cst0 &= cst1;
                    if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
                      {
                        if (cst0 != cst1)
                          break;
                      }
                    else if (cst0 != 0)
                      break;
                    /* If C or D is of the form (A & N) where
                       (N & M) == M, or of the form (A | N) or
                       (A ^ N) where (N & M) == 0, replace it with A.  */
                    pmop[which] = TREE_OPERAND (pmop[which], 0);
                    break;
                  case INTEGER_CST:
                    /* If C or D is a N where (N & M) == 0, it can be
                       omitted (assumed 0).  */
                    if ((TREE_CODE (arg0) == PLUS_EXPR
                         || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
                        && (cst1 & pmop[which]) == 0)
                      pmop[which] = NULL;
                    break;
                  default:
                    break;
                  }

              /* Only build anything new if we optimized one or both arguments
                 above.  */
              if (pmop[0] != TREE_OPERAND (arg0, 0)
                  || (TREE_CODE (arg0) != NEGATE_EXPR
                      && pmop[1] != TREE_OPERAND (arg0, 1)))
                {
                  tree utype = TREE_TYPE (arg0);
                  if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
                    {
                      /* Perform the operations in a type that has defined
                         overflow behavior.  */
                      utype = unsigned_type_for (TREE_TYPE (arg0));
                      if (pmop[0] != NULL)
                        pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
                      if (pmop[1] != NULL)
                        pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
                    }

                  if (TREE_CODE (arg0) == NEGATE_EXPR)
                    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
                  else if (TREE_CODE (arg0) == PLUS_EXPR)
                    {
                      if (pmop[0] != NULL && pmop[1] != NULL)
                        tem = fold_build2_loc (loc, PLUS_EXPR, utype,
                                               pmop[0], pmop[1]);
                      else if (pmop[0] != NULL)
                        tem = pmop[0];
                      else if (pmop[1] != NULL)
                        tem = pmop[1];
                      else
                        return build_int_cst (type, 0);
                    }
                  else if (pmop[0] == NULL)
                    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
                  else
                    tem = fold_build2_loc (loc, MINUS_EXPR, utype,
                                           pmop[0], pmop[1]);
                  /* TEM is now the new binary +, - or unary - replacement.  */
                  tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
                                         fold_convert_loc (loc, utype, arg1));
                  return fold_convert_loc (loc, type, tem);
                }
            }
        }
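      /* Example: with M = 0x0F, "((a | 0xF0) + b) & 0x0F" folds to
         "(a + b) & 0x0F" -- the bits ORed in by 0xF0 are discarded by
         the mask anyway.  */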
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
          && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
        {
          prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));

          wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
          if (mask == -1)
            return
              fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
        }

      goto associate;

    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
         of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
          && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
          && real_zerop (arg1))
        return NULL_TREE;
      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold_build2_loc (loc, RDIV_EXPR, type,
                                TREE_OPERAND (arg0, 0),
                                negate_expr (arg1));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold_build2_loc (loc, RDIV_EXPR, type,
                                negate_expr (arg0),
                                TREE_OPERAND (arg1, 0));

      /* Convert A/B/C to A/(B*C).  */
      if (flag_reciprocal_math
          && TREE_CODE (arg0) == RDIV_EXPR)
        return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
                                fold_build2_loc (loc, MULT_EXPR, type,
                                                 TREE_OPERAND (arg0, 1), arg1));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_reciprocal_math
          && TREE_CODE (arg1) == RDIV_EXPR)
        return fold_build2_loc (loc, MULT_EXPR, type,
                                fold_build2_loc (loc, RDIV_EXPR, type, arg0,
                                                 TREE_OPERAND (arg1, 0)),
                                TREE_OPERAND (arg1, 1));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_reciprocal_math
          && TREE_CODE (arg1) == MULT_EXPR
          && TREE_CODE (arg0) == REAL_CST
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
        {
          tree tem = const_binop (RDIV_EXPR, arg0,
                                  TREE_OPERAND (arg1, 1));
          if (tem)
            return fold_build2_loc (loc, RDIV_EXPR, type, tem,
                                    TREE_OPERAND (arg1, 0));
        }
      if (flag_unsafe_math_optimizations)
        {
          enum built_in_function fcode0 = builtin_mathfn_code (arg0);
          enum built_in_function fcode1 = builtin_mathfn_code (arg1);

          /* Optimize sin(x)/cos(x) as tan(x).  */
          if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
               || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
               || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
              && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                  CALL_EXPR_ARG (arg1, 0), 0))
            {
              tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

              if (tanfn != NULL_TREE)
                return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
            }

          /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
          if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
               || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
               || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
              && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                  CALL_EXPR_ARG (arg1, 0), 0))
            {
              tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

              if (tanfn != NULL_TREE)
                {
                  tree tmp = build_call_expr_loc (loc, tanfn, 1,
                                                  CALL_EXPR_ARG (arg0, 0));
                  return fold_build2_loc (loc, RDIV_EXPR, type,
                                          build_real (type, dconst1), tmp);
                }
            }

          /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
             NaNs or Infinities.  */
          if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
               || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
               || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
            {
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
              tree arg01 = CALL_EXPR_ARG (arg1, 0);

              if (! HONOR_NANS (arg00)
                  && ! HONOR_INFINITIES (element_mode (arg00))
                  && operand_equal_p (arg00, arg01, 0))
                {
                  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

                  if (cosfn != NULL_TREE)
                    return build_call_expr_loc (loc, cosfn, 1, arg00);
                }
            }

          /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
             NaNs or Infinities.  */
          if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
               || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
               || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
            {
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
              tree arg01 = CALL_EXPR_ARG (arg1, 0);

              if (! HONOR_NANS (arg00)
                  && ! HONOR_INFINITIES (element_mode (arg00))
                  && operand_equal_p (arg00, arg01, 0))
                {
                  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

                  if (cosfn != NULL_TREE)
                    {
                      tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
                      return fold_build2_loc (loc, RDIV_EXPR, type,
                                              build_real (type, dconst1),
                                              tmp);
                    }
                }
            }
          /* Optimize pow(x,c)/x as pow(x,c-1).  */
          if (fcode0 == BUILT_IN_POW
              || fcode0 == BUILT_IN_POWF
              || fcode0 == BUILT_IN_POWL)
            {
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
              tree arg01 = CALL_EXPR_ARG (arg0, 1);
              if (TREE_CODE (arg01) == REAL_CST
                  && !TREE_OVERFLOW (arg01)
                  && operand_equal_p (arg1, arg00, 0))
                {
                  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  REAL_VALUE_TYPE c;
                  tree arg;

                  c = TREE_REAL_CST (arg01);
                  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
                  arg = build_real (type, c);
                  return build_call_expr_loc (loc, powfn, 2, arg1, arg);
                }
            }

          /* Optimize a/root(b/c) into a*root(c/b).  */
          if (BUILTIN_ROOT_P (fcode1))
            {
              tree rootarg = CALL_EXPR_ARG (arg1, 0);

              if (TREE_CODE (rootarg) == RDIV_EXPR)
                {
                  tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
                  tree b = TREE_OPERAND (rootarg, 0);
                  tree c = TREE_OPERAND (rootarg, 1);

                  tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);

                  tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
                  return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
                }
            }

          /* Optimize x/expN(y) into x*expN(-y).  */
          if (BUILTIN_EXPONENT_P (fcode1))
            {
              tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
              tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
              arg1 = build_call_expr_loc (loc,
                                          expfn, 1,
                                          fold_convert_loc (loc, type, arg));
              return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
            }

          /* Optimize x/pow(y,z) into x*pow(y,-z).  */
          if (fcode1 == BUILT_IN_POW
              || fcode1 == BUILT_IN_POWF
              || fcode1 == BUILT_IN_POWL)
            {
              tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
              tree arg10 = CALL_EXPR_ARG (arg1, 0);
              tree arg11 = CALL_EXPR_ARG (arg1, 1);
              tree neg11 = fold_convert_loc (loc, type,
                                             negate_expr (arg11));
              arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
              return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
            }
        }
      return NULL_TREE;
    case TRUNC_DIV_EXPR:
      /* Optimize (X & (-A)) / A where A is a power of 2,
         to X >> log2(A) */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
          && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
        {
          tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
                                      arg1, TREE_OPERAND (arg0, 1));
          if (sum && integer_zerop (sum)) {
            tree pow2 = build_int_cst (integer_type_node,
                                       wi::exact_log2 (arg1));
            return fold_build2_loc (loc, RSHIFT_EXPR, type,
                                    TREE_OPERAND (arg0, 0), pow2);
          }
        }

      /* Fall through */
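      /* Example: "(x & -8) / 8" becomes "x >> 3" -- the mask
         guarantees the low bits are zero, so the shift is exact.  */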
    case FLOOR_DIV_EXPR:
      /* Simplify A / (B << N) where A and B are positive and B is
         a power of 2, to A >> (N + log2(B)).  */
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == LSHIFT_EXPR
          && (TYPE_UNSIGNED (type)
              || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
        {
          tree sval = TREE_OPERAND (arg1, 0);
          if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
            {
              tree sh_cnt = TREE_OPERAND (arg1, 1);
              tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
                                         wi::exact_log2 (sval));

              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying A / (B << N)"),
                                       WARN_STRICT_OVERFLOW_MISC);

              sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
                                        sh_cnt, pow2);
              return fold_build2_loc (loc, RSHIFT_EXPR, type,
                                      fold_convert_loc (loc, type, arg0), sh_cnt);
            }
        }

      /* Fall through */
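      /* Example: for unsigned a, "a / (2 << n)" becomes "a >> (n + 1)",
         folding log2 of the shifted base into the shift count.  */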
    case ROUND_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_zerop (arg1))
        return NULL_TREE;

      /* Convert -A / -B to A / B when the type is signed and overflow is
         undefined.  */
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
          && TREE_CODE (arg0) == NEGATE_EXPR
          && negate_expr_p (arg1))
        {
          if (INTEGRAL_TYPE_P (type))
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when distributing negation across "
                                    "division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_build2_loc (loc, code, type,
                                  fold_convert_loc (loc, type,
                                                    TREE_OPERAND (arg0, 0)),
                                  fold_convert_loc (loc, type,
                                                    negate_expr (arg1)));
        }
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
          && TREE_CODE (arg1) == NEGATE_EXPR
          && negate_expr_p (arg0))
        {
          if (INTEGRAL_TYPE_P (type))
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when distributing negation across "
                                    "division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_build2_loc (loc, code, type,
                                  fold_convert_loc (loc, type,
                                                    negate_expr (arg0)),
                                  fold_convert_loc (loc, type,
                                                    TREE_OPERAND (arg1, 0)));
        }
      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
         operation, EXACT_DIV_EXPR.

         Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
         At one time others generated faster code, it's not clear if they do
         after the last round of changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
          && multiple_of_p (type, arg0, arg1))
        return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
                                fold_convert (type, arg0),
                                fold_convert (type, arg1));

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                         &strict_overflow_p)))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_convert_loc (loc, type, tem);
        }

      return NULL_TREE;
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                         &strict_overflow_p)))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying modulus"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_convert_loc (loc, type, tem);
        }

      return NULL_TREE;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* Since negative shift count is not well-defined,
         don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
        return NULL_TREE;

      prec = element_precision (type);
      /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
         into x & ((unsigned)-1 >> c) for unsigned types.  */
      if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
           || (TYPE_UNSIGNED (type)
               && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
          && tree_fits_uhwi_p (arg1)
          && tree_to_uhwi (arg1) < prec
          && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
          && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
        {
          HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
          HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
          tree lshift;
          tree arg00;

          if (low0 == low1)
            {
              arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));

              lshift = build_minus_one_cst (type);
              lshift = const_binop (code, lshift, arg1);

              return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
            }
        }
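      /* Example: for unsigned x, "(x << 4) >> 4" becomes
         "x & (~0U >> 4)", i.e. a single mask of the high nibble.  */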
    bit_rotate:
      /* If we have a rotate of a bit operation with the rotate count and
         the second operand of the bit operation both constant,
         permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
          && (TREE_CODE (arg0) == BIT_AND_EXPR
              || TREE_CODE (arg0) == BIT_IOR_EXPR
              || TREE_CODE (arg0) == BIT_XOR_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return fold_build2_loc (loc, TREE_CODE (arg0), type,
                                fold_build2_loc (loc, code, type,
                                                 TREE_OPERAND (arg0, 0), arg1),
                                fold_build2_loc (loc, code, type,
                                                 TREE_OPERAND (arg0, 1), arg1));
      /* Two consecutive rotates adding up to some integer
         multiple of the precision of the type can be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) == RROTATE_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
                             prec) == 0)
        return TREE_OPERAND (arg0, 0);

      return NULL_TREE;

    case MIN_EXPR:
      tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
      if (tem)
        return tem;
      goto associate;

    case MAX_EXPR:
      tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
      if (tem)
        return tem;
      goto associate;
    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
        return fold_convert_loc (loc, type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
          /* Preserve sequence points.  */
          && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
         must be evaluated.  */
      if (integer_zerop (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
         case will be handled here.  */
      if (integer_zerop (arg0))
        return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
         means A >= Y && A != MAX, but in this case we know that
         A < X <= MAX.  */

      if (!TREE_SIDE_EFFECTS (arg0)
          && !TREE_SIDE_EFFECTS (arg1))
        {
          tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
          if (tem && !operand_equal_p (tem, arg0, 0))
            return fold_build2_loc (loc, code, type, tem, arg1);

          tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
          if (tem && !operand_equal_p (tem, arg1, 0))
            return fold_build2_loc (loc, code, type, arg0, tem);
        }

      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
          != NULL_TREE)
        return tem;

      return NULL_TREE;
    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or true.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return fold_convert_loc (loc, type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
          /* Preserve sequence points.  */
          && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant true, result is true, but we must
         evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
         TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X || X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg1);
      /* X || !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* (X && !Y) || (!X && Y) is X ^ Y */
      if (TREE_CODE (arg0) == TRUTH_AND_EXPR
          && TREE_CODE (arg1) == TRUTH_AND_EXPR)
        {
          tree a0, a1, l0, l1, n0, n1;

          a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
          a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));

          l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));

          n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
          n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);

          if ((operand_equal_p (n0, a0, 0)
               && operand_equal_p (n1, a1, 0))
              || (operand_equal_p (n0, a1, 0)
                  && operand_equal_p (n1, a0, 0)))
            return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
        }

      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
          != NULL_TREE)
        return tem;

      return NULL_TREE;
    case TRUTH_XOR_EXPR:
      /* If the second arg is constant zero, drop it.  */
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If the second arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg1))
        {
          tem = invert_truthvalue_loc (loc, arg0);
          return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
        }
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* !X ^ X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg1);

      /* X ^ !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      return NULL_TREE;

    case EQ_EXPR:
    case NE_EXPR:
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
        return tem;
      /* bool_var != 1 becomes !bool_var. */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
          && code == NE_EXPR)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, TRUTH_NOT_EXPR,
                                                  TREE_TYPE (arg0), arg0));

      /* bool_var == 0 becomes !bool_var. */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
          && code == EQ_EXPR)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, TRUTH_NOT_EXPR,
                                                  TREE_TYPE (arg0), arg0));

      /* !exp != 0 becomes !exp */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
          && code == NE_EXPR)
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* Transform comparisons of the form X +- Y CMP X to Y CMP 0.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR
           || TREE_CODE (arg0) == POINTER_PLUS_EXPR
           || TREE_CODE (arg0) == MINUS_EXPR)
          && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
                                                                        0)),
                              arg1, 0)
          && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
              || POINTER_TYPE_P (TREE_TYPE (arg0))))
        {
          tree val = TREE_OPERAND (arg0, 1);
          return omit_two_operands_loc (loc, type,
                                        fold_build2_loc (loc, code, type,
                                                         val,
                                                         build_int_cst (TREE_TYPE (val),
                                                                        0)),
                                        TREE_OPERAND (arg0, 0), arg1);
        }

      /* Transform comparisons of the form C - X CMP X if C % 2 == 1.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
          && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
                                                                        1)),
                              arg1, 0)
          && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
        {
          return omit_two_operands_loc (loc, type,
                                        code == NE_EXPR
                                        ? boolean_true_node : boolean_false_node,
                                        TREE_OPERAND (arg0, 1), arg1);
        }
      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
      if (TREE_CODE (arg0) == ABS_EXPR
          && (integer_zerop (arg1) || real_zerop (arg1)))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);

      /* If this is an EQ or NE comparison with zero and ARG0 is
         (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
         two operations, but the latter can be done in one less insn
         on machines that have only two-operand insns or on which a
         constant cannot be the first operand.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_zerop (arg1))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          if (TREE_CODE (arg00) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg00, 0)))
            {
              tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
                                          arg01, TREE_OPERAND (arg00, 1));
              tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
                                     build_int_cst (TREE_TYPE (arg0), 1));
              return fold_build2_loc (loc, code, type,
                                      fold_convert_loc (loc, TREE_TYPE (arg1),
                                                        tem),
                                      arg1);
            }
          else if (TREE_CODE (arg01) == LSHIFT_EXPR
                   && integer_onep (TREE_OPERAND (arg01, 0)))
            {
              tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
                                          arg00, TREE_OPERAND (arg01, 1));
              tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
                                     build_int_cst (TREE_TYPE (arg0), 1));
              return fold_build2_loc (loc, code, type,
                                      fold_convert_loc (loc, TREE_TYPE (arg1),
                                                        tem),
                                      arg1);
            }
        }
      /* If this is an NE or EQ comparison of zero against the result of a
         signed MOD operation whose second operand is a power of 2, make
         the MOD operation unsigned since it is simpler and equivalent.  */
      if (integer_zerop (arg1)
          && !TYPE_UNSIGNED (TREE_TYPE (arg0))
          && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
              || TREE_CODE (arg0) == CEIL_MOD_EXPR
              || TREE_CODE (arg0) == FLOOR_MOD_EXPR
              || TREE_CODE (arg0) == ROUND_MOD_EXPR)
          && integer_pow2p (TREE_OPERAND (arg0, 1)))
        {
          tree newtype = unsigned_type_for (TREE_TYPE (arg0));
          tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
                                         fold_convert_loc (loc, newtype,
                                                           TREE_OPERAND (arg0, 0)),
                                         fold_convert_loc (loc, newtype,
                                                           TREE_OPERAND (arg0, 1)));

          return fold_build2_loc (loc, code, type, newmod,
                                  fold_convert_loc (loc, newtype, arg1));
        }
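      /* Example: for signed x, "x % 16 == 0" is rewritten as
         "(unsigned) x % 16 == 0", which later folds to a simple mask
         test.  */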
      /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
         C1 is a valid shift constant, and C2 is a power of two, i.e.
         a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
             == INTEGER_CST
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && integer_zerop (arg1))
        {
          tree itype = TREE_TYPE (arg0);
          tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
          prec = TYPE_PRECISION (itype);

          /* Check for a valid shift count.  */
          if (wi::ltu_p (arg001, prec))
            {
              tree arg01 = TREE_OPERAND (arg0, 1);
              tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
              unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
              /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
                 can be rewritten as (X & (C2 << C1)) != 0.  */
              if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
                {
                  tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
                  tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
                  return fold_build2_loc (loc, code, type, tem,
                                          fold_convert_loc (loc, itype, arg1));
                }
              /* Otherwise, for signed (arithmetic) shifts,
                 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
                 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
              else if (!TYPE_UNSIGNED (itype))
                return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
                                        arg000, build_int_cst (itype, 0));
              /* Otherwise, for unsigned (logical) shifts,
                 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
                 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
              else
                return omit_one_operand_loc (loc, type,
                                             code == EQ_EXPR ? integer_one_node
                                             : integer_zero_node,
                                             arg000);
            }
        }
      /* If we have (A & C) == C where C is a power of 2, convert this into
         (A & C) != 0.  Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
                                arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
                                                        integer_zero_node));

      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
         bit, then fold the expression into A < 0 or A >= 0.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
      if (tem)
        return tem;

      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
         Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
                                       TREE_TYPE (TREE_OPERAND (arg0, 1)),
                                       TREE_OPERAND (arg0, 1));
          tree dandnotc
            = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
                               fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
                               notc);
          tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
          if (integer_nonzerop (dandnotc))
            return omit_one_operand_loc (loc, type, rslt, arg0);
        }
      /* If this is a comparison of a field, we may be able to simplify it.  */
      if ((TREE_CODE (arg0) == COMPONENT_REF
           || TREE_CODE (arg0) == BIT_FIELD_REF)
          /* Handle the constant case even without -O
             to make sure the warnings are given.  */
          && (optimize || TREE_CODE (arg1) == INTEGER_CST))
        {
          t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
          if (t1)
            return t1;
        }

      /* Optimize comparisons of strlen vs zero to a compare of the
         first character of the string vs zero.  To wit,
                strlen(ptr) == 0   =>  *ptr == 0
                strlen(ptr) != 0   =>  *ptr != 0
         Other cases should reduce to one of these two (or a constant)
         due to the return value of strlen being unsigned.  */
      if (TREE_CODE (arg0) == CALL_EXPR
          && integer_zerop (arg1))
        {
          tree fndecl = get_callee_fndecl (arg0);

          if (fndecl
              && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
              && call_expr_nargs (arg0) == 1
              && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
            {
              tree iref = build_fold_indirect_ref_loc (loc,
                                                       CALL_EXPR_ARG (arg0, 0));
              return fold_build2_loc (loc, code, type, iref,
                                      build_int_cst (TREE_TYPE (iref), 0));
            }
        }
      /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
         of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
      if (TREE_CODE (arg0) == RSHIFT_EXPR
          && integer_zerop (arg1)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree itype = TREE_TYPE (arg00);
          if (wi::eq_p (arg01, element_precision (itype) - 1))
            {
              if (TYPE_UNSIGNED (itype))
                {
                  itype = signed_type_for (itype);
                  arg00 = fold_convert_loc (loc, itype, arg00);
                }
              return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
                                      type, arg00, build_zero_cst (itype));
            }
        }
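      /* Example: for 32-bit int x, "(x >> 31) != 0" becomes "x < 0" --
         the shift extracts exactly the sign bit.  */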
      /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
         (X & C) == 0 when C is a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
          && integer_zerop (arg1)
          && integer_pow2p (TREE_OPERAND (arg0, 1)))
        {
          tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
                                 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
                                 TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
                                  type, tem,
                                  fold_convert_loc (loc, TREE_TYPE (arg0),
                                                    arg1));
        }

      /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
         constant C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
          && integer_zerop (arg1)
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
                                  arg00, build_int_cst (TREE_TYPE (arg00), 0));
        }
      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
         when C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
          && integer_zerop (arg1)
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
        {
          tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
          tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
                                 arg000, TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
                                  tem, build_int_cst (TREE_TYPE (tem), 0));
        }

      if (integer_zerop (arg1)
          && tree_expr_nonzero_p (arg0))
        {
          tree res = constant_boolean_node (code == NE_EXPR, type);
          return omit_one_operand_loc (loc, type, res, arg0);
        }
      /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0, and symmetries.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == BIT_AND_EXPR)
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg10 = TREE_OPERAND (arg1, 0);
          tree arg11 = TREE_OPERAND (arg1, 1);
          tree itype = TREE_TYPE (arg0);

          if (operand_equal_p (arg01, arg11, 0))
            return fold_build2_loc (loc, code, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                                     fold_build2_loc (loc,
                                                                      BIT_XOR_EXPR, itype,
                                                                      arg00, arg10),
                                                     arg01),
                                    build_zero_cst (itype));

          if (operand_equal_p (arg01, arg10, 0))
            return fold_build2_loc (loc, code, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                                     fold_build2_loc (loc,
                                                                      BIT_XOR_EXPR, itype,
                                                                      arg00, arg11),
                                                     arg01),
                                    build_zero_cst (itype));

          if (operand_equal_p (arg00, arg11, 0))
            return fold_build2_loc (loc, code, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                                     fold_build2_loc (loc,
                                                                      BIT_XOR_EXPR, itype,
                                                                      arg01, arg10),
                                                     arg00),
                                    build_zero_cst (itype));

          if (operand_equal_p (arg00, arg10, 0))
            return fold_build2_loc (loc, code, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                                     fold_build2_loc (loc,
                                                                      BIT_XOR_EXPR, itype,
                                                                      arg01, arg11),
                                                     arg00),
                                    build_zero_cst (itype));
        }
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (arg1) == BIT_XOR_EXPR)
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg10 = TREE_OPERAND (arg1, 0);
          tree arg11 = TREE_OPERAND (arg1, 1);
          tree itype = TREE_TYPE (arg0);

          /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
             operand_equal_p guarantees no side-effects so we don't need
             to use omit_one_operand on Z.  */
          if (operand_equal_p (arg01, arg11, 0))
            return fold_build2_loc (loc, code, type, arg00,
                                    fold_convert_loc (loc, TREE_TYPE (arg00),
                                                      arg10));
          if (operand_equal_p (arg01, arg10, 0))
            return fold_build2_loc (loc, code, type, arg00,
                                    fold_convert_loc (loc, TREE_TYPE (arg00),
                                                      arg11));
          if (operand_equal_p (arg00, arg11, 0))
            return fold_build2_loc (loc, code, type, arg01,
                                    fold_convert_loc (loc, TREE_TYPE (arg01),
                                                      arg10));
          if (operand_equal_p (arg00, arg10, 0))
            return fold_build2_loc (loc, code, type, arg01,
                                    fold_convert_loc (loc, TREE_TYPE (arg01),
                                                      arg11));

          /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
          if (TREE_CODE (arg01) == INTEGER_CST
              && TREE_CODE (arg11) == INTEGER_CST)
            {
              tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
                                     fold_convert_loc (loc, itype, arg11));
              tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
              return fold_build2_loc (loc, code, type, tem,
                                      fold_convert_loc (loc, itype, arg10));
            }
        }
      /* Attempt to simplify equality/inequality comparisons of complex
         values.  Only lower the comparison if the result is known or
         can be simplified to a single scalar comparison.  */
      if ((TREE_CODE (arg0) == COMPLEX_EXPR
           || TREE_CODE (arg0) == COMPLEX_CST)
          && (TREE_CODE (arg1) == COMPLEX_EXPR
              || TREE_CODE (arg1) == COMPLEX_CST))
        {
          tree real0, imag0, real1, imag1;
          tree rcond, icond;

          if (TREE_CODE (arg0) == COMPLEX_EXPR)
            {
              real0 = TREE_OPERAND (arg0, 0);
              imag0 = TREE_OPERAND (arg0, 1);
            }
          else
            {
              real0 = TREE_REALPART (arg0);
              imag0 = TREE_IMAGPART (arg0);
            }

          if (TREE_CODE (arg1) == COMPLEX_EXPR)
            {
              real1 = TREE_OPERAND (arg1, 0);
              imag1 = TREE_OPERAND (arg1, 1);
            }
          else
            {
              real1 = TREE_REALPART (arg1);
              imag1 = TREE_IMAGPART (arg1);
            }

          rcond = fold_binary_loc (loc, code, type, real0, real1);
          if (rcond && TREE_CODE (rcond) == INTEGER_CST)
            {
              if (integer_zerop (rcond))
                {
                  if (code == EQ_EXPR)
                    return omit_two_operands_loc (loc, type, boolean_false_node,
                                                  imag0, imag1);
                  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
                }
              else
                {
                  if (code == NE_EXPR)
                    return omit_two_operands_loc (loc, type, boolean_true_node,
                                                  imag0, imag1);
                  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
                }
            }

          icond = fold_binary_loc (loc, code, type, imag0, imag1);
          if (icond && TREE_CODE (icond) == INTEGER_CST)
            {
              if (integer_zerop (icond))
                {
                  if (code == EQ_EXPR)
                    return omit_two_operands_loc (loc, type, boolean_false_node,
                                                  real0, real1);
                  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
                }
              else
                {
                  if (code == NE_EXPR)
                    return omit_two_operands_loc (loc, type, boolean_true_node,
                                                  real0, real1);
                  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
                }
            }
        }

      return NULL_TREE;

    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
        return tem;
      /* Transform comparisons of the form X +- C CMP X.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
               && !HONOR_SNANS (arg0))
              || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
        {
          tree arg01 = TREE_OPERAND (arg0, 1);
          enum tree_code code0 = TREE_CODE (arg0);
          int is_positive;

          if (TREE_CODE (arg01) == REAL_CST)
            is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
          else
            is_positive = tree_int_cst_sgn (arg01);

          /* (X - c) > X becomes false.  */
          if (code == GT_EXPR
              && ((code0 == MINUS_EXPR && is_positive >= 0)
                  || (code0 == PLUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that (X - c) > X "
                                        "is always false"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (0, type);
            }

          /* Likewise (X + c) < X becomes false.  */
          if (code == LT_EXPR
              && ((code0 == PLUS_EXPR && is_positive >= 0)
                  || (code0 == MINUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that "
                                        "(X + c) < X is always false"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (0, type);
            }
          /* Convert (X - c) <= X to true.  */
          if (!HONOR_NANS (arg1)
              && code == LE_EXPR
              && ((code0 == MINUS_EXPR && is_positive >= 0)
                  || (code0 == PLUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that "
                                        "(X - c) <= X is always true"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (1, type);
            }

          /* Convert (X + c) >= X to true.  */
          if (!HONOR_NANS (arg1)
              && code == GE_EXPR
              && ((code0 == PLUS_EXPR && is_positive >= 0)
                  || (code0 == MINUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that "
                                        "(X + c) >= X is always true"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (1, type);
            }

          if (TREE_CODE (arg01) == INTEGER_CST)
            {
              /* Convert X + c > X and X - c < X to true for integers.  */
              if (code == GT_EXPR
                  && ((code0 == PLUS_EXPR && is_positive > 0)
                      || (code0 == MINUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X + c) > X is always true"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (1, type);
                }

              if (code == LT_EXPR
                  && ((code0 == MINUS_EXPR && is_positive > 0)
                      || (code0 == PLUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X - c) < X is always true"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (1, type);
                }

              /* Convert X + c <= X and X - c >= X to false for integers.  */
              if (code == LE_EXPR
                  && ((code0 == PLUS_EXPR && is_positive > 0)
                      || (code0 == MINUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X + c) <= X is always false"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (0, type);
                }

              if (code == GE_EXPR
                  && ((code0 == MINUS_EXPR && is_positive > 0)
                      || (code0 == PLUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X - c) >= X is always false"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (0, type);
                }
            }
        }
      /* If we are comparing an ABS_EXPR with a constant, we can
         convert all the cases into explicit comparisons, but they may
         well not be faster than doing the ABS and one comparison.
         But ABS (X) <= C is a range comparison, which becomes a subtraction
         and a comparison, and is probably faster.  */
      if (code == LE_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) == ABS_EXPR
          && ! TREE_SIDE_EFFECTS (arg0)
          && (0 != (tem = negate_expr (arg1)))
          && TREE_CODE (tem) == INTEGER_CST
          && !TREE_OVERFLOW (tem))
        return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
                                build2 (GE_EXPR, type,
                                        TREE_OPERAND (arg0, 0), tem),
                                build2 (LE_EXPR, type,
                                        TREE_OPERAND (arg0, 0), arg1));
      /* Convert ABS_EXPR<x> >= 0 to true.  */
      strict_overflow_p = false;
      if (code == GE_EXPR
          && (integer_zerop (arg1)
              || (! HONOR_NANS (arg0)
                  && real_zerop (arg1)))
          && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying comparison of "
                                    "absolute value and zero"),
                                   WARN_STRICT_OVERFLOW_CONDITIONAL);
          return omit_one_operand_loc (loc, type,
                                       constant_boolean_node (true, type),
                                       arg0);
        }

      /* Convert ABS_EXPR<x> < 0 to false.  */
      strict_overflow_p = false;
      if (code == LT_EXPR
          && (integer_zerop (arg1) || real_zerop (arg1))
          && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying comparison of "
                                    "absolute value and zero"),
                                   WARN_STRICT_OVERFLOW_CONDITIONAL);
          return omit_one_operand_loc (loc, type,
                                       constant_boolean_node (false, type),
                                       arg0);
        }
      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
	 and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TREE_CODE (arg1) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (arg1, 0)))
	return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
			   build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
				   TREE_OPERAND (arg1, 1)),
			   build_zero_cst (TREE_TYPE (arg0)));
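      /* Editorial note: for unsigned X and an in-range shift count Y,
	 X < (1 << Y) holds exactly when no bit of X at position Y or
	 above is set, i.e. when X >> Y == 0; the >= case is the
	 complementary X >> Y != 0.  */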
      /* Similarly for X < (cast) (1 << Y).  But cast can't be narrowing,
	 otherwise Y might be >= # of bits in X's type and thus e.g.
	 (unsigned char) (1 << Y) for Y 15 might be 0.
	 If the cast is widening, then 1 << Y should have unsigned type,
	 otherwise if Y is number of bits in the signed shift type minus 1,
	 we can't optimize this.  E.g. (unsigned long long) (1 << Y) for Y
	 31 might be 0xffffffff80000000.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && CONVERT_EXPR_P (arg1)
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
	  && (element_precision (TREE_TYPE (arg1))
	      >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
	  && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
	      || (element_precision (TREE_TYPE (arg1))
		  == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
	  && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
	{
	  tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
			TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
	  return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
			     fold_convert_loc (loc, TREE_TYPE (arg0), tem),
			     build_zero_cst (TREE_TYPE (arg0)));
	}

      return NULL_TREE;
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
	tree targ0 = strip_float_extensions (arg0);
	tree targ1 = strip_float_extensions (arg1);
	tree newtype = TREE_TYPE (targ0);

	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	  newtype = TREE_TYPE (targ1);

	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, newtype, targ0),
				  fold_convert_loc (loc, newtype, targ1));

	return NULL_TREE;
      }
    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
	 nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
	return NULL_TREE;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
				 : fold_convert_loc (loc, type, arg1);
      return pedantic_non_lvalue_loc (loc, tem);

    case ASSERT_EXPR:
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
      gcc_unreachable ();

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
   a LABEL_EXPR; otherwise return NULL_TREE.  Do not check the subtrees
   of GOTO_EXPR.  */

static tree
contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      return *tp;

    case GOTO_EXPR:
      *walk_subtrees = 0;

      /* ... fall through ...  */

    default:
      return NULL_TREE;
    }
}

/* Return whether the sub-tree ST contains a label which is accessible from
   outside the sub-tree.  */

static bool
contains_label_p (tree st)
{
  return
   (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
}
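/* Editorial sketch (not part of the original source): the intended use
   of the predicate above before discarding a dead branch; DEAD_BRANCH
   is a hypothetical operand.  */
#if 0
  if (TREE_SIDE_EFFECTS (dead_branch) || contains_label_p (dead_branch))
    /* A goto outside DEAD_BRANCH may still target a label inside it,
       so folding must keep the branch.  */
    return NULL_TREE;
#endif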
/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_ternary_loc (location_t loc, enum tree_code code, tree type,
		  tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 3);

  /* If this is a commutative operation, and OP0 is a constant, move it
     to OP1 to reduce the number of tests below.  */
  if (commutative_ternary_tree_code (code)
      && tree_swap_operands_p (op0, op1, true))
    return fold_build3_loc (loc, code, type, op1, op0, op2);

  tem = generic_simplify (loc, code, type, op0, op1, op2);
  if (tem)
    return tem;

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any cases,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */
  if (op0)
    {
      arg0 = op0;
      STRIP_NOPS (arg0);
    }

  if (op1)
    {
      arg1 = op1;
      STRIP_NOPS (arg1);
    }

  if (op2)
    {
      arg2 = op2;
      STRIP_NOPS (arg2);
    }

  switch (code)
    {
    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
	{
	  unsigned HOST_WIDE_INT idx;
	  tree field, value;
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
	    if (field == arg1)
	      return value;
	}
      return NULL_TREE;
    case COND_EXPR:
    case VEC_COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
	 so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
	  tem = integer_zerop (arg0) ? op2 : op1;
	  /* Only optimize constant conditions when the selected branch
	     has the same type as the COND_EXPR.  This avoids optimizing
	     away "c ? x : throw", where the throw has a void type.
	     Avoid throwing away that operand which contains label.  */
	  if ((!TREE_SIDE_EFFECTS (unused_op)
	       || !contains_label_p (unused_op))
	      && (! VOID_TYPE_P (TREE_TYPE (tem))
		  || VOID_TYPE_P (type)))
	    return pedantic_non_lvalue_loc (loc, tem);
	  return NULL_TREE;
	}
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  if ((TREE_CODE (arg1) == VECTOR_CST
	       || TREE_CODE (arg1) == CONSTRUCTOR)
	      && (TREE_CODE (arg2) == VECTOR_CST
		  || TREE_CODE (arg2) == CONSTRUCTOR))
	    {
	      unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	      unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
	      gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
	      for (i = 0; i < nelts; i++)
		{
		  tree val = VECTOR_CST_ELT (arg0, i);
		  if (integer_all_onesp (val))
		    sel[i] = i;
		  else if (integer_zerop (val))
		    sel[i] = nelts + i;
		  else /* Currently unreachable.  */
		    return NULL_TREE;
		}
	      tree t = fold_vec_perm (type, arg1, arg2, sel);
	      if (t != NULL_TREE)
		return t;
	    }
	}
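      /* Editorial illustration (not from the original source): with four
	 elements per vector, a constant mask { -1, 0, -1, 0 } selecting
	 between A and B yields sel = { 0, 5, 2, 7 }, and fold_vec_perm
	 builds the constant { A[0], B[1], A[2], B[3] }.  */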
      /* If we have A op B ? A : C, we may be able to convert this to a
	 simpler expression, depending on the operation and the values
	 of B and C.  Signed zeros prevent all of these transformations,
	 for reasons given above each one.

	 Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     arg1, TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
	{
	  tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
	  if (tem)
	    return tem;
	}

      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     op2,
					     TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (element_mode (op2)))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  tem = fold_invert_truthvalue (loc0, arg0);
	  if (tem && COMPARISON_CLASS_P (tem))
	    {
	      tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
	      if (tem)
		return tem;
	    }
	}

      /* If the second operand is simpler than the third, swap them
	 since that produces better jump optimization results.  */
      if (truth_value_p (TREE_CODE (arg0))
	  && tree_swap_operands_p (op1, op2, false))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* See if this can be inverted.  If it can't, possibly because
	     it was a floating-point inequality comparison, don't do
	     anything.  */
	  tem = fold_invert_truthvalue (loc0, arg0);
	  if (tem)
	    return fold_build3_loc (loc, code, type, tem, op2, op1);
	}
      /* Convert A ? 1 : 0 to simply A.  */
      if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
				 : (integer_onep (op1)
				    && !VECTOR_TYPE_P (type)))
	  && integer_zerop (op2)
	  /* If we try to convert OP0 to our type, the
	     call to fold will try to move the conversion inside
	     a COND, which will recurse.  In that case, the COND_EXPR
	     is probably the best choice, so leave it alone.  */
	  && type == TREE_TYPE (arg0))
	return pedantic_non_lvalue_loc (loc, arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
	 over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
	  && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
				    : (integer_onep (op2)
				       && !VECTOR_TYPE_P (type)))
	  && truth_value_p (TREE_CODE (arg0)))
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
						invert_truthvalue_loc (loc,
								       arg0)));
      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
      if (TREE_CODE (arg0) == LT_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
	{
	  /* sign_bit_p looks through both zero and sign extensions,
	     but for this optimization only sign extensions are
	     usable.  */
	  tree tem2 = TREE_OPERAND (arg0, 0);
	  while (tem != tem2)
	    {
	      if (TREE_CODE (tem2) != NOP_EXPR
		  || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
		{
		  tem = NULL_TREE;
		  break;
		}
	      tem2 = TREE_OPERAND (tem2, 0);
	    }
	  /* sign_bit_p only checks ARG1 bits within A's precision.
	     If <sign bit of A> has wider type than A, bits outside
	     of A's precision in <sign bit of A> need to be checked.
	     If they are all 0, this optimization needs to be done
	     in unsigned A's type, if they are all 1 in signed A's type,
	     otherwise this can't be done.  */
	  if (tem
	      && TYPE_PRECISION (TREE_TYPE (tem))
		 < TYPE_PRECISION (TREE_TYPE (arg1))
	      && TYPE_PRECISION (TREE_TYPE (tem))
		 < TYPE_PRECISION (type))
	    {
	      int inner_width, outer_width;
	      tree tem_type;

	      inner_width = TYPE_PRECISION (TREE_TYPE (tem));
	      outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
	      if (outer_width > TYPE_PRECISION (type))
		outer_width = TYPE_PRECISION (type);

	      wide_int mask = wi::shifted_mask
		(inner_width, outer_width - inner_width, false,
		 TYPE_PRECISION (TREE_TYPE (arg1)));

	      wide_int common = mask & arg1;
	      if (common == mask)
		{
		  tem_type = signed_type_for (TREE_TYPE (tem));
		  tem = fold_convert_loc (loc, tem_type, tem);
		}
	      else if (common == 0)
		{
		  tem_type = unsigned_type_for (TREE_TYPE (tem));
		  tem = fold_convert_loc (loc, tem_type, tem);
		}
	      else
		tem = NULL_TREE;
	    }

	  if (tem)
	    return
	      fold_convert_loc (loc, type,
				fold_build2_loc (loc, BIT_AND_EXPR,
						 TREE_TYPE (tem), tem,
						 fold_convert_loc (loc,
							TREE_TYPE (tem),
							arg1)));
	}
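      /* Editorial illustration (not from the original source): for 32-bit
	 int A, "A < 0 ? INT_MIN : 0" selects INT_MIN exactly when the
	 sign bit of A is set, so it folds to the bitwise test
	 A & INT_MIN, i.e. a mask with 0x80000000.  */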
      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
	 already handled above.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && integer_pow2p (arg1))
	{
	  tree tem = TREE_OPERAND (arg0, 0);
	  STRIP_NOPS (tem);
	  if (TREE_CODE (tem) == RSHIFT_EXPR
	      && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
	      && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
		 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
	    return fold_build2_loc (loc, BIT_AND_EXPR, type,
				    TREE_OPERAND (tem, 0), arg1);
	}
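      /* E.g. (editorial note): ((a >> 3) & 1) ? 8 : 0 keeps exactly bit 3
	 of a, which is a & 8; the guard above checks that the shift count
	 matches log2 of the selected power of two.  */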
      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
	 is probably obsolete because the first operand should be a
	 truth value (that's why we have the two cases above), but let's
	 leave it in until we can confirm this for all front-ends.  */
      if (integer_zerop (op2)
	  && TREE_CODE (arg0) == NE_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_pow2p (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      arg1, OEP_ONLY_CONST))
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
						TREE_OPERAND (arg0, 0)));
      /* Disable the transformations below for vectors, since
	 fold_binary_op_with_conditional_arg may undo them immediately,
	 yielding an infinite loop.  */
      if (code == VEC_COND_EXPR)
	return NULL_TREE;

      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1))
	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
	return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
							   : TRUTH_ANDIF_EXPR,
				type, fold_convert_loc (loc, type, arg0),
				arg1);

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1))
	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_invert_truthvalue (loc0, arg0);
	  if (tem)
	    return fold_build2_loc (loc, code == VEC_COND_EXPR
					 ? BIT_IOR_EXPR
					 : TRUTH_ORIF_EXPR,
				    type, fold_convert_loc (loc, type, tem),
				    arg1);
	}

      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
      if (integer_zerop (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2))
	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_invert_truthvalue (loc0, arg0);
	  if (tem)
	    return fold_build2_loc (loc, code == VEC_COND_EXPR
					 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
				    type, fold_convert_loc (loc, type, tem),
				    op2);
	}

      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2))
	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
	return fold_build2_loc (loc, code == VEC_COND_EXPR
				     ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
				type, fold_convert_loc (loc, type, arg0),
				op2);

      return NULL_TREE;
    case CALL_EXPR:
      /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
	 of fold_ternary on them.  */
      gcc_unreachable ();
    case BIT_FIELD_REF:
      if ((TREE_CODE (arg0) == VECTOR_CST
	   || (TREE_CODE (arg0) == CONSTRUCTOR
	       && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
	  && (type == TREE_TYPE (TREE_TYPE (arg0))
	      || (TREE_CODE (type) == VECTOR_TYPE
		  && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
	{
	  tree eltype = TREE_TYPE (TREE_TYPE (arg0));
	  unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
	  unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
	  unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);

	  if (n != 0
	      && (idx % width) == 0
	      && (n % width) == 0
	      && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
	    {
	      idx = idx / width;
	      n = n / width;

	      if (TREE_CODE (arg0) == VECTOR_CST)
		{
		  if (n == 1)
		    return VECTOR_CST_ELT (arg0, idx);

		  tree *vals = XALLOCAVEC (tree, n);
		  for (unsigned i = 0; i < n; ++i)
		    vals[i] = VECTOR_CST_ELT (arg0, idx + i);
		  return build_vector (type, vals);
		}

	      /* Constructor elements can be subvectors.  */
	      unsigned HOST_WIDE_INT k = 1;
	      if (CONSTRUCTOR_NELTS (arg0) != 0)
		{
		  tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
		  if (TREE_CODE (cons_elem) == VECTOR_TYPE)
		    k = TYPE_VECTOR_SUBPARTS (cons_elem);
		}

	      /* We keep an exact subset of the constructor elements.  */
	      if ((idx % k) == 0 && (n % k) == 0)
		{
		  if (CONSTRUCTOR_NELTS (arg0) == 0)
		    return build_constructor (type, NULL);
		  idx /= k;
		  n /= k;
		  if (n == 1)
		    {
		      if (idx < CONSTRUCTOR_NELTS (arg0))
			return CONSTRUCTOR_ELT (arg0, idx)->value;
		      return build_zero_cst (type);
		    }
		  vec<constructor_elt, va_gc> *vals;
		  vec_alloc (vals, n);
		  for (unsigned i = 0;
		       i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
		       ++i)
		    CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
					    CONSTRUCTOR_ELT
					      (arg0, idx + i)->value);
		  return build_constructor (type, vals);
		}
	      /* The bitfield references a single constructor element.  */
	      else if (idx + n <= (idx / k + 1) * k)
		{
		  if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
		    return build_zero_cst (type);
		  else if (n == k)
		    return CONSTRUCTOR_ELT (arg0, idx / k)->value;
		  else
		    return fold_build3_loc (loc, code, type,
		      CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
		      build_int_cst (TREE_TYPE (op2), (idx % k) * width));
		}
	    }
	}

      /* A bit-field-ref that referenced the full argument can be stripped.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	  && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
	  && integer_zerop (op2))
	return fold_convert_loc (loc, type, arg0);

      /* On constants we can use native encode/interpret to constant
	 fold (nearly) all BIT_FIELD_REFs.  */
      if (CONSTANT_CLASS_P (arg0)
	  && can_native_interpret_type_p (type)
	  && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
	  /* This limitation should not be necessary, we just need to
	     round this up to mode size.  */
	  && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
	  /* Need bit-shifting of the buffer to relax the following.  */
	  && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
	{
	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
	  unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
	  unsigned HOST_WIDE_INT clen;
	  clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
	  /* ??? We cannot tell native_encode_expr to start at
	     some random byte only.  So limit us to a reasonable amount
	     of work.  */
	  if (clen <= 4096)
	    {
	      unsigned char *b = XALLOCAVEC (unsigned char, clen);
	      unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
	      if (len > 0
		  && len * BITS_PER_UNIT >= bitpos + bitsize)
		{
		  tree v = native_interpret_expr (type,
						  b + bitpos / BITS_PER_UNIT,
						  bitsize / BITS_PER_UNIT);
		  if (v)
		    return v;
		}
	    }
	}

      return NULL_TREE;
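      /* Editorial illustration (not from the original source): for a
	 constant like (int) 0x11223344, the path above serializes it into
	 a byte buffer with native_encode_expr and reinterprets a
	 byte-aligned slice with native_interpret_expr, so a 16-bit
	 BIT_FIELD_REF at bit position 16 folds to a halfword constant
	 (which halfword depends on the target's endianness).  */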
    case FMA_EXPR:
      /* For integers we can decompose the FMA if possible.  */
      if (TREE_CODE (arg0) == INTEGER_CST
	  && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2_loc (loc, PLUS_EXPR, type,
				const_binop (MULT_EXPR, arg0, arg1), arg2);
      if (integer_zerop (arg2))
	return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);

      return fold_fma (loc, type, arg0, arg1, arg2);
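      /* E.g. (editorial note): FMA_EXPR <3, 4, x> folds to 12 + x and
	 FMA_EXPR <a, b, 0> to a * b; integer FMAs carry no intermediate
	 rounding, so the decomposition is always exact.  */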
    case VEC_PERM_EXPR:
      if (TREE_CODE (arg2) == VECTOR_CST)
	{
	  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
	  unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
	  unsigned char *sel2 = sel + nelts;
	  bool need_mask_canon = false;
	  bool need_mask_canon2 = false;
	  bool all_in_vec0 = true;
	  bool all_in_vec1 = true;
	  bool maybe_identity = true;
	  bool single_arg = (op0 == op1);
	  bool changed = false;

	  mask2 = 2 * nelts - 1;
	  mask = single_arg ? (nelts - 1) : mask2;
	  gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
	  for (i = 0; i < nelts; i++)
	    {
	      tree val = VECTOR_CST_ELT (arg2, i);
	      if (TREE_CODE (val) != INTEGER_CST)
		return NULL_TREE;

	      /* Make sure that the perm value is in an acceptable
		 range.  */
	      wide_int t = val;
	      need_mask_canon |= wi::gtu_p (t, mask);
	      need_mask_canon2 |= wi::gtu_p (t, mask2);
	      sel[i] = t.to_uhwi () & mask;
	      sel2[i] = t.to_uhwi () & mask2;

	      if (sel[i] < nelts)
		all_in_vec1 = false;
	      else
		all_in_vec0 = false;

	      if ((sel[i] & (nelts-1)) != i)
		maybe_identity = false;
	    }

	  if (maybe_identity)
	    {
	      if (all_in_vec0)
		return op0;
	      if (all_in_vec1)
		return op1;
	    }

	  if (all_in_vec0)
	    op1 = op0;
	  else if (all_in_vec1)
	    {
	      op0 = op1;
	      for (i = 0; i < nelts; i++)
		sel[i] -= nelts;
	      need_mask_canon = true;
	    }

	  if ((TREE_CODE (op0) == VECTOR_CST
	       || TREE_CODE (op0) == CONSTRUCTOR)
	      && (TREE_CODE (op1) == VECTOR_CST
		  || TREE_CODE (op1) == CONSTRUCTOR))
	    {
	      tree t = fold_vec_perm (type, op0, op1, sel);
	      if (t != NULL_TREE)
		return t;
	    }

	  if (op0 == op1 && !single_arg)
	    changed = true;

	  /* Some targets are deficient and fail to expand a single
	     argument permutation while still allowing an equivalent
	     2-argument version.  */
	  if (need_mask_canon && arg2 == op2
	      && !can_vec_perm_p (TYPE_MODE (type), false, sel)
	      && can_vec_perm_p (TYPE_MODE (type), false, sel2))
	    {
	      need_mask_canon = need_mask_canon2;
	      sel = sel2;
	    }

	  if (need_mask_canon && arg2 == op2)
	    {
	      tree *tsel = XALLOCAVEC (tree, nelts);
	      tree eltype = TREE_TYPE (TREE_TYPE (arg2));
	      for (i = 0; i < nelts; i++)
		tsel[i] = build_int_cst (eltype, sel[i]);
	      op2 = build_vector (TREE_TYPE (arg2), tsel);
	      changed = true;
	    }

	  if (changed)
	    return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
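/* Editorial illustration (not part of the original source): the mask
   canonicalization above at work.  With four-element vectors and
   op0 == op1, a selector { 5, 7, 1, 3 } is reduced modulo the element
   count to { 1, 3, 1, 3 }, so a target that cannot expand the
   two-input form may still handle it as a single-input permutation.  */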
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;
  location_t loc = EXPR_LOCATION (expr);

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  /* CALL_EXPR-like objects with variable numbers of operands are
     treated specially.  */
  if (kind == tcc_vl_exp)
    {
      if (code == CALL_EXPR)
	{
	  tem = fold_call_expr (loc, expr, false);
	  return tem ? tem : expr;
	}
      return expr;
    }

  if (IS_EXPR_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);
      tree op0, op1, op2;

      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  op0 = TREE_OPERAND (t, 0);
	  tem = fold_unary_loc (loc, code, type, op0);
	  return tem ? tem : expr;
	case 2:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  tem = fold_binary_loc (loc, code, type, op0, op1);
	  return tem ? tem : expr;
	case 3:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  op2 = TREE_OPERAND (t, 2);
	  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
	  return tem ? tem : expr;
	default:
	  break;
	}
    }

  switch (code)
    {
    case ARRAY_REF:
      {
	tree op0 = TREE_OPERAND (t, 0);
	tree op1 = TREE_OPERAND (t, 1);

	if (TREE_CODE (op1) == INTEGER_CST
	    && TREE_CODE (op0) == CONSTRUCTOR
	    && ! type_contains_placeholder_p (TREE_TYPE (op0)))
	  {
	    vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
	    unsigned HOST_WIDE_INT end = vec_safe_length (elts);
	    unsigned HOST_WIDE_INT begin = 0;

	    /* Find a matching index by means of a binary search.  */
	    while (begin != end)
	      {
		unsigned HOST_WIDE_INT middle = (begin + end) / 2;
		tree index = (*elts)[middle].index;

		if (TREE_CODE (index) == INTEGER_CST
		    && tree_int_cst_lt (index, op1))
		  begin = middle + 1;
		else if (TREE_CODE (index) == INTEGER_CST
			 && tree_int_cst_lt (op1, index))
		  end = middle;
		else if (TREE_CODE (index) == RANGE_EXPR
			 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
		  begin = middle + 1;
		else if (TREE_CODE (index) == RANGE_EXPR
			 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
		  end = middle;
		else
		  return (*elts)[middle].value;
	      }
	  }

	return t;
      }
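      /* Editorial note: a RANGE_EXPR index stands for the closed interval
	 [low, high], so the two RANGE_EXPR arms above narrow the search
	 just as the INTEGER_CST arms do; e.g. looking up index 3 among
	 elements keyed { 0, [1..5], 9 } lands on the middle entry and
	 returns its value in O(log n) steps rather than a linear scan.  */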
      /* Return a VECTOR_CST if possible.  */
    case CONSTRUCTOR:
      {
	tree type = TREE_TYPE (t);
	if (TREE_CODE (type) != VECTOR_TYPE)
	  return t;

	tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
	unsigned HOST_WIDE_INT idx, pos = 0;
	tree value;

	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
	  {
	    if (!CONSTANT_CLASS_P (value))
	      return t;
	    if (TREE_CODE (value) == VECTOR_CST)
	      {
		for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
		  vec[pos++] = VECTOR_CST_ELT (value, i);
	      }
	    else
	      vec[pos++] = value;
	  }
	for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
	  vec[pos] = build_zero_cst (TREE_TYPE (type));

	return build_vector (type, vec);
      }

    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    default:
      return t;
    } /* switch (code) */
}
#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (const_tree, struct md5_ctx *,
				hash_table<nofree_ptr_hash<const tree_node> > *);
static void fold_check_failed (const_tree, const_tree);
void print_fold_checksum (const_tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */
tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  hash_table<nofree_ptr_hash<const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before);
  ht.empty ();

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}
/* Print the MD5 checksum of EXPR to stderr.  */

void
print_fold_checksum (const_tree expr)
{
  struct md5_ctx ctx;
  unsigned char checksum[16], cnt;
  hash_table<nofree_ptr_hash<const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum);
  for (cnt = 0; cnt < 16; ++cnt)
    fprintf (stderr, "%02x", checksum[cnt]);
  putc ('\n', stderr);
}

static void
fold_check_failed (const_tree expr ATTRIBUTE_UNUSED,
		   const_tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}
/* Feed a digest of EXPR (and everything reachable from it that fold is
   allowed to inspect) into CTX, using HT to avoid revisiting nodes.  */

static void
fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
		    hash_table<nofree_ptr_hash<const tree_node> > *ht)
{
  const tree_node **slot;
  enum tree_code code;
  union tree_node buf;
  int i, len;

 recursive_label:
  if (expr == NULL)
    return;
  slot = ht->find_slot (expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = expr;
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && HAS_DECL_ASSEMBLER_NAME_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      SET_DECL_ASSEMBLER_NAME ((tree) &buf, NULL);
      buf.decl_with_vis.symtab_node = NULL;
      expr = (tree) &buf;
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
	   && (TYPE_POINTER_TO (expr)
	       || TYPE_REFERENCE_TO (expr)
	       || TYPE_CACHED_VALUES_P (expr)
	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
	       || TYPE_NEXT_VARIANT (expr)))
    {
      /* Allow these fields to be modified.  */
      tree tmp;
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = tmp = (tree) &buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
      TYPE_POINTER_TO (tmp) = NULL;
      TYPE_REFERENCE_TO (tmp) = NULL;
      TYPE_NEXT_VARIANT (tmp) = NULL;
      if (TYPE_CACHED_VALUES_P (tmp))
	{
	  TYPE_CACHED_VALUES_P (tmp) = 0;
	  TYPE_CACHED_VALUES (tmp) = NULL;
	}
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST
      && code != SSA_NAME
      && CODE_CONTAINS_STRUCT (code, TS_COMMON))
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
	{
	case STRING_CST:
	  md5_process_bytes (TREE_STRING_POINTER (expr),
			     TREE_STRING_LENGTH (expr), ctx);
	  break;
	case COMPLEX_CST:
	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
	  break;
	case VECTOR_CST:
	  for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
	    fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_exceptional:
      switch (code)
	{
	case TREE_LIST:
	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
	  expr = TREE_CHAIN (expr);
	  goto recursive_label;
	  break;
	case TREE_VEC:
	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
    case tcc_vl_exp:
      len = TREE_OPERAND_LENGTH (expr);
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
	{
	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
	}
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
	{
	  if (TREE_CODE (expr) == FUNCTION_DECL)
	    {
	      fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
	      fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
	    }
	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
	}
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
	  || SCALAR_FLOAT_TYPE_P (expr))
	{
	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
	}
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
	  || TREE_CODE (expr) == UNION_TYPE
	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}
/* Helper function for outputting the checksum of a tree T.  When
   debugging with gdb, you can "define mynext" to be "next" followed
   by "call debug_fold_checksum (op0)", then just trace down till the
   outputs differ.  */

DEBUG_FUNCTION void
debug_fold_checksum (const_tree t)
{
  int i;
  unsigned char checksum[16];
  struct md5_ctx ctx;
  hash_table<nofree_ptr_hash<const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (t, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum);

  for (i = 0; i < 16; i++)
    fprintf (stderr, "%d ", checksum[i]);

  fprintf (stderr, "\n");
}

#endif
/* Fold a unary tree expression with code CODE of type TYPE with an
   operand OP0.  LOC is the location of the resulting expression.
   Return a folded expression if successful.  Otherwise, return a tree
   expression with code CODE of type TYPE with an operand OP0.  */

tree
fold_build1_stat_loc (location_t loc,
		      enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before[16], checksum_after[16];
  struct md5_ctx ctx;
  hash_table<nofree_ptr_hash<const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before);
  ht.empty ();
#endif

  tem = fold_unary_loc (loc, code, type, op0);
  if (!tem)
    tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (op0, tem);
#endif
  return tem;
}
/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting
   expression.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with operands
   OP0 and OP1.  */

tree
fold_build2_stat_loc (location_t loc,
		      enum tree_code code, tree type, tree op0,
		      tree op1 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
		checksum_before_op1[16],
		checksum_after_op0[16],
		checksum_after_op1[16];
  struct md5_ctx ctx;
  hash_table<nofree_ptr_hash<const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  ht.empty ();
#endif

  tem = fold_binary_loc (loc, code, type, op0, op1);
  if (!tem)
    tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  ht.empty ();

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op1);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
#endif
  return tem;
}
/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */

tree
fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
		      tree op0, tree op1, tree op2 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
		checksum_before_op1[16],
		checksum_before_op2[16],
		checksum_after_op0[16],
		checksum_after_op1[16],
		checksum_after_op2[16];
  struct md5_ctx ctx;
  hash_table<nofree_ptr_hash<const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  ht.empty ();
#endif

  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
  if (!tem)
    tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  ht.empty ();

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  ht.empty ();

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op2);

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}
/* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
   arguments in ARGARRAY, and a null static chain.
   Return a folded expression if successful.  Otherwise, return a CALL_EXPR
   of type TYPE from the given operands as constructed by build_call_array.  */

tree
fold_build_call_array_loc (location_t loc, tree type, tree fn,
			   int nargs, tree *argarray)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_fn[16],
		checksum_before_arglist[16],
		checksum_after_fn[16],
		checksum_after_arglist[16];
  struct md5_ctx ctx;
  hash_table<nofree_ptr_hash<const tree_node> > ht (32);
  int i;

  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_fn);
  ht.empty ();

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_arglist);
  ht.empty ();
#endif

  tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
  if (!tem)
    tem = build_call_array_loc (loc, type, fn, nargs, argarray);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_fn);
  ht.empty ();

  if (memcmp (checksum_before_fn, checksum_after_fn, 16))
    fold_check_failed (fn, tem);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_arglist);

  if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
    fold_check_failed (NULL_TREE, tem);
#endif
  return tem;
}
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;

tree
fold_build1_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build1_loc (loc, code, type, op);

  END_FOLD_INIT;
  return result;
}

tree
fold_build2_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op0, tree op1)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build2_loc (loc, code, type, op0, op1);

  END_FOLD_INIT;
  return result;
}

tree
fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
				       int nargs, tree *argarray)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);

  END_FOLD_INIT;
  return result;
}

#undef START_FOLD_INIT
#undef END_FOLD_INIT
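/* Editorial sketch (not part of the original source): a front end would
   fold a static initializer through the wrappers above so that, e.g.,
   an expression that could trap at run time is still folded; INIT_TYPE,
   OP0 and OP1 are hypothetical.  */
#if 0
  tree folded = fold_build2_initializer_loc (input_location, PLUS_EXPR,
					     init_type, op0, op1);
#endif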
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determined it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */

int
multiple_of_p (tree type, const_tree top, const_tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
	return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
	{
	  tree op1, t1;

	  op1 = TREE_OPERAND (top, 1);
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
	      && 0 != (t1 = fold_convert (type,
					  const_binop (LSHIFT_EXPR,
						       size_one_node,
						       op1)))
	      && !TREE_OVERFLOW (t1))
	    return multiple_of_p (type, t1, bottom);
	}
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
	  || (TYPE_PRECISION (type)
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
	return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case COND_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
	  || integer_zerop (bottom)
	  || (TYPE_UNSIGNED (type)
	      && (tree_int_cst_sgn (top) < 0
		  || tree_int_cst_sgn (bottom) < 0)))
	return 0;
      return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
				SIGNED);

    default:
      return 0;
    }
}
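/* Editorial illustration (not from the original source): with
   TOP = SAVE_EXPR (J) * 8 and BOTTOM = 8, the MULT_EXPR arm succeeds
   because operand 1 is literally 8, which the INTEGER_CST arm proves
   to be a multiple of 8 via wi::multiple_of_p.  */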
/* Return true if CODE or TYPE is known to be non-negative.  */

static bool
tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
{
  if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
      && truth_value_p (code))
    /* Truth values evaluate to 0 or 1, which is nonnegative unless we
       have a signed:1 type (where the value is -1 and 0).  */
    return true;

  return false;
}
/* Return true if (CODE OP0) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
				bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
	 ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!ANY_INTEGRAL_TYPE_P (type))
	return true;
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  *strict_overflow_p = true;
	  return true;
	}
      break;

    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
					    strict_overflow_p);

    CASE_CONVERT:
      {
	tree inner_type = TREE_TYPE (op0);
	tree outer_type = type;

	if (TREE_CODE (outer_type) == REAL_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_warnv_p (op0,
						    strict_overflow_p);
	    if (INTEGRAL_TYPE_P (inner_type))
	      {
		if (TYPE_UNSIGNED (inner_type))
		  return true;
		return tree_expr_nonnegative_warnv_p (op0,
						      strict_overflow_p);
	      }
	  }
	else if (INTEGRAL_TYPE_P (outer_type))
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_warnv_p (op0,
						    strict_overflow_p);
	    if (INTEGRAL_TYPE_P (inner_type))
	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
		     && TYPE_UNSIGNED (inner_type);
	  }
      }
      break;

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
				 tree op1, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type))
	return (tree_expr_nonnegative_warnv_p (op0,
					       strict_overflow_p)
		&& tree_expr_nonnegative_warnv_p (op1,
						  strict_overflow_p));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
	 both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TREE_CODE (op0) == NOP_EXPR
	  && TREE_CODE (op1) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    {
	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
				       TYPE_PRECISION (inner2)) + 1;
	      return prec < TYPE_PRECISION (type);
	    }
	}
      break;
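      /* Editorial example: two unsigned chars zero-extended to 32-bit int
	 sum to at most 255 + 255 = 510, needing MAX (8, 8) + 1 = 9 bits;
	 since 9 < 32, the sum can never reach the sign bit.  */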
    case MULT_EXPR:
      if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* x * x is always non-negative for floating point x
	     or without overflow.  */
	  if (operand_equal_p (op0, op1, 0)
	      || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
		  && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
	    {
	      if (ANY_INTEGRAL_TYPE_P (type)
		  && TYPE_OVERFLOW_UNDEFINED (type))
		*strict_overflow_p = true;
	      return true;
	    }
	}

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
	  && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
	{
	  tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
	    ? TREE_TYPE (TREE_OPERAND (op0, 0))
	    : TREE_TYPE (op0);
	  tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
	    ? TREE_TYPE (TREE_OPERAND (op1, 0))
	    : TREE_TYPE (op1);

	  bool unsigned0 = TYPE_UNSIGNED (inner0);
	  bool unsigned1 = TYPE_UNSIGNED (inner1);

	  if (TREE_CODE (op0) == INTEGER_CST)
	    unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;

	  if (TREE_CODE (op1) == INTEGER_CST)
	    unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;

	  if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
	      && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
	    {
	      unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
		? tree_int_cst_min_precision (op0, UNSIGNED)
		: TYPE_PRECISION (inner0);

	      unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
		? tree_int_cst_min_precision (op1, UNSIGNED)
		: TYPE_PRECISION (inner1);

	      return precision0 + precision1 < TYPE_PRECISION (type);
	    }
	}
      break;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
					     strict_overflow_p)
	      || tree_expr_nonnegative_warnv_p (op1,
						strict_overflow_p));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
					     strict_overflow_p)
	      && tree_expr_nonnegative_warnv_p (op1,
						strict_overflow_p));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
					    strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case FIXED_CST:
      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));

    case COND_EXPR:
      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
					     strict_overflow_p)
	      && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
						strict_overflow_p));

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t));
    }
  /* We don't know sign of `t', so be conservative and return false.  */
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_call_nonnegative_warnv_p (tree type, tree fndecl,
			       tree arg0, tree arg1, bool *strict_overflow_p)
{
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
	CASE_FLT_FN (BUILT_IN_ACOS):
	CASE_FLT_FN (BUILT_IN_ACOSH):
	CASE_FLT_FN (BUILT_IN_CABS):
	CASE_FLT_FN (BUILT_IN_COSH):
	CASE_FLT_FN (BUILT_IN_ERFC):
	CASE_FLT_FN (BUILT_IN_EXP):
	CASE_FLT_FN (BUILT_IN_EXP10):
	CASE_FLT_FN (BUILT_IN_EXP2):
	CASE_FLT_FN (BUILT_IN_FABS):
	CASE_FLT_FN (BUILT_IN_FDIM):
	CASE_FLT_FN (BUILT_IN_HYPOT):
	CASE_FLT_FN (BUILT_IN_POW10):
	CASE_INT_FN (BUILT_IN_FFS):
	CASE_INT_FN (BUILT_IN_PARITY):
	CASE_INT_FN (BUILT_IN_POPCOUNT):
	CASE_INT_FN (BUILT_IN_CLZ):
	CASE_INT_FN (BUILT_IN_CLRSB):
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
	/* Always true.  */
	return true;

	CASE_FLT_FN (BUILT_IN_SQRT):
	/* sqrt(-0.0) is -0.0.  */
	if (!HONOR_SIGNED_ZEROS (element_mode (type)))
	  return true;
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_ASINH):
	CASE_FLT_FN (BUILT_IN_ATAN):
	CASE_FLT_FN (BUILT_IN_ATANH):
	CASE_FLT_FN (BUILT_IN_CBRT):
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_ERF):
	CASE_FLT_FN (BUILT_IN_EXPM1):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_FMOD):
	CASE_FLT_FN (BUILT_IN_FREXP):
	CASE_FLT_FN (BUILT_IN_ICEIL):
	CASE_FLT_FN (BUILT_IN_IFLOOR):
	CASE_FLT_FN (BUILT_IN_IRINT):
	CASE_FLT_FN (BUILT_IN_IROUND):
	CASE_FLT_FN (BUILT_IN_LCEIL):
	CASE_FLT_FN (BUILT_IN_LDEXP):
	CASE_FLT_FN (BUILT_IN_LFLOOR):
	CASE_FLT_FN (BUILT_IN_LLCEIL):
	CASE_FLT_FN (BUILT_IN_LLFLOOR):
	CASE_FLT_FN (BUILT_IN_LLRINT):
	CASE_FLT_FN (BUILT_IN_LLROUND):
	CASE_FLT_FN (BUILT_IN_LRINT):
	CASE_FLT_FN (BUILT_IN_LROUND):
	CASE_FLT_FN (BUILT_IN_MODF):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_SCALB):
	CASE_FLT_FN (BUILT_IN_SCALBLN):
	CASE_FLT_FN (BUILT_IN_SCALBN):
	CASE_FLT_FN (BUILT_IN_SIGNBIT):
	CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
	CASE_FLT_FN (BUILT_IN_SINH):
	CASE_FLT_FN (BUILT_IN_TANH):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	/* True if the 1st argument is nonnegative.  */
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_FMAX):
	/* True if the 1st OR 2nd arguments are nonnegative.  */
	return (tree_expr_nonnegative_warnv_p (arg0,
					       strict_overflow_p)
		|| (tree_expr_nonnegative_warnv_p (arg1,
						   strict_overflow_p)));

	CASE_FLT_FN (BUILT_IN_FMIN):
	/* True if the 1st AND 2nd arguments are nonnegative.  */
	return (tree_expr_nonnegative_warnv_p (arg0,
					       strict_overflow_p)
		&& (tree_expr_nonnegative_warnv_p (arg1,
						   strict_overflow_p)));

	CASE_FLT_FN (BUILT_IN_COPYSIGN):
	/* True if the 2nd argument is nonnegative.  */
	return tree_expr_nonnegative_warnv_p (arg1,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_POWI):
	/* True if the 1st argument is nonnegative or the second
	   argument is an even integer.  */
	if (TREE_CODE (arg1) == INTEGER_CST
	    && (TREE_INT_CST_LOW (arg1) & 1) == 0)
	  return true;
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_POW):
	/* True if the 1st argument is nonnegative or the second
	   argument is an even integer valued real.  */
	if (TREE_CODE (arg1) == REAL_CST)
	  {
	    REAL_VALUE_TYPE c;
	    HOST_WIDE_INT n;

	    c = TREE_REAL_CST (arg1);
	    n = real_to_integer (&c);
	    if ((n & 1) == 0)
	      {
		REAL_VALUE_TYPE cint;
		real_from_integer (&cint, VOIDmode, n, SIGNED);
		if (real_identical (&c, &cint))
		  return true;
	      }
	  }
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

      default:
	break;
      }
  return tree_simple_nonnegative_warnv_p (CALL_EXPR,
					  type);
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code = TREE_CODE (t);
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (code)
    {
    case TARGET_EXPR:
      {
	tree temp = TARGET_EXPR_SLOT (t);
	t = TARGET_EXPR_INITIAL (t);

	/* If the initializer is non-void, then it's a normal expression
	   that will be assigned to the slot.  */
	if (!VOID_TYPE_P (t))
	  return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);

	/* Otherwise, the initializer sets the slot in some way.  One common
	   way is an assignment statement at the end of the initializer.  */
	while (1)
	  {
	    if (TREE_CODE (t) == BIND_EXPR)
	      t = expr_last (BIND_EXPR_BODY (t));
	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
		     || TREE_CODE (t) == TRY_CATCH_EXPR)
	      t = expr_last (TREE_OPERAND (t, 0));
	    else if (TREE_CODE (t) == STATEMENT_LIST)
	      t = expr_last (t);
	    else
	      break;
	  }
	if (TREE_CODE (t) == MODIFY_EXPR
	    && TREE_OPERAND (t, 0) == temp)
	  return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
						strict_overflow_p);

	return false;
      }

    case CALL_EXPR:
      {
	tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
	tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;

	return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
					      get_callee_fndecl (t),
					      arg0,
					      arg1,
					      strict_overflow_p);
      }
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
					    strict_overflow_p);
    case BIND_EXPR:
      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
					    strict_overflow_p);
    case SAVE_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
					    strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code;
  if (t == error_mark_node)
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p);

    case tcc_unary:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p);
    case TRUTH_NOT_EXPR:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
    }
}
/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */

bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-negative"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
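
/* Illustrative sketch (not part of this file): how a folder might use
   tree_expr_nonnegative_p to justify a simplification.  The helper name
   fold_abs_of_arg below is hypothetical; the routines it calls
   (tree_expr_nonnegative_p, fold_build1_loc) are the real entry points
   defined in or used by this file.  */
#if 0
static tree
fold_abs_of_arg (location_t loc, tree type, tree arg)
{
  /* ABS_EXPR <x> -> x when x is provably non-negative; otherwise keep
     the ABS_EXPR.  */
  if (tree_expr_nonnegative_p (arg))
    return arg;
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}
#endif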
/* Return true when (CODE OP0) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
			    bool *strict_overflow_p)
{
  switch (code)
    {
    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
					strict_overflow_p);

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (op0);
	tree outer_type = type;

	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
		&& tree_expr_nonzero_warnv_p (op0,
					      strict_overflow_p));
      }
      break;

    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
					strict_overflow_p);

    default:
      break;
    }

  return false;
}
/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonzero_warnv_p (enum tree_code code,
			     tree type,
			     tree op0,
			     tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;

  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* With the presence of negative values it is hard
	     to say something.  */
	  sub_strict_overflow_p = false;
	  if (!tree_expr_nonnegative_warnv_p (op0,
					      &sub_strict_overflow_p)
	      || !tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	    return false;
	  /* One of operands must be positive and the other non-negative.  */
	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
	     overflows, on a twos-complement machine the sum of two
	     nonnegative numbers can never be zero.  */
	  return (tree_expr_nonzero_warnv_p (op0,
					     strict_overflow_p)
		  || tree_expr_nonzero_warnv_p (op1,
						strict_overflow_p));
	}
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  if (tree_expr_nonzero_warnv_p (op0,
					 strict_overflow_p)
	      && tree_expr_nonzero_warnv_p (op1,
					    strict_overflow_p))
	    {
	      *strict_overflow_p = true;
	      return true;
	    }
	}
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (op1,
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;

	  /* When both operands are nonzero, then MAX must be too.  */
	  if (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p))
	    return true;

	  /* MAX where operand 0 is positive is positive.  */
	  return tree_expr_nonnegative_warnv_p (op0,
						strict_overflow_p);
	}
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1,
					  &sub_strict_overflow_p)
	       && tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p)
	      || tree_expr_nonzero_warnv_p (op0,
					    strict_overflow_p));

    default:
      break;
    }

  return false;
}
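
/* Worked example for the PLUS_EXPR case above: with 32-bit signed
   operands 0 <= a, b <= 2^31 - 1, the wrapped sum a + b is at most
   2^32 - 2, so it can equal 0 (mod 2^32) only when a == b == 0.  Hence
   if either operand is known nonzero, the sum is nonzero even when it
   overflows, which is why *STRICT_OVERFLOW_P is left untouched there.  */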
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
	tree base = TREE_OPERAND (t, 0);

	if (!DECL_P (base))
	  base = get_base_address (base);

	if (!base)
	  return false;

	/* For objects in symbol table check if we know they are non-zero.
	   Don't do anything for variables and functions before symtab is built;
	   it is quite possible that they will be declared weak later.  */
	if (DECL_P (base) && decl_in_symtab_p (base))
	  {
	    struct symtab_node *symbol;

	    symbol = symtab_node::get_create (base);
	    if (symbol)
	      return symbol->nonzero_address ();
	    else
	      return false;
	  }

	/* Function local objects are never NULL.  */
	if (DECL_P (base)
	    && (DECL_CONTEXT (base)
		&& TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
		&& auto_var_in_fn_p (base, DECL_CONTEXT (base))))
	  return true;

	/* Constants are never weak.  */
	if (CONSTANT_CLASS_P (base))
	  return true;

	break;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    default:
      break;
    }

  return false;
}
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
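
/* Illustrative sketch (not part of this file): both *_to_constant
   helpers simply fold and keep the result only when it is constant.
   The snippet assumes an initialized middle end; build_int_cst and
   integer_type_node are the usual GCC APIs.  */
#if 0
  /* Folds to the INTEGER_CST -5.  */
  tree five = build_int_cst (integer_type_node, 5);
  tree neg = fold_unary_to_constant (NEGATE_EXPR, integer_type_node, five);

  /* fold_binary_to_constant (PLUS_EXPR, integer_type_node, five, var)
     returns NULL_TREE when VAR is not a constant, even though fold_binary
     may still simplify the expression.  */
#endif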
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;
      location_t loc = EXPR_LOCATION (exp);

      if (TREE_CODE (exp) == INDIRECT_REF)
	string = string_constant (exp1, &index);
      else
	{
	  tree low_bound = array_ref_low_bound (exp);
	  index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));

	  /* Optimize the special-case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion, (ARRAY
	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
	  if (! integer_zerop (low_bound))
	    index = size_diffop_loc (loc, index,
				     fold_convert_loc (loc, sizetype,
						       low_bound));

	  string = exp1;
	}

      if (string
	  && TYPE_MODE (TREE_TYPE (exp))
	     == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
	  && TREE_CODE (string) == STRING_CST
	  && TREE_CODE (index) == INTEGER_CST
	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
	      == MODE_INT)
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
	return build_int_cst_type (TREE_TYPE (exp),
				   (TREE_STRING_POINTER (string)
				    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}
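
/* Illustrative example (not part of this file): for a source expression
   like "abc"[1], EXP is an ARRAY_REF of a STRING_CST with index 1, and
   the function above returns the character constant 'b' built with
   TREE_TYPE (exp).  An index at or past TREE_STRING_LENGTH fails the
   compare_tree_int check, so out-of-bounds reads return NULL.  */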
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	bool overflow;
	wide_int val = wi::neg (arg0, &overflow);
	t = force_fit_type (type, val, 1,
			    (overflow | TREE_OVERFLOW (arg0))
			    && !TYPE_UNSIGNED (type));
	break;
      }

    case REAL_CST:
      t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
	FIXED_VALUE_TYPE f;
	bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
					    &(TREE_FIXED_CST (arg0)), NULL,
					    TYPE_SATURATING (type));
	t = build_fixed (type, f);
	/* Propagate overflow flags.  */
	if (overflow_p | TREE_OVERFLOW (arg0))
	  TREE_OVERFLOW (t) = 1;
	break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	/* If the value is unsigned or non-negative, then the absolute value
	   is the same as the ordinary value.  */
	if (!wi::neg_p (arg0, TYPE_SIGN (type)))
	  t = arg0;

	/* If the value is negative, then the absolute value is
	   its negation.  */
	else
	  {
	    bool overflow;
	    wide_int val = wi::neg (arg0, &overflow);
	    t = force_fit_type (type, val, -1,
				overflow | TREE_OVERFLOW (arg0));
	  }
      }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
	t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      else
	t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (const_tree arg0, tree type)
{
  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
}
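
/* For example, folding ~5 in a 32-bit signed type yields the INTEGER_CST
   -6: wi::bit_not complements the wide_int representation and
   force_fit_type re-canonicalizes the result for TYPE.  */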
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case ORDERED_EXPR:
	      result = 0;
	      break;

	    case NE_EXPR:
	    case UNORDERED_EXPR:
	    case UNLT_EXPR:
	    case UNLE_EXPR:
	    case UNGT_EXPR:
	    case UNGE_EXPR:
	    case UNEQ_EXPR:
	      result = 1;
	      break;

	    case LT_EXPR:
	    case LE_EXPR:
	    case GT_EXPR:
	    case GE_EXPR:
	    case LTGT_EXPR:
	      if (flag_trapping_math)
		return NULL_TREE;
	      result = 0;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  return constant_boolean_node (result, type);
	}

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
					  TREE_REALPART (op0),
					  TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
					  TREE_IMAGPART (op0),
					  TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
	return NULL_TREE;
    }

  if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
    {
      unsigned count = VECTOR_CST_NELTS (op0);
      tree *elts = XALLOCAVEC (tree, count);
      gcc_assert (VECTOR_CST_NELTS (op1) == count
		  && TYPE_VECTOR_SUBPARTS (type) == count);

      for (unsigned i = 0; i < count; i++)
	{
	  tree elem_type = TREE_TYPE (type);
	  tree elem0 = VECTOR_CST_ELT (op0, i);
	  tree elem1 = VECTOR_CST_ELT (op1, i);

	  tree tem = fold_relational_const (code, elem_type,
					    elem0, elem1);

	  if (tem == NULL_TREE)
	    return NULL_TREE;

	  elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
	}

      return build_vector (type, elts);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      std::swap (op0, op1);
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     Otherwise return T.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	result = tree_int_cst_equal (op0, op1);
      else
	result = tree_int_cst_lt (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
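
/* Worked example (not part of this file): the swap/invert scheme above
   reduces all six ordered comparisons to LT and EQ.  Folding 2 >= 3
   rewrites GE to LT with INVERT set, computes tree_int_cst_lt (2, 3) == 1,
   and inverts to 0, so constant_boolean_node produces the false node.  */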
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check to see if the expression inside the
     return has no side effects or the right hand side of the modify expression
     inside the return.  If either don't have side effects set we don't need to
     wrap the expression in a cleanup point expression.  Note we don't check the
     left hand side of the modify because it should always be a return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
	return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
	return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

static tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
	return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
	{
	  tree fop = fold_read_from_constant_string (op);
	  if (fop)
	    return fop;
	  else
	    return op;
	}
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && type == TREE_TYPE (optype)
	       && (!in_gimple_form
		   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (in_gimple_form
	      && TREE_CODE (min_val) != INTEGER_CST)
	    return NULL_TREE;
	  return build4_loc (loc, ARRAY_REF, type, op, min_val,
			     NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && type == TREE_TYPE (optype))
	return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op,
				  part_width, index);
	}
    }

  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
	{
	  tree op00type;
	  op00 = TREE_OPERAND (op00, 0);
	  op00type = TREE_TYPE (op00);

	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
	  if (TREE_CODE (op00type) == VECTOR_TYPE
	      && type == TREE_TYPE (op00type))
	    {
	      HOST_WIDE_INT offset = tree_to_shwi (op01);
	      tree part_width = TYPE_SIZE (type);
	      unsigned HOST_WIDE_INT part_widthi
		= tree_to_shwi (part_width) / BITS_PER_UNIT;
	      unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	      tree index = bitsize_int (indexi);

	      if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
		return fold_build3_loc (loc,
					BIT_FIELD_REF, type, op00,
					part_width, index);
	    }
	  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree size = TYPE_SIZE_UNIT (type);
	      if (tree_int_cst_equal (size, op01))
		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
	    }
	  /* ((foo *)&fooarray)[1] => fooarray[1] */
	  else if (TREE_CODE (op00type) == ARRAY_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree type_domain = TYPE_DOMAIN (op00type);
	      tree min_val = size_zero_node;
	      if (type_domain && TYPE_MIN_VALUE (type_domain))
		min_val = TYPE_MIN_VALUE (type_domain);
	      op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
				     TYPE_SIZE_UNIT (type));
	      op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
	      return build4_loc (loc, ARRAY_REF, type, op00, op01,
				 NULL_TREE, NULL_TREE);
	    }
	}
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
	  || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
	  && TREE_CODE (min_val) != INTEGER_CST)
	return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
			 NULL_TREE);
    }

  return NULL_TREE;
}
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}
/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}
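
/* For example, given the ignored expression (x + f ()), the tcc_binary
   case drops the side-effect-free operand X and loops, leaving just the
   call f () to be evaluated for its effects.  */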
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.  */

tree
round_up_loc (location_t loc, tree value, unsigned int divisor)
{
  tree div = NULL_TREE;

  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
	{
	  wide_int val = value;
	  bool overflow_p;

	  if ((val & (divisor - 1)) == 0)
	    return value;

	  overflow_p = TREE_OVERFLOW (value);
	  val += divisor - 1;
	  val &= - (int) divisor;
	  if (val == 0)
	    overflow_p = true;

	  return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
	}
      else
	{
	  tree t;

	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
	  t = build_int_cst (TREE_TYPE (value), - (int) divisor);
	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
	}
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
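
/* Worked example: rounding VALUE == 13 up to DIVISOR == 8 takes the
   power-of-two path and computes (13 + 7) & -8 == 16 with no division.
   A non-power-of-two divisor such as 6 instead goes through
   CEIL_DIV_EXPR and MULT_EXPR: ceil (13 / 6) * 6 == 18.  */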
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
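
/* Worked example: rounding 13 down to a multiple of 8 is just
   13 & -8 == 8 on the power-of-two path; for DIVISOR == 6 the
   FLOOR_DIV_EXPR/MULT_EXPR path yields 13 / 6 * 6 == 12.  */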
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep,
				  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (exp)))
	return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
				arg0 ? arg0 : TREE_OPERAND (exp, 0),
				arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
	return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp),
				arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
	return fold_build3_loc (loc,
				COND_EXPR, TREE_TYPE (exp),
				TREE_OPERAND (exp, 0),
				arg0 ? arg0 : TREE_OPERAND (exp, 1),
				arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
	const enum built_in_function fcode = builtin_mathfn_code (exp);
	switch (fcode)
	  {
	  CASE_FLT_FN (BUILT_IN_COPYSIGN):
	    /* Strip copysign function call, return the 1st argument.  */
	    arg0 = CALL_EXPR_ARG (exp, 0);
	    arg1 = CALL_EXPR_ARG (exp, 1);
	    return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

	  default:
	    /* Strip sign ops from the argument of "odd" math functions.  */
	    if (negate_mathfn_p (fcode))
	      {
		arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
		if (arg0)
		  return build_call_expr_loc (loc, get_callee_fndecl (exp),
					      1, arg0);
	      }
	    break;
	  }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
/* Return OFF converted to a pointer offset type suitable as offset for
   POINTER_PLUS_EXPR.  Use location LOC for this conversion.  */

tree
convert_to_ptrofftype_loc (location_t loc, tree off)
{
  return fold_convert_loc (loc, sizetype, off);
}

/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */

tree
fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
{
  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
			  ptr, convert_to_ptrofftype_loc (loc, off));
}

/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */

tree
fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
{
  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
			  ptr, size_int (off));
}
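
/* Illustrative sketch (not part of this file): a typical caller offsets
   a pointer value by a byte count.  PTR and LOC here are hypothetical
   locals; the helpers called are the ones defined just above.  */
#if 0
  /* Both are equivalent to the C expression (char *) ptr + 4, folded.  */
  tree off = build_int_cst (sizetype, 4);
  tree p1 = fold_build_pointer_plus_loc (loc, ptr, off);
  tree p2 = fold_build_pointer_plus_hwi_loc (loc, ptr, 4);
#endif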