/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "coretypes.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "diagnostic-core.h"
#include "langhooks.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "hash-table.h"  /* Required for ENABLE_FOLD_CHECKING.  */
#include "plugin-api.h"
#include "generic-match.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */

int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

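/* An illustrative note on the encoding (a sketch of facts implied by
   the uses below): the low bits encode LT, EQ and GT and a fourth bit
   encodes UNORD, so COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ) == 3
   and COMPCODE_UNGE == (COMPCODE_UNORD | COMPCODE_GT | COMPCODE_EQ)
   == 14.  ANDing or ORing two codes therefore combines the underlying
   comparisons bitwise; see combine_comparisons below.  */
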
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    machine_mode *, int *, int *,
                                    tree *, tree *);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);

/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}

/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
                         SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}

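/* For example (illustrative): arg1 == 12, arg2 == 4 yields the tree
   constant 3, while arg1 == 13, arg2 == 4 leaves a remainder and so
   yields NULL_TREE.  */
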
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used in a way which requires the warning.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

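/* An illustrative calling sequence for the deferral machinery (not a
   call site in this file; USED_P and STMT are hypothetical):

     fold_defer_overflow_warnings ();
     tree res = fold (expr);
     ... decide whether RES is really used ...
     fold_undefer_overflow_warnings (used_p, stmt, 0);

   Any fold_overflow_warning issued while folding is stashed and
   reported at most once by the undefer call.  */
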
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

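/* For example, sin is odd (sin(-x) == -sin(x)), so -sin(x) can be
   rewritten as sin(-x); cos is even and is deliberately absent from
   the list.  The rint family is only odd under the default
   round-to-nearest mode, hence the !flag_rounding_math guard.  */
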
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}

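/* For a signed 32-bit type this rejects exactly INT_MIN (only the
   sign bit set), since -INT_MIN is not representable; every other
   value may be negated safely.  */
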
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
        if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
          return true;

        int count = TYPE_VECTOR_SUBPARTS (type), i;

        for (i = 0; i < count; i++)
          if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
            return false;

        return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          || HONOR_SIGNED_ZEROS (element_mode (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
             && !HONOR_SIGNED_ZEROS (element_mode (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
        {
          if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
            break;
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
              || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
                  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
            return true;
        }
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
        return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || (!TYPE_OVERFLOW_TRAPS (type)
              && TYPE_OVERFLOW_WRAPS (type))
          || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
        int count = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts = XALLOCAVEC (tree, count);

        for (i = 0; i < count; i++)
          {
            elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
        return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          && !HONOR_SIGNED_ZEROS (element_mode (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          && !HONOR_SIGNED_ZEROS (element_mode (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          tem = TREE_OPERAND (t, 0);
          if ((INTEGRAL_TYPE_P (type)
               && (TREE_CODE (tem) == NEGATE_EXPR
                   || (TREE_CODE (tem) == INTEGER_CST
                       && may_negate_without_overflow_p (tem))))
              || !INTEGRAL_TYPE_P (type))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

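/* Worked example for the RSHIFT_EXPR case, assuming 32-bit int:
   (int) x >> 31 is 0 or -1 (arithmetic shift), so its negation is 0
   or 1, exactly what the logical shift (unsigned) x >> 31 computes;
   the conversions are swapped for an unsigned operand.  */
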
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}

/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}

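/* For example (illustrative): splitting IN == x + 3 with CODE ==
   PLUS_EXPR stores 3 in *LITP and returns x; splitting x - 3 stores
   3 in *MINUS_LITP instead; a TREE_CONSTANT but non-literal operand,
   such as the address of a global, is stored in *CONP.  */
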
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
                   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
                                  TYPE_SIGN (TREE_TYPE (parg2)));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RSHIFT_EXPR)
            code = LSHIFT_EXPR;
          else
            code = RSHIFT_EXPR;
        }

      if (code == RSHIFT_EXPR)
        /* It's unclear from the C standard whether shifts can overflow.
           The following code ignores overflow; perhaps a C standard
           interpretation ruling is needed.  */
        res = wi::rshift (arg1, arg2, sign);
      else
        res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RROTATE_EXPR)
            code = LROTATE_EXPR;
          else
            code = RROTATE_EXPR;
        }

      if (code == RROTATE_EXPR)
        res = wi::rrotate (arg1, arg2);
      else
        res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
                      (((sign == SIGNED || overflowable == -1)
                        && overflow)
                       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}

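/* For instance, int_const_binop (PLUS_EXPR, 2, 3) yields the
   INTEGER_CST 5, while a division by a zero constant returns
   NULL_TREE instead of folding.  */
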
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          {
            wide_int w2 = arg2;
            f2.data.high = w2.elt (1);
            f2.data.low = w2.elt (0);
            f2.mode = SImode;
          }
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ... */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);

          elts[i] = const_binop (code, elem1, arg2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}

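/* Worked example for the straightforward complex division above:
   (1 + 2i) / (3 + 4i) gives t = 3*3 + 4*4 = 25, so the real part is
   (1*3 + 2*4)/25 = 11/25 and the imaginary part is (2*3 - 1*4)/25 =
   2/25.  The wide-range variant computes the same value but scales by
   a ratio first to avoid overflow in the intermediate products.  */
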
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}

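/* For example, size_binop_loc (loc, PLUS_EXPR, size_zero_node, N)
   returns N directly, and two sizetype constants are combined with
   overflowable == -1 so that wrap-around is recorded in TREE_OVERFLOW
   even though sizetype is unsigned.  */
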
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}

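/* Illustration: for sizetype constants 4 and 10, size_diffop computes
   -(ssizetype) (10 - 4) == -6; the subtraction is always performed in
   the direction that cannot wrap and is then negated in the signed
   type.  */
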
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
                         !POINTER_TYPE_P (TREE_TYPE (arg1)),
                         TREE_OVERFLOW (arg1));
}

/* A subroutine of fold_convert_const handling conversions of a
   REAL_CST to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = true;
          val = lt;
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = true;
              val = ut;
            }
        }
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}

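/* Examples of the saturating rules above, assuming a 32-bit int
   target: (int) 3.7 folds to 3; (int) 1e30 to INT_MAX and (int) -1e30
   to INT_MIN, both with TREE_OVERFLOW set; (int) NaN to 0, also with
   TREE_OVERFLOW set.  */
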
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  machine_mode mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
                          HOST_BITS_PER_DOUBLE_INT,
                          SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     Check whether the fractional bits are nonzero, and if so add 1
     to TEMP.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type (type, temp, -1,
                      (temp.is_negative ()
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1));

  return t;
}

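/* Illustration of the rounding fix-up above: converting the
   fixed-point value -3.5, the arithmetic right shift gives -4
   (rounding toward -inf); shifting back detects the lost fractional
   bits, so 1 is added to yield the round-toward-zero result -3.  */
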
/* A subroutine of fold_convert_const handling conversions of a
   REAL_CST to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;
  double_int di;

  gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);

  di.low = TREE_INT_CST_ELT (arg1, 0);
  if (TREE_INT_CST_NUNITS (arg1) == 1)
    di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
  else
    di.high = TREE_INT_CST_ELT (arg1, 1);

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a
   REAL_CST to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}

/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}

/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}

/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
          && (TYPE_ADDR_SPACE (TREE_TYPE (type))
              != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
        return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, REALPART_EXPR,
                                                  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == FIXED_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1_loc (loc, NOP_EXPR, type, arg);

        case FIXED_POINT_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
          || TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            goto fold_convert_exit;
        }

      switch (TREE_CODE (orig))
        {
        case FIXED_POINT_TYPE:
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case BOOLEAN_TYPE:
        case REAL_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
        case FIXED_POINT_TYPE:
          return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                  fold_convert_loc (loc, TREE_TYPE (type), arg),
                                  fold_convert_loc (loc, TREE_TYPE (type),
                                                    integer_zero_node));
        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 0));
                ipart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 1));
                return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
              }

            arg = save_expr (arg);
            rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
            ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
            rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
            ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
            return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
          }

        default:
          gcc_unreachable ();
        }

    case VECTOR_TYPE:
      if (integer_zerop (arg))
        return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
                  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      gcc_unreachable ();
    }
 fold_convert_exit:
  return protected_set_expr_location_unshare (tem, loc);
}

/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (const_tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case LABEL_DECL:
    case FUNCTION_DECL:
    case SSA_NAME:

    case COMPONENT_REF:
    case MEM_REF:
    case INDIRECT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
    case OBJ_TYPE_REF:

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case SAVE_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case TARGET_EXPR:
    case COND_EXPR:
    case BIND_EXPR:
      break;

    default:
      /* Assume the worst for front-end tree codes.  */
      if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
        break;
      return false;
    }

  return true;
}

/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue_loc (location_t loc, tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;
  return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
}

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue_loc (location_t loc, tree x)
{
  return protected_set_expr_location_unshare (x, loc);
}

/* Given a tree comparison code, return the code that is the logical inverse.
   It is generally not safe to do this for floating-point comparisons, except
   for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
   ERROR_MARK in this case.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
      && code != ORDERED_EXPR && code != UNORDERED_EXPR)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}

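/* For example, with NaNs honored the logical inverse of x < y is
   x UNGE y (unordered or greater or equal), not x >= y, because both
   orderings are false when either operand is NaN; without NaNs the
   plain GE_EXPR is returned.  */
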
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case LTGT_EXPR:
    case UNEQ_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    case UNGT_EXPR:
      return UNLT_EXPR;
    case UNGE_EXPR:
      return UNLE_EXPR;
    case UNLT_EXPR:
      return UNGT_EXPR;
    case UNLE_EXPR:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}

/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}

/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}

/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

tree
combine_comparisons (location_t loc,
                     enum tree_code code, enum tree_code lcode,
                     enum tree_code rcode, tree truth_type,
                     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (element_mode (ll_arg));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  int compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
         which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
        compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
        compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
         under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
                   && (lcompcode != COMPCODE_EQ)
                   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
                   && (rcompcode != COMPCODE_EQ)
                   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
                  && (compcode != COMPCODE_EQ)
                  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
         such that the RHS, if evaluated, will never trap.  For
         example, in ORD (x, y) && (x < y), we evaluate the RHS only
         if neither x nor y is NaN.  (This is a mixed blessing: for
         example, the expression above will never trap, hence
         optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
          || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
        rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
         trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
          && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
        return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
        return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    {
      enum tree_code tcode;

      tcode = compcode_to_comparison ((enum comparison_code) compcode);
      return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
    }
}
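/* Illustrative example (editorial, not part of the original source):
   combining

     (x <= y) && (x >= y)

   computes COMPCODE_LE & COMPCODE_GE == COMPCODE_EQ, so the pair folds
   to the single comparison x == y.  Dually, (x < y) || (x == y) ORs
   COMPCODE_LT with COMPCODE_EQ giving COMPCODE_LE, i.e. x <= y.  When
   the operands may be NaN and -ftrapping-math is in effect, the trap
   checks above can instead veto the fold and return NULL_TREE.  */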
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */

int
operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
      || TREE_TYPE (arg0) == error_mark_node
      || TREE_TYPE (arg1) == error_mark_node)
    return 0;

  /* Similar, if either does not have a type (like a released SSA name),
     they aren't equal.  */
  if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
    return 0;

  /* Check equality of integer constants before bailing out due to
     precision differences.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    return tree_int_cst_equal (arg0, arg1);

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  As pointers
     strictly don't have a signedness, require either two pointers or
     two non-pointers as well.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
      || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* We cannot consider pointers to different address spaces equal.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
      && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
          != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
    return 0;

  /* If both types don't have the same precision, then it is not safe
     to strip NOPs.  */
  if (element_precision (TREE_TYPE (arg0))
      != element_precision (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* In case both args are comparisons but with different comparison
     code, try to swap the comparison operands of one arg to produce
     a match and compare that variant.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      && COMPARISON_CLASS_P (arg0)
      && COMPARISON_CLASS_P (arg1))
    {
      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));

      if (TREE_CODE (arg0) == swap_code)
        return operand_equal_p (TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg1, 1), flags)
               && operand_equal_p (TREE_OPERAND (arg0, 1),
                                   TREE_OPERAND (arg1, 0), flags);
    }

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* NOP_EXPR and CONVERT_EXPR are considered equal.  */
      && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
    return 0;

  /* This is needed for conversions and for COMPONENT_REF.
     Might as well play it safe and always test this.  */
  if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.
     If we are taking an invariant address of two identical objects
     they are necessarily equal as well.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
          || (flags & OEP_CONSTANT_ADDRESS_OF)
          || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
        return tree_int_cst_equal (arg0, arg1);

      case FIXED_CST:
        return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
                                       TREE_FIXED_CST (arg1));

      case REAL_CST:
        if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
                                   TREE_REAL_CST (arg1)))
          return 1;

        if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
          {
            /* If we do not distinguish between signed and unsigned zero,
               consider them equal.  */
            if (real_zerop (arg0) && real_zerop (arg1))
              return 1;
          }
        return 0;

      case VECTOR_CST:
        {
          unsigned i;

          if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
            return 0;

          for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
            {
              if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
                                    VECTOR_CST_ELT (arg1, i), flags))
                return 0;
            }
          return 1;
        }

      case COMPLEX_CST:
        return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
                                 flags)
                && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
                                    flags));

      case STRING_CST:
        return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
                && ! memcmp (TREE_STRING_POINTER (arg0),
                             TREE_STRING_POINTER (arg1),
                             TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
        return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
                                TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
                                ? OEP_CONSTANT_ADDRESS_OF : 0);
      default:
        break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;

/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, they both
   must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
                                    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N) \
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
        {
        CASE_CONVERT:
        case FIX_TRUNC_EXPR:
          if (TYPE_UNSIGNED (TREE_TYPE (arg0))
              != TYPE_UNSIGNED (TREE_TYPE (arg1)))
            return 0;
          break;
        default:
          break;
        }

      return OP_SAME (0);

    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
        return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 1), flags)
              && operand_equal_p (TREE_OPERAND (arg0, 1),
                                  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
         dereferencing contain a side effect, these cannot be equal,
         but their addresses can be.  */
      if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
          && (TREE_SIDE_EFFECTS (arg0)
              || TREE_SIDE_EFFECTS (arg1)))
        return 0;

      switch (TREE_CODE (arg0))
        {
        case INDIRECT_REF:
          flags &= ~OEP_CONSTANT_ADDRESS_OF;
          return OP_SAME (0);

        case TARGET_MEM_REF:
          flags &= ~OEP_CONSTANT_ADDRESS_OF;
          /* Require equal extra operands and then fall through to MEM_REF
             handling of the two common operands.  */
          if (!OP_SAME_WITH_NULL (2)
              || !OP_SAME_WITH_NULL (3)
              || !OP_SAME_WITH_NULL (4))
            return 0;
          /* Fallthru.  */
        case MEM_REF:
          flags &= ~OEP_CONSTANT_ADDRESS_OF;
          /* Require equal access sizes, and similar pointer types.
             We can have incomplete types for array references of
             variable-sized arrays from the Fortran frontend
             though.  Also verify the types are compatible.  */
          return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
                   || (TYPE_SIZE (TREE_TYPE (arg0))
                       && TYPE_SIZE (TREE_TYPE (arg1))
                       && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
                                           TYPE_SIZE (TREE_TYPE (arg1)),
                                           flags)))
                  && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
                  && alias_ptr_types_compatible_p
                       (TREE_TYPE (TREE_OPERAND (arg0, 1)),
                        TREE_TYPE (TREE_OPERAND (arg1, 1)))
                  && OP_SAME (0) && OP_SAME (1));

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          /* Operands 2 and 3 may be null.
             Compare the array index by value if it is constant first as we
             may have different types but same value here.  */
          if (!OP_SAME (0))
            return 0;
          flags &= ~OEP_CONSTANT_ADDRESS_OF;
          return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
                                       TREE_OPERAND (arg1, 1))
                   || OP_SAME (1))
                  && OP_SAME_WITH_NULL (2)
                  && OP_SAME_WITH_NULL (3));

        case COMPONENT_REF:
          /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
             may be NULL when we're called to compare MEM_EXPRs.  */
          if (!OP_SAME_WITH_NULL (0)
              || !OP_SAME (1))
            return 0;
          flags &= ~OEP_CONSTANT_ADDRESS_OF;
          return OP_SAME_WITH_NULL (2);

        case BIT_FIELD_REF:
          if (!OP_SAME (0))
            return 0;
          flags &= ~OEP_CONSTANT_ADDRESS_OF;
          return OP_SAME (1) && OP_SAME (2);

        default:
          return 0;
        }

    case tcc_expression:
      switch (TREE_CODE (arg0))
        {
        case TRUTH_NOT_EXPR:
          return OP_SAME (0);

        case TRUTH_ANDIF_EXPR:
        case TRUTH_ORIF_EXPR:
          return OP_SAME (0) && OP_SAME (1);

        case WIDEN_MULT_PLUS_EXPR:
        case WIDEN_MULT_MINUS_EXPR:
          if (!OP_SAME (2))
            return 0;
          /* The multiplication operands are commutative.  */
          /* FALLTHRU */

        case TRUTH_AND_EXPR:
        case TRUTH_OR_EXPR:
        case TRUTH_XOR_EXPR:
          if (OP_SAME (0) && OP_SAME (1))
            return 1;

          /* Otherwise take into account this is a commutative operation.  */
          return (operand_equal_p (TREE_OPERAND (arg0, 0),
                                   TREE_OPERAND (arg1, 1), flags)
                  && operand_equal_p (TREE_OPERAND (arg0, 1),
                                      TREE_OPERAND (arg1, 0), flags));

        case COND_EXPR:
        case VEC_COND_EXPR:
        case DOT_PROD_EXPR:
          return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

        default:
          return 0;
        }

    case tcc_vl_exp:
      switch (TREE_CODE (arg0))
        {
        case CALL_EXPR:
          /* If the CALL_EXPRs call different functions, then they
             clearly can not be equal.  */
          if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
                                 flags))
            return 0;

          {
            unsigned int cef = call_expr_flags (arg0);
            if (flags & OEP_PURE_SAME)
              cef &= ECF_CONST | ECF_PURE;
            else
              cef &= ECF_CONST;
            if (!cef)
              return 0;
          }

          /* Now see if all the arguments are the same.  */
          {
            const_call_expr_arg_iterator iter0, iter1;
            const_tree a0, a1;
            for (a0 = first_const_call_expr_arg (arg0, &iter0),
                   a1 = first_const_call_expr_arg (arg1, &iter1);
                 a0 && a1;
                 a0 = next_const_call_expr_arg (&iter0),
                   a1 = next_const_call_expr_arg (&iter1))
              if (! operand_equal_p (a0, a1, flags))
                return 0;

            /* If we get here and both argument lists are exhausted
               then the CALL_EXPRs are equal.  */
            return ! (a0 || a1);
          }
        default:
          return 0;
        }

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
              && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
              && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
              && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}
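/* Usage note (editorial, not part of the original source): a typical
   call compares two operands of one tree node with no flags, e.g.

     if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
       ...

   and a caller that can guarantee global memory is unchanged between
   the two expressions passes OEP_PURE_SAME, so that two calls to the
   same pure function with the same arguments compare equal.  */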
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
         to match the longer operand.  */
      primarg1 = fold_convert (signed_or_unsigned_type_for
                               (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
        return 1;
    }

  return 0;
}
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
               || code == COMPOUND_EXPR))
    tclass = tcc_binary;

  else if (tclass == tcc_expression && code == SAVE_EXPR
           && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
         too complex to handle.  */
      if (*cval1 || *cval2)
        return 0;

      tclass = tcc_unary;
      *save_p = 1;
    }

  switch (tclass)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
              && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
        return (twoval_comparison_p (TREE_OPERAND (arg, 0),
                                     cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                        cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 2),
                                        cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
         the second operand, we know *CVAL1 can't be zero.  It must be that
         one side of the comparison is each of the values; test for the
         case where this isn't true by failing if the two operands
         are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
                           TREE_OPERAND (arg, 1), 0))
        return 0;

      if (*cval1 == 0)
        *cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
        ;
      else
        return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
        ;
      else
        return 0;

      return 1;

    default:
      return 0;
    }
}
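/* Illustrative example (editorial, not part of the original source):
   for the expression (a < b) == (b > a), twoval_comparison_p records
   *CVAL1 = a and *CVAL2 = b from the first comparison, then verifies
   that the second comparison uses exactly the same two values, so
   eval_subst below can evaluate the whole expression under assumed
   truth values for the comparisons of a and b.  */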
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (location_t loc, tree arg, tree old0, tree new0,
            tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    tclass = tcc_binary;

  switch (tclass)
    {
    case tcc_unary:
      return fold_build1_loc (loc, code, type,
                              eval_subst (loc, TREE_OPERAND (arg, 0),
                                          old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2_loc (loc, code, type,
                              eval_subst (loc, TREE_OPERAND (arg, 0),
                                          old0, new0, old1, new1),
                              eval_subst (loc, TREE_OPERAND (arg, 1),
                                          old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
        {
        case SAVE_EXPR:
          return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
                             old1, new1);

        case COMPOUND_EXPR:
          return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
                             old1, new1);

        case COND_EXPR:
          return fold_build3_loc (loc, code, type,
                                  eval_subst (loc, TREE_OPERAND (arg, 0),
                                              old0, new0, old1, new1),
                                  eval_subst (loc, TREE_OPERAND (arg, 1),
                                              old0, new0, old1, new1),
                                  eval_subst (loc, TREE_OPERAND (arg, 2),
                                              old0, new0, old1, new1));
        default:
          break;
        }
      /* Fall through - ???  */

    case tcc_comparison:
      {
        tree arg0 = TREE_OPERAND (arg, 0);
        tree arg1 = TREE_OPERAND (arg, 1);

        /* We need to check both for exact equality and tree equality.  The
           former will be true if the operand has a side-effect.  In that
           case, we know the operand occurred exactly once.  */

        if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
          arg0 = new0;
        else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
          arg0 = new1;

        if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
          arg1 = new0;
        else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
          arg1 = new1;

        return fold_build2_loc (loc, code, type, arg0, arg1);
      }

    default:
      return arg;
    }
}
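/* Illustrative example (editorial, not part of the original source):
   substituting OLD0 = a, NEW0 = 1, OLD1 = b, NEW1 = 0 into the tree for
   (a < b) || (a == b) replaces each comparison operand and refolds,
   producing the constant truth value of (1 < 0) || (1 == 0).  This is
   how fold can evaluate a two-value expression under an assumed
   ordering of its two operands.  */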
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement casted to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1_loc (loc, NOP_EXPR, void_type_node,
                       fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2_loc (loc, COMPOUND_EXPR, type,
                       fold_ignored_result (omitted), t);

  return non_lvalue_loc (loc, t);
}
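/* Illustrative example (editorial, not part of the original source):
   folding f () * 0 cannot simply drop the call, so the result is the
   COMPOUND_EXPR (f (), 0), which still evaluates f for its side
   effects but yields the constant.  For a side-effect-free operand
   such as a plain variable, the constant alone is returned.  */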
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands_loc (location_t loc, tree type, tree result,
                       tree omitted1, tree omitted2)
{
  tree t = fold_convert_loc (loc, type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
  if (TREE_SIDE_EFFECTS (omitted1))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
}
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

static tree
fold_truth_not_expr (location_t loc, tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  location_t loc1, loc2;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
          && flag_trapping_math
          && code != ORDERED_EXPR && code != UNORDERED_EXPR
          && code != NE_EXPR && code != EQ_EXPR)
        return NULL_TREE;

      code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
      if (code == ERROR_MARK)
        return NULL_TREE;

      return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
                         TREE_OPERAND (arg, 1));
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_OR_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_AND_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
         unless the second operand is a TRUTH_NOT_EXPR in which case our
         result is the XOR of the first operand with the inside of the
         negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
        return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
                           TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
        return build2_loc (loc, TRUTH_XOR_EXPR, type,
                           invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
                           TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ORIF_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
        tree arg1 = TREE_OPERAND (arg, 1);
        tree arg2 = TREE_OPERAND (arg, 2);

        loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
        loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);

        /* A COND_EXPR may have a throw as one operand, which
           then has void type.  Just leave void operands
           as they are.  */
        return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
                           VOID_TYPE_P (TREE_TYPE (arg1))
                           ? arg1 : invert_truthvalue_loc (loc1, arg1),
                           VOID_TYPE_P (TREE_TYPE (arg2))
                           ? arg2 : invert_truthvalue_loc (loc2, arg2));
      }

    case COMPOUND_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, COMPOUND_EXPR, type,
                         TREE_OPERAND (arg, 0),
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));

    CASE_CONVERT:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
        return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

      /* ... fall through ...  */

    case FLOAT_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, TREE_CODE (arg), type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
        return NULL_TREE;
      return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));

    case SAVE_EXPR:
      return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, CLEANUP_POINT_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    default:
      return NULL_TREE;
    }
}
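/* Illustrative example (editorial, not part of the original source):
   negating the tree for (a && b) produces (!a || !b) by De Morgan,
   with each half negated recursively, so !(x < 3 && y) becomes
   (x >= 3 || !y) once the comparison is inverted by
   invert_tree_comparison.  */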
/* Fold the truth-negation of ARG.  This never alters ARG itself.  We
   assume that ARG is an operation that returns a truth value (0 or 1
   for scalars, 0 or -1 for vectors).  Return the folded expression if
   folding is successful.  Otherwise, return NULL_TREE.  */

static tree
fold_invert_truthvalue (location_t loc, tree arg)
{
  tree type = TREE_TYPE (arg);
  return fold_unary_loc (loc, VECTOR_TYPE_P (type)
                              ? BIT_NOT_EXPR
                              : TRUTH_NOT_EXPR,
                         type, arg);
}

/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1 for scalars, 0 or -1 for vectors).  */

tree
invert_truthvalue_loc (location_t loc, tree arg)
{
  if (TREE_CODE (arg) == ERROR_MARK)
    return arg;

  tree type = TREE_TYPE (arg);
  return fold_build1_loc (loc, VECTOR_TYPE_P (type)
                               ? BIT_NOT_EXPR
                               : TRUTH_NOT_EXPR,
                          type, arg);
}
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
        (A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (location_t loc, enum tree_code code, tree type,
                     tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
          && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  common = fold_convert_loc (loc, type, common);
  left = fold_convert_loc (loc, type, left);
  right = fold_convert_loc (loc, type, right);
  return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
                          fold_build2_loc (loc, code, type, left, right));
}
/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
   with code CODE.  This optimization is unsafe.  */

static tree
distribute_real_division (location_t loc, enum tree_code code, tree type,
                          tree arg0, tree arg1)
{
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
                          TREE_OPERAND (arg1, 1), 0))
    return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
                            fold_build2_loc (loc, code, type,
                                             TREE_OPERAND (arg0, 0),
                                             TREE_OPERAND (arg1, 0)),
                            TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
                       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      if (!mul0)
        real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
        real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2_loc (loc, MULT_EXPR, type,
                              TREE_OPERAND (arg0, 0),
                              build_real (type, r0));
    }

  return NULL_TREE;
}
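/* Illustrative example (editorial, not part of the original source):
   with CODE == PLUS_EXPR, A / 2.0 + A / 4.0 becomes
   A * (1/2.0 + 1/4.0) == A * 0.75, trading two divisions for one
   multiplication.  The result can round differently from the original
   expression, hence the "unsafe" caveat in the comment above.  */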
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

static tree
make_bit_field_ref (location_t loc, tree inner, tree type,
                    HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
{
  tree result, bftype;

  if (bitpos == 0)
    {
      tree size = TYPE_SIZE (TREE_TYPE (inner));
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
           || POINTER_TYPE_P (TREE_TYPE (inner)))
          && tree_fits_shwi_p (size)
          && tree_to_shwi (size) == bitsize)
        return fold_convert_loc (loc, type, inner);
    }

  bftype = type;
  if (TYPE_PRECISION (bftype) != bitsize
      || TYPE_UNSIGNED (bftype) == !unsignedp)
    bftype = build_nonstandard_integer_type (bitsize, 0);

  result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
                       size_int (bitsize), bitsize_int (bitpos));

  if (bftype != type)
    result = fold_convert_loc (loc, type, result);

  return result;
}
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (location_t loc, enum tree_code code,
                            tree compare_type, tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit
     size is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
                                &lunsignedp, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
         sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
                                    &runsignedp, &rvolatilep, false);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
          || lunsignedp != runsignedp || offset != 0
          || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
        return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
                         const_p ? TYPE_ALIGN (TREE_TYPE (linner))
                         : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
                                TYPE_ALIGN (TREE_TYPE (rinner))),
                         word_mode, false);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_cst_type (unsigned_type, -1);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
  mask = const_binop (RSHIFT_EXPR, mask,
                      size_int (nbitsize - lbitsize - lbitpos));

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return fold_build2_loc (loc, code, compare_type,
                            fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
                                             make_bit_field_ref (loc, linner,
                                                                 unsigned_type,
                                                                 nbitsize,
                                                                 nbitpos, 1),
                                             mask),
                            fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
                                             make_bit_field_ref (loc, rinner,
                                                                 unsigned_type,
                                                                 nbitsize,
                                                                 nbitpos, 1),
                                             mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (wi::lrshift (rhs, lbitsize) != 0)
        {
          warning (0, "comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }
  else
    {
      wide_int tem = wi::arshift (rhs, lbitsize - 1);
      if (tem != 0 && tem != -1)
        {
          warning (0, "comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = build_int_cst (type, 0);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);

  rhs = const_binop (BIT_AND_EXPR,
                     const_binop (LSHIFT_EXPR,
                                  fold_convert_loc (loc, unsigned_type, rhs),
                                  size_int (lbitpos)),
                     mask);

  lhs = build2_loc (loc, code, compare_type,
                    build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
  return lhs;
}
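/* Illustrative example (editorial, not part of the original source):
   given

     struct s { unsigned f : 3; } x;   ...   x.f == 5

   the field lives inside a byte-sized chunk, so instead of extracting
   and shifting the 3-bit field, the comparison becomes roughly

     (chunk & mask) == ((5 << lbitpos) & mask)

   i.e. one AND of the containing byte against a compile-time constant,
   with the constant pre-shifted into the field's position.  */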
/* Subroutine for fold_truth_andor_1: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
                        HOST_WIDE_INT *pbitpos, machine_mode *pmode,
                        int *punsignedp, int *pvolatilep,
                        tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (CONVERT_EXPR_P (exp)
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
        return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
                               punsignedp, pvolatilep, false);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst_type (unsigned_type, -1);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
                            fold_convert_loc (loc, unsigned_type, and_mask),
                            mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bit positions and MASK is SIGNED.  */

static int
all_ones_mask_p (const_tree mask, unsigned int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);

  /* If this function returns true when the type of the mask is
     UNSIGNED, then there will be errors.  In particular see
     gcc.c-torture/execute/990326-1.c.  There does not appear to be
     any documentation paper trail as to why this is so.  But the pre
     wide-int worked with that restriction and it has been preserved
     here.  */
  if (size > precision || TYPE_SIGN (type) == UNSIGNED)
    return false;

  return wi::mask (size, false, precision) == mask;
}
/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, const_tree val)
{
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (wi::only_sign_bit_p (val, width))
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
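/* Illustrative example (editorial, not part of the original source):
   for a 16-bit EXP, VAL == 0x8000 is exactly the sign bit, so
   sign_bit_p returns EXP; and if EXP is (int) s for a 16-bit s, the
   same VAL is still recognized against the unextended type through the
   recursive call.  */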
/* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (const_tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  STRIP_NOPS (exp);

  return (CONSTANT_CLASS_P (exp)
          || TREE_CODE (exp) == SSA_NAME
          || (DECL_P (exp)
              && ! TREE_ADDRESSABLE (exp)
              && ! TREE_THIS_VOLATILE (exp)
              && ! DECL_NONLOCAL (exp)
              /* Don't regard global variables as simple.  They may be
                 allocated in ways unknown to the compiler (shared memory,
                 #pragma weak, etc).  */
              && ! TREE_PUBLIC (exp)
              && ! DECL_EXTERNAL (exp)
              /* Weakrefs are not safe to be read, since they can be NULL.
                 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
                 have DECL_WEAK flag set.  */
              && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
              /* Loading a static variable is unduly expensive, but global
                 registers aren't expensive.  */
              && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}

/* Subroutine for fold_truth_andor: determine if an operand is simple enough
   to be evaluated unconditionally.
   In addition to simple_operand_p, we assume that comparisons, conversions,
   and logic-not operations are simple, if their operands are simple, too.  */

static bool
simple_operand_p_2 (tree exp)
{
  enum tree_code code;

  if (TREE_SIDE_EFFECTS (exp)
      || tree_could_trap_p (exp))
    return false;

  while (CONVERT_EXPR_P (exp))
    exp = TREE_OPERAND (exp, 0);

  code = TREE_CODE (exp);

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return (simple_operand_p (TREE_OPERAND (exp, 0))
            && simple_operand_p (TREE_OPERAND (exp, 1)));

  if (code == TRUTH_NOT_EXPR)
    return simple_operand_p_2 (TREE_OPERAND (exp, 0));

  return simple_operand_p (exp);
}
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
        X == 2 || X == 3 || X == 4 || X == 5
   and
        X >= 2 && X <= 5
   are converted to
        (unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
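/* Editorial check of the example above (not part of the original
   source): X in {2, 3, 4, 5} holds iff X - 2 is in {0, 1, 2, 3};
   computing X - 2 in an unsigned type makes any X < 2 wrap around to a
   huge value, so the single comparison (unsigned) (X - 2) <= 3 covers
   both the lower and the upper bound at once.  */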
/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
             tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
                         arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);

  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}
/* Helper routine for make_range.  Perform one step for it, return
   new expression if the loop should continue or NULL_TREE if it should
   stop.  */

tree
make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
                 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
                 bool *strict_overflow_p)
{
  tree arg0_type = TREE_TYPE (arg0);
  tree n_low, n_high, low = *p_low, high = *p_high;
  int in_p = *p_in_p, n_in_p;

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      /* We can only do something if the range is testing for zero.  */
      if (low == NULL_TREE || high == NULL_TREE
          || ! integer_zerop (low) || ! integer_zerop (high))
        return NULL_TREE;
      *p_in_p = ! in_p;
      return arg0;

    case EQ_EXPR: case NE_EXPR:
    case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
      /* We can only do something if the range is testing for zero
         and if the second operand is an integer constant.  Note that
         saying something is "in" the range we make is done by
         complementing IN_P since it will set in the initial case of
         being not equal to zero; "out" is leaving it alone.  */
      if (low == NULL_TREE || high == NULL_TREE
          || ! integer_zerop (low) || ! integer_zerop (high)
          || TREE_CODE (arg1) != INTEGER_CST)
        return NULL_TREE;

      switch (code)
        {
        case NE_EXPR:  /* - [c, c]  */
          low = high = arg1;
          break;
        case EQ_EXPR:  /* + [c, c]  */
          in_p = ! in_p, low = high = arg1;
          break;
        case GT_EXPR:  /* - [-, c] */
          low = 0, high = arg1;
          break;
        case GE_EXPR:  /* + [c, -] */
          in_p = ! in_p, low = arg1, high = 0;
          break;
        case LT_EXPR:  /* - [c, -] */
          low = arg1, high = 0;
          break;
        case LE_EXPR:  /* + [-, c] */
          in_p = ! in_p, low = 0, high = arg1;
          break;
        default:
          gcc_unreachable ();
        }

      /* If this is an unsigned comparison, we also know that EXP is
         greater than or equal to zero.  We base the range tests we make
         on that fact, so we record it here so we can parse existing
         range tests.  We test arg0_type since often the return type
         of, e.g. EQ_EXPR, is boolean.  */
      if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
        {
          if (! merge_ranges (&n_in_p, &n_low, &n_high,
                              in_p, low, high, 1,
                              build_int_cst (arg0_type, 0),
                              NULL_TREE))
            return NULL_TREE;

          in_p = n_in_p, low = n_low, high = n_high;

          /* If the high bound is missing, but we have a nonzero low
             bound, reverse the range so it goes from zero to the low bound
             minus 1.  */
          if (high == 0 && low && ! integer_zerop (low))
            {
              in_p = ! in_p;
              high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
                                  build_int_cst (TREE_TYPE (low), 1), 0);
              low = build_int_cst (arg0_type, 0);
            }
        }

      *p_low = low;
      *p_high = high;
      *p_in_p = in_p;
      return arg0;

    case NEGATE_EXPR:
      /* If flag_wrapv and ARG0_TYPE is signed, make sure
         low and high are non-NULL, then normalize will DTRT.  */
      if (!TYPE_UNSIGNED (arg0_type)
          && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
        {
          if (low == NULL_TREE)
            low = TYPE_MIN_VALUE (arg0_type);
          if (high == NULL_TREE)
            high = TYPE_MAX_VALUE (arg0_type);
        }

      /* (-x) IN [a,b] -> x in [-b, -a]  */
      n_low = range_binop (MINUS_EXPR, exp_type,
                           build_int_cst (exp_type, 0),
                           0, high, 1);
      n_high = range_binop (MINUS_EXPR, exp_type,
                            build_int_cst (exp_type, 0),
                            0, low, 0);
      if (n_high != 0 && TREE_OVERFLOW (n_high))
        return NULL_TREE;
      goto normalize;

    case BIT_NOT_EXPR:
      /* ~ X -> -X - 1  */
      return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
                         build_int_cst (exp_type, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
      if (TREE_CODE (arg1) != INTEGER_CST)
        return NULL_TREE;

      /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
         move a constant to the other side.  */
      if (!TYPE_UNSIGNED (arg0_type)
          && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
        return NULL_TREE;

      /* If EXP is signed, any overflow in the computation is undefined,
         so we don't worry about it so long as our computations on
         the bounds don't overflow.  For unsigned, overflow is defined
         and this is exactly the right thing.  */
      n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                           arg0_type, low, 0, arg1, 0);
      n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                            arg0_type, high, 1, arg1, 0);
      if ((n_low != 0 && TREE_OVERFLOW (n_low))
          || (n_high != 0 && TREE_OVERFLOW (n_high)))
        return NULL_TREE;

      if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
        *strict_overflow_p = true;

    normalize:
      /* Check for an unsigned range which has wrapped around the maximum
         value thus making n_high < n_low, and normalize it.  */
      if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
        {
          low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
                             build_int_cst (TREE_TYPE (n_high), 1), 0);
          high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
                              build_int_cst (TREE_TYPE (n_low), 1), 0);

          /* If the range is of the form +/- [ x+1, x ], we won't
             be able to normalize it.  But then, it represents the
             whole range or the empty set, so make it
             +/- [ -, - ].  */
          if (tree_int_cst_equal (n_low, low)
              && tree_int_cst_equal (n_high, high))
            low = high = 0;
          else
            in_p = ! in_p;
        }
      else
        low = n_low, high = n_high;

      *p_low = low;
      *p_high = high;
      *p_in_p = in_p;
      return arg0;

    CASE_CONVERT:
    case NON_LVALUE_EXPR:
      if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
        return NULL_TREE;

      if (! INTEGRAL_TYPE_P (arg0_type)
          || (low != 0 && ! int_fits_type_p (low, arg0_type))
          || (high != 0 && ! int_fits_type_p (high, arg0_type)))
        return NULL_TREE;

      n_low = low, n_high = high;

      if (n_low != 0)
        n_low = fold_convert_loc (loc, arg0_type, n_low);

      if (n_high != 0)
        n_high = fold_convert_loc (loc, arg0_type, n_high);

      /* If we're converting arg0 from an unsigned type, to exp,
         a signed type, we will be doing the comparison as unsigned.
         The tests above have already verified that LOW and HIGH
         are both positive.

         So we have to ensure that we will handle large unsigned
         values the same way that the current signed bounds treat
         negative values.  */

      if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
        {
          tree high_positive;
          tree equiv_type;
          /* For fixed-point modes, we need to pass the saturating flag
             as the 2nd parameter.  */
          if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
            equiv_type
              = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
                                                TYPE_SATURATING (arg0_type));
          else
            equiv_type
              = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);

          /* A range without an upper bound is, naturally, unbounded.
             Since convert would have cropped a very large value, use
             the max value for the destination type.  */
          high_positive
            = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
              : TYPE_MAX_VALUE (arg0_type);

          if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
            high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
                                             fold_convert_loc (loc, arg0_type,
                                                               high_positive),
                                             build_int_cst (arg0_type, 1));

          /* If the low bound is specified, "and" the range with the
             range for which the original unsigned value will be
             positive.  */
          if (low != 0)
            {
              if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
                                  1, fold_convert_loc (loc, arg0_type,
                                                       integer_zero_node),
                                  high_positive))
                return NULL_TREE;

              in_p = (n_in_p == in_p);
            }
          else
            {
              /* Otherwise, "or" the range with the range of the input
                 that will be interpreted as negative.  */
              if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
                                  1, fold_convert_loc (loc, arg0_type,
                                                       integer_zero_node),
                                  high_positive))
                return NULL_TREE;

              in_p = (in_p != n_in_p);
            }
        }

      *p_low = n_low;
      *p_high = n_high;
      *p_in_p = in_p;
      return arg0;

    default:
      return NULL_TREE;
    }
}
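/* Illustrative example (editorial, not part of the original source):
   one step over EXP = x - 5 with range + [0, 9] moves the constant to
   the bounds, giving x in + [5, 14]; if unsigned bounds wrap so that
   the new high ends up below the new low, the "normalize" code above
   flips IN_P and swaps the bounds back into a representable range.  */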
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same
   type as the returned expression.  If EXP is not a comparison, we
   will most likely not be returning a useful value and range.  Set
   *STRICT_OVERFLOW_P to true if the return value is only valid
   because signed overflow is undefined; otherwise, do not change
   *STRICT_OVERFLOW_P.  */

tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
            bool *strict_overflow_p)
{
  enum tree_code code;
  tree arg0, arg1 = NULL_TREE;
  tree exp_type, nexp;
  int in_p;
  tree low, high;
  location_t loc = EXPR_LOCATION (exp);

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = build_int_cst (TREE_TYPE (exp), 0);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);
      arg0 = NULL_TREE;

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
        {
          if (TREE_OPERAND_LENGTH (exp) > 0)
            arg0 = TREE_OPERAND (exp, 0);
          if (TREE_CODE_CLASS (code) == tcc_binary
              || TREE_CODE_CLASS (code) == tcc_comparison
              || (TREE_CODE_CLASS (code) == tcc_expression
                  && TREE_OPERAND_LENGTH (exp) > 1))
            arg1 = TREE_OPERAND (exp, 1);
        }
      if (arg0 == NULL_TREE)
        break;

      nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
                              &high, &in_p, strict_overflow_p);
      if (nexp == NULL_TREE)
        break;
      exp = nexp;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
                                                 exp, 0, low, 0))
                      && integer_onep (range_binop (LE_EXPR, integer_type_node,
                                                    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
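/* Illustrative example (editorial, not part of the original source):
   starting from "EXP != 0", make_range on the tree for x < 5 steps
   through the LT_EXPR case of make_range_step and returns x with the
   range - [5, -], i.e. x is outside [5, max], which is the same thing
   as x being inside [min, 4].  */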
4174 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4175 type, TYPE, return an expression to test if EXP is in (or out of, depending
4176 on IN_P) the range. Return 0 if the test couldn't be created. */
tree
build_range_check (location_t loc, tree type, tree exp, int in_p,
		   tree low, tree high)
{
  tree etype = TREE_TYPE (exp), value;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (etype) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (! in_p)
    {
      value = build_range_check (loc, type, exp, 1, low, high);
      if (value != 0)
	return invert_truthvalue_loc (loc, value);

      return 0;
    }

  if (low == 0 && high == 0)
    return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);

  if (low == 0)
    return fold_build2_loc (loc, LE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, high));

  if (high == 0)
    return fold_build2_loc (loc, GE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  if (operand_equal_p (low, high, 0))
    return fold_build2_loc (loc, EQ_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
	{
	  etype = unsigned_type_for (etype);
	  high = fold_convert_loc (loc, etype, high);
	  exp = fold_convert_loc (loc, etype, exp);
	}
      return build_range_check (loc, type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      int prec = TYPE_PRECISION (etype);

      if (wi::mask (prec - 1, false, prec) == high)
	{
	  if (TYPE_UNSIGNED (etype))
	    {
	      tree signed_etype = signed_type_for (etype);
	      if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
		etype
		  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
	      else
		etype = signed_etype;
	      exp = fold_convert_loc (loc, etype, exp);
	    }
	  return fold_build2_loc (loc, GT_EXPR, type, exp,
				  build_int_cst (etype, 0));
	}
    }

  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetic for the type of the expression.
     First make sure that arithmetic in this type is valid, then make sure
     that it wraps around.  */
  if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
    etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
					    TYPE_UNSIGNED (etype));

  if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
	 for the type in question, as we rely on this here.  */
      utype = unsigned_type_for (etype);
      maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
			  build_int_cst (TREE_TYPE (maxv), 1), 1);
      minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
				      minv, 1, maxv, 1)))
	etype = utype;
      else
	return 0;
    }

  high = fold_convert_loc (loc, etype, high);
  low = fold_convert_loc (loc, etype, low);
  exp = fold_convert_loc (loc, etype, exp);

  value = const_binop (MINUS_EXPR, high, low);

  if (POINTER_TYPE_P (etype))
    {
      if (value != 0 && !TREE_OVERFLOW (value))
	{
	  low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
	  return build_range_check (loc, type,
				    fold_build_pointer_plus_loc (loc, exp, low),
				    1, build_int_cst (etype, 0), value);
	}
      return 0;
    }

  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (loc, type,
			      fold_build2_loc (loc, MINUS_EXPR, etype,
					       exp, low),
			      1, build_int_cst (etype, 0), value);

  return 0;
}
/* Return the predecessor of VAL in its type, handling the infinite case.  */

static tree
range_predecessor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
    return 0;
  else
    return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
			build_int_cst (TREE_TYPE (val), 1), 0);
}
/* Return the successor of VAL in its type, handling the infinite case.  */

static tree
range_successor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
    return 0;
  else
    return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
			build_int_cst (TREE_TYPE (val), 1), 0);
}
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

bool
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
	      tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (lowequal
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the second range to the end of the first.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)
	{
	  low = range_successor (high1);
	  high = high0;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* We are in the weird situation where high0 > high1 but
		 high1 has no successor.  Punt.  */
	      return 0;
	    }
	}
      else if (! subset || highequal)
	{
	  low = low0;
	  high = range_predecessor (low1);
	  in_p = 1;
	  if (high == 0)
	    {
	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
	      return 0;
	    }
	}
      else
	return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
	in_p = 0, low = high = 0;
      else
	{
	  low = range_successor (high0);
	  high = high1;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* high1 > high0 but high0 has no successor.  Punt.  */
	      return 0;
	    }
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of
	 the second.  */
      if (no_overlap)
	{
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_successor (high0),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    {
	      /* Canonicalize - [min, x] into - [-, x].  */
	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (low0)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (low0))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (low0,
					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
		      low0 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
			&& integer_zerop (low0))
		      low0 = 0;
		    break;
		  default:
		    break;
		  }

	      /* Canonicalize - [x, max] into - [x, -].  */
	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (high1)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (high1))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (high1,
					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
		      high1 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
						       high1, 1,
						       build_int_cst (TREE_TYPE (high1), 1),
						       1)))
		      high1 = 0;
		    break;
		  default:
		    break;
		  }

	      /* The ranges might be also adjacent between the maximum and
		 minimum values of the given type.  For
		 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
		 return + [x + 1, y - 1].  */
	      if (low0 == 0 && high1 == 0)
		{
		  low = range_successor (high0);
		  high = range_predecessor (low1);
		  if (low == 0 || high == 0)
		    return 0;

		  in_p = 1;
		}
	      else
		return 0;
	    }
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is also used to optimize
   A op B ? C : A by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */
static tree
fold_cond_expr_with_comparison (location_t loc, tree type,
				tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if (!HONOR_SIGNED_ZEROS (element_mode (type))
      && (FLOAT_TYPE_P (TREE_TYPE (arg01))
	  ? real_zerop (arg01)
	  : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
	     /* In the case that A is of the form X-Y, '-A' (arg2) may
		have already been folded to Y-X, check for that.  */
	  || (TREE_CODE (arg1) == MINUS_EXPR
	      && TREE_CODE (arg2) == MINUS_EXPR
	      && operand_equal_p (TREE_OPERAND (arg1, 0),
				  TREE_OPERAND (arg2, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg1, 1),
				  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	tem = fold_convert_loc (loc, arg1_type, arg1);
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
							  negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert_loc (loc, signed_type_for
				   (TREE_TYPE (arg1)), arg1);
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case LE_EXPR:
      case LT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert_loc (loc, signed_type_for
				   (TREE_TYPE (arg1)), arg1);
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return negate_expr (fold_convert_loc (loc, type, tem));
      default:
	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (!HONOR_SIGNED_ZEROS (element_mode (type))
      && integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      else if (comp_code == EQ_EXPR)
	return build_zero_cst (type);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (!HONOR_SIGNED_ZEROS (element_mode (type))
      && operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
	 as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
	  || VECTOR_TYPE_P (type)
	  || (strcmp (lang_hooks.name, "GNU C++") != 0
	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
	  || ! maybe_lvalue_p (arg1)
	  || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
	{
	  comp_type = type;
	  comp_op0 = arg1;
	  comp_op1 = arg2;
	}

      switch (comp_code)
	{
	case EQ_EXPR:
	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
	case NE_EXPR:
	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
	case LE_EXPR:
	case LT_EXPR:
	case UNLE_EXPR:
	case UNLT_EXPR:
	  /* In C++ a ?: expression can be an lvalue, so put the
	     operand which will be used if they are equal first
	     so that we can convert this back to the
	     corresponding COND_EXPR.  */
	  if (!HONOR_NANS (element_mode (arg1)))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
		    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2_loc (loc, MIN_EXPR, comp_type,
				       comp_op1, comp_op0);
	      return pedantic_non_lvalue_loc (loc,
					      fold_convert_loc (loc, type, tem));
	    }
	  break;
	case GE_EXPR:
	case GT_EXPR:
	case UNGE_EXPR:
	case UNGT_EXPR:
	  if (!HONOR_NANS (element_mode (arg1)))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
		    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2_loc (loc, MAX_EXPR, comp_type,
				       comp_op1, comp_op0);
	      return pedantic_non_lvalue_loc (loc,
					      fold_convert_loc (loc, type, tem));
	    }
	  break;
	case UNEQ_EXPR:
	  if (!HONOR_NANS (element_mode (arg1)))
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, arg2));
	  break;
	case LTGT_EXPR:
	  if (!HONOR_NANS (element_mode (arg1)))
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, arg1));
	  break;
	default:
	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	  break;
	}
    }

  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
	if (TREE_CODE (arg1) == INTEGER_CST)
	  break;
	/* We can replace A with C1 in this case.  */
	arg1 = fold_convert_loc (loc, type, arg01);
	return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);

      case LT_EXPR:
	/* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
	   MIN_EXPR, to preserve the signedness of the comparison.  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, tem));
	  }
	break;

      case LE_EXPR:
	/* If C1 is C2 - 1, this is min(A, C2), with the same care
	   as above.  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, tem));
	  }
	break;

      case GT_EXPR:
	/* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
	   MAX_EXPR, to preserve the signedness of the comparison.  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	  }
	break;

      case GE_EXPR:
	/* If C1 is C2 + 1, this is max(A, C2), with the same care as above.  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	  }
	break;
      case NE_EXPR:
	break;
      default:
	gcc_unreachable ();
      }

  return NULL_TREE;
}
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
		false) >= 2)
#endif
/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */
static tree
fold_range_test (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
	       || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  bool strict_overflow_p = false;
  tree tem, lhs, rhs;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when simplifying range test");

  if (!INTEGRAL_TYPE_P (type))
    return 0;

  lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
  rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (loc, type,
					 lhs != 0 ? lhs
					 : rhs != 0 ? rhs : integer_zero_node,
					 in_p, low, high))))
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
      return or_op ? invert_truthvalue_loc (loc, tem) : tem;
    }

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
	   && lhs != 0 && rhs != 0
	   && (code == TRUTH_ANDIF_EXPR
	       || code == TRUTH_ORIF_EXPR)
	   && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build2_loc (loc, code == TRUTH_ANDIF_EXPR
			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			   type, op0, op1);

      else if (!lang_hooks.decls.global_bindings_p ()
	       && !CONTAINS_PLACEHOLDER_P (lhs))
	{
	  tree common = save_expr (lhs);

	  if (0 != (lhs = build_range_check (loc, type, common,
					     or_op ? ! in0_p : in0_p,
					     low0, high0))
	      && (0 != (rhs = build_range_check (loc, type, common,
						 or_op ? ! in1_p : in1_p,
						 low1, high1))))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (warnmsg,
				       WARN_STRICT_OVERFLOW_COMPARISON);
	      return build2_loc (loc, code == TRUTH_ANDIF_EXPR
				 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
				 type, lhs, rhs);
	    }
	}
    }

  return 0;
}
/* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is sign-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */
static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (signed_type_for (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
			fold_convert (TREE_TYPE (c), mask));
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
}
/* For an expression that has the form
     (A && B) || ~A
   or
     (A || B) && ~A,
   we can drop one of the inner expressions and simplify to
     B || ~A
   or
     B && ~A

   LOC is the location of the resulting expression.  OP is the inner
   logical operation; the left-hand side in the examples above, while CMPOP
   is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
   removing a condition that guards another, as in
     (A != NULL && A->...) || A == NULL
   which we must not transform.  If RHS_ONLY is true, only eliminate the
   right-most operand of the inner logical operation.  */
static tree
merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
				 bool rhs_only)
{
  tree type = TREE_TYPE (cmpop);
  enum tree_code code = TREE_CODE (cmpop);
  enum tree_code truthop_code = TREE_CODE (op);
  tree lhs = TREE_OPERAND (op, 0);
  tree rhs = TREE_OPERAND (op, 1);
  tree orig_lhs = lhs, orig_rhs = rhs;
  enum tree_code rhs_code = TREE_CODE (rhs);
  enum tree_code lhs_code = TREE_CODE (lhs);
  enum tree_code inv_code;

  if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
    return NULL_TREE;

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return NULL_TREE;

  if (rhs_code == truthop_code)
    {
      tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
      if (newrhs != NULL_TREE)
	{
	  rhs = newrhs;
	  rhs_code = TREE_CODE (rhs);
	}
    }
  if (lhs_code == truthop_code && !rhs_only)
    {
      tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
      if (newlhs != NULL_TREE)
	{
	  lhs = newlhs;
	  lhs_code = TREE_CODE (lhs);
	}
    }

  inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
  if (inv_code == rhs_code
      && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return lhs;
  if (!rhs_only && inv_code == lhs_code
      && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return rhs;
  if (rhs != orig_rhs || lhs != orig_lhs)
    return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
			    lhs, rhs);
  return NULL_TREE;
}
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */
static tree
fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
		    tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  HOST_WIDE_INT first_bit, end_bit;
  int volatilep;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
		    build_int_cst (TREE_TYPE (lhs), 0));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
		    build_int_cst (TREE_TYPE (rhs), 0));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      if (operand_equal_p (ll_arg, rl_arg, 0)
	  && operand_equal_p (lr_arg, rr_arg, 0))
	{
	  result = combine_comparisons (loc, code, lcode, rcode,
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
      else if (operand_equal_p (ll_arg, rr_arg, 0)
	       && operand_equal_p (lr_arg, rl_arg, 0))
	{
	  result = combine_comparisons (loc, code, lcode,
					swap_tree_comparison (rcode),
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  */

  if (BRANCH_COST (optimize_function_for_speed_p (cfun),
		   false) >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
	  && lcode == NE_EXPR && integer_zerop (lr_arg)
	  && rcode == NE_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2_loc (loc, NE_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2_loc (loc, EQ_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (loc, ll_arg,
				     &ll_bitsize, &ll_bitpos, &ll_mode,
				     &ll_unsignedp, &volatilep, &ll_mask,
				     &ll_and_mask);
  lr_inner = decode_field_reference (loc, lr_arg,
				     &lr_bitsize, &lr_bitpos, &lr_mode,
				     &lr_unsignedp, &volatilep, &lr_mask,
				     &lr_and_mask);
  rl_inner = decode_field_reference (loc, rl_arg,
				     &rl_bitsize, &rl_bitpos, &rl_mode,
				     &rl_unsignedp, &volatilep, &rl_mask,
				     &rl_and_mask);
  rr_inner = decode_field_reference (loc, rr_arg,
				     &rr_bitsize, &rr_bitpos, &rr_mode,
				     &rr_unsignedp, &volatilep, &rr_mask,
				     &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
	   || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
	{
	  /* Make the left operand unsigned, since we are only interested
	     in the value of one bit.  Otherwise we are doing the wrong
	     thing below.  */
	  ll_unsignedp = 1;
	  l_const = ll_mask;
	}
      else
	return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
	{
	  rl_unsignedp = 1;
	  r_const = rl_mask;
	}
      else
	return 0;
    }

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
			  volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
			 size_int (xll_bitpos));
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
			 size_int (xrl_bitpos));

  if (l_const)
    {
      l_const = fold_convert_loc (loc, lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,
							 lntype, ll_mask))))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }
  if (r_const)
    {
      r_const = fold_convert_loc (loc, lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,
							 lntype, rl_mask))))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
	  /* Make sure the two fields on the right
	     correspond to the left without being swapped.  */
	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
	return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
			      volatilep);
      if (rnmode == VOIDmode)
	return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
	{
	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
	}

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
							    rntype, lr_mask),
			     size_int (xlr_bitpos));
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
							    rntype, rr_mask),
			     size_int (xrr_bitpos));

      /* Make a mask that corresponds to both fields being compared.
	 Do this for both items being compared.  If the operands are the
	 same size and the bits being compared are in the same position
	 then we can do this by masking both and comparing the masked
	 results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
	{
	  lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
				    ll_unsignedp || rl_unsignedp);
	  if (! all_ones_mask_p (ll_mask, lnbitsize))
	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

	  rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
				    lr_unsignedp || rr_unsignedp);
	  if (! all_ones_mask_p (lr_mask, rnbitsize))
	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
	}

      /* There is still another way we can do something:  If both pairs of
	 fields being compared are adjacent, we may be able to make a wider
	 field containing them both.

	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
	 the mask must be shifted to account for the shift done by
	 make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
	   && lr_bitsize + lr_bitpos == rr_bitpos)
	  || (ll_bitpos == rl_bitpos + rl_bitsize
	      && lr_bitpos == rr_bitpos + rr_bitsize))
	{
	  tree type;

	  lhs = make_bit_field_ref (loc, ll_inner, lntype,
				    ll_bitsize + rl_bitsize,
				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
	  rhs = make_bit_field_ref (loc, lr_inner, rntype,
				    lr_bitsize + rr_bitsize,
				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
				 size_int (MIN (xll_bitpos, xrl_bitpos)));
	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
				 size_int (MIN (xlr_bitpos, xrr_bitpos)));

	  /* Convert to the smaller type before masking out unwanted bits.  */
	  type = lntype;
	  if (lntype != rntype)
	    {
	      if (lnbitsize > rnbitsize)
		{
		  lhs = fold_convert_loc (loc, rntype, lhs);
		  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
		  type = rntype;
		}
	      else if (lnbitsize < rnbitsize)
		{
		  rhs = fold_convert_loc (loc, lntype, rhs);
		  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
		  type = lntype;
		}
	    }

	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
	}

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
			   const_binop (BIT_AND_EXPR, result, r_const)) != 1)
    {
      if (wanted_code == NE_EXPR)
	{
	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
	  return constant_boolean_node (true, truth_type);
	}
      else
	{
	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
	  return constant_boolean_node (false, truth_type);
	}
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
			       ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);

  return build2_loc (loc, wanted_code, truth_type, result,
		     const_binop (BIT_IOR_EXPR, l_const, r_const));
}
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
			    tree op0, tree op1)
{
  tree arg0 = op0;
  enum tree_code op_code;
  tree comp_const;
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_OVERFLOW (minmax_const))
    return NULL_TREE;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (code)
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      {
	tree tem
	  = optimize_minmax_comparison (loc,
					invert_tree_comparison (code, false),
					type, op0, op1);
	if (tem)
	  return invert_truthvalue_loc (loc, tem);
	return NULL_TREE;
      }

    case GE_EXPR:
      return
	fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
			 optimize_minmax_comparison
			 (loc, EQ_EXPR, type, arg0, comp_const),
			 optimize_minmax_comparison
			 (loc, GT_EXPR, type, arg0, comp_const));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
	/* MAX (X, 0) == 0  ->  X <= 0  */
	return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR && consts_lt)
	/* MAX (X, 0) == 5  ->  X == 5   */
	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) == -1  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else if (consts_equal)
	/* MIN (X, 0) == 0  ->  X >= 0  */
	return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);

      else if (consts_lt)
	/* MIN (X, 0) == 5  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
	/* MIN (X, 0) == -1  ->  X == -1  */
	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
	/* MAX (X, 0) > 0  ->  X > 0
	   MAX (X, 0) > 5  ->  X > 5  */
	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) > -1  ->  true  */
	return omit_one_operand_loc (loc, type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
	/* MIN (X, 0) > 0  ->  false
	   MIN (X, 0) > 5  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
	/* MIN (X, 0) > -1  ->  X > -1  */
	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

    default:
      return NULL_TREE;
    }
}
/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.

   We set *STRICT_OVERFLOW_P to true if the return value depends on
   signed overflow being undefined.  Otherwise we do not change
   *STRICT_OVERFLOW_P.  */
static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
		bool *strict_overflow_p)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  static int depth;
  tree ret;

  if (depth > 3)
    return NULL;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
  depth--;

  return ret;
}
static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
		  bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
				   > GET_MODE_SIZE (TYPE_MODE (type)))
		? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;
  bool sub_strict_overflow_p;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
	 or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
	  || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
	return const_binop (code, fold_convert (ctype, t),
			    fold_convert (ctype, c));
      break;

    CASE_CONVERT: case NON_LVALUE_EXPR:
      /* If op0 is an expression ... */
      if ((COMPARISON_CLASS_P (op0)
	   || UNARY_CLASS_P (op0)
	   || BINARY_CLASS_P (op0)
	   || VL_EXP_CLASS_P (op0)
	   || EXPRESSION_CLASS_P (op0))
	  /* ... and has wrapping overflow, and its type is smaller
	     than ctype, then we cannot pass through as widening.  */
	  && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
	       && (TYPE_PRECISION (ctype)
		   > TYPE_PRECISION (TREE_TYPE (op0))))
	      /* ... or this is a truncation (t is narrower than op0),
		 then we cannot pass through this narrowing.  */
	      || (TYPE_PRECISION (type)
		  < TYPE_PRECISION (TREE_TYPE (op0)))
	      /* ... or signedness changes for division or modulus,
		 then we cannot pass through this conversion.  */
	      || (code != MULT_EXPR
		  && (TYPE_UNSIGNED (ctype)
		      != TYPE_UNSIGNED (TREE_TYPE (op0))))
	      /* ... or has undefined overflow while the converted to
		 type has not, we cannot do the operation in the inner type
		 as that would introduce undefined overflow.  */
	      || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
		  && !TYPE_OVERFLOW_UNDEFINED (type))))
	break;

      /* Pass the constant down and see if we can make a simplification.  If
	 we can, replace this expression with the inner simplification for
	 possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
	  && TREE_CODE (t2) == INTEGER_CST
	  && !TREE_OVERFLOW (t2)
	  && (0 != (t1 = extract_muldiv (op0, t2, code,
					 code == MULT_EXPR
					 ? ctype : NULL_TREE,
					 strict_overflow_p))))
	return t1;
      break;

    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
	 must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
	{
	  tree cstype = (*signed_type_for) (ctype);
	  if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
	      != 0)
	    {
	      t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
	      return fold_convert (ctype, t1);
	    }
	  break;
	}
      /* If the constant is negative, we cannot simplify this.  */
      if (tree_int_cst_sgn (c) == -1)
	break;
      /* FALLTHROUGH */
    case NEGATE_EXPR:
      /* For division and modulus, type can't be unsigned, as e.g.
	 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
	 For signed types, even with wrapping overflow, this is fine.  */
      if (code != MULT_EXPR && TYPE_UNSIGNED (type))
	break;
      if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
	  != 0)
	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
	 this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
	break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      sub_strict_overflow_p = false;
      if ((t1 = extract_muldiv (op0, c, code, wide_type,
				&sub_strict_overflow_p)) != 0
	  && (t2 = extract_muldiv (op1, c, code, wide_type,
				   &sub_strict_overflow_p)) != 0)
	{
	  if (tree_int_cst_sgn (c) < 0)
	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
	 or floor division, by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  Signed
	 left-shift overflow is implementation-defined rather than
	 undefined in C90, so do not convert signed left shift into
	 multiplication.  */
      if (TREE_CODE (op1) == INTEGER_CST
	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
	  && 0 != (t1 = fold_convert (ctype,
				      const_binop (LSHIFT_EXPR,
						   size_one_node,
						   op1)))
	  && !TREE_OVERFLOW (t1))
	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
				       ? MULT_EXPR : FLOOR_DIV_EXPR,
				       ctype,
				       fold_convert (ctype, op0),
				       t1),
			       c, code, wide_type, strict_overflow_p);
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
      sub_strict_overflow_p = false;
      t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
      t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
      if (t1 != 0 && t2 != 0
	  && (code == MULT_EXPR
	      /* If not multiplication, we can only do this if both operands
		 are divisible by c.  */
	      || (multiple_of_p (ctype, op0, c)
		  && multiple_of_p (ctype, op1, c))))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}

      /* If this was a subtraction, negate OP1 and set it to be an addition.
	 This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
	{
	  tcode = PLUS_EXPR, op1 = negate_expr (op1);
	  /* If OP1 was not easily negatable, the constant may be OP0.  */
	  if (TREE_CODE (op0) == INTEGER_CST)
	    {
	      tree tem = op0;
	      op0 = op1;
	      op1 = tem;
	      tem = t1;
	      t1 = t2;
	      t2 = tem;
	    }
	}

      if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If either OP1 or C are negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
	    break;
	}

      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
	{
	  op1 = const_binop (code, fold_convert (ctype, op1),
			     fold_convert (ctype, c));
	  /* We allow the constant to overflow with wrapping semantics.  */
	  if (op1 == 0
	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
	    break;
	}
      else
	break;

      /* If we have an unsigned type, we cannot widen the operation since it
	 will change the result if the original computation overflowed.  */
      if (TYPE_UNSIGNED (ctype) && ctype != type)
	break;

      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow
	 and overflow is defined.  With undefined overflow
	 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.  */
      if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
	return fold_build2 (tcode, ctype,
			    fold_build2 (code, ctype,
					 fold_convert (ctype, op0),
					 fold_convert (ctype, c)),
			    op1);

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  /* If the multiplication can overflow we cannot optimize this.  */
	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
	{
	  *strict_overflow_p = true;
	  return omit_one_operand (type, integer_zero_node, op0);
	}

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && (t1 = extract_muldiv (op0, c, code, wide_type,
				   strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && (t1 = extract_muldiv (op1, c, code, wide_type,
					strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
			    fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
	return 0;

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code)
	{
	  bool overflow_p = false;
	  bool overflow_mul_p;
	  signop sign = TYPE_SIGN (ctype);
	  wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
	  overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
	  if (overflow_mul_p
	      && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
	    overflow_p = true;
	  if (!overflow_p)
	    return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				wide_int_to_tree (ctype, mul));
	}

      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with either an
	 operation in CODE or TCODE.

	 If we have an unsigned type, we cannot do this since it will change
	 the result if the original computation overflowed.  */
      if (TYPE_OVERFLOW_UNDEFINED (ctype)
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
		  && code != MULT_EXPR)))
	{
	  if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     op1, c)));
	    }
	  else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (code, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     c, op1)));
	    }
	}
      break;

    default:
      break;
    }

  return 0;
}
/* Return a node which has the indicated constant VALUE (either 0 or
   1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
   and is of the indicated TYPE.  */

tree
constant_boolean_node (bool value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;

  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;

  else if (TREE_CODE (type) == VECTOR_TYPE)
    return build_vector_from_val (type,
				  build_int_cst (TREE_TYPE (type),
						 value ? -1 : 0));
  else
    return fold_convert (type, value ? integer_one_node : integer_zero_node);
}
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (location_t loc,
				     enum tree_code code,
				     tree type, tree op0, tree op1,
				     tree cond, tree arg, int cond_first_p)
{
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;
  enum tree_code cond_code = COND_EXPR;

  if (TREE_CODE (cond) == COND_EXPR
      || TREE_CODE (cond) == VEC_COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If one arm is an expression that throws (and so has void type),
	 it does not make sense to try to perform a logical or arithmetic
	 operation involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	rhs = false_value;
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
    cond_code = VEC_COND_EXPR;

  /* This transformation is only worthwhile if we don't have to wrap ARG
     in a SAVE_EXPR and the operation can be simplified without recursing
     on at least one of the branches once it's pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg)
      && (TREE_SIDE_EFFECTS (arg)
	  || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
	  || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
    return NULL_TREE;

  arg = fold_convert_loc (loc, arg_type, arg);
  if (lhs == 0)
    {
      true_value = fold_convert_loc (loc, cond_type, true_value);
      if (cond_first_p)
	lhs = fold_build2_loc (loc, code, type, true_value, arg);
      else
	lhs = fold_build2_loc (loc, code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert_loc (loc, cond_type, false_value);
      if (cond_first_p)
	rhs = fold_build2_loc (loc, code, type, false_value, arg);
      else
	rhs = fold_build2_loc (loc, code, type, arg, false_value);
    }

  /* Check that we have simplified at least one of the branches.  */
  if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
    return NULL_TREE;

  return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
}
6077 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6079 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6080 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6081 ADDEND is the same as X.
6083 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6084 and finite. The problematic cases are when X is zero, and its mode
6085 has signed zeros. In the case of rounding towards -infinity,
6086 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6087 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6090 fold_real_zero_addition_p (const_tree type
, const_tree addend
, int negate
)
6092 if (!real_zerop (addend
))
6095 /* Don't allow the fold with -fsignaling-nans. */
6096 if (HONOR_SNANS (element_mode (type
)))
6099 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6100 if (!HONOR_SIGNED_ZEROS (element_mode (type
)))
6103 /* In a vector or complex, we would need to check the sign of all zeros. */
6104 if (TREE_CODE (addend
) != REAL_CST
)
6107 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6108 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend
)))
6111 /* The mode has signed zeros, and we have to honor their sign.
6112 In this situation, there is only one case we can return true for.
6113 X - 0 is the same as X unless rounding towards -infinity is
6115 return negate
&& !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
));
6118 /* Subroutine of fold() that checks comparisons of built-in math
6119 functions against real constants.
6121 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6122 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6123 is the type of the result and ARG0 and ARG1 are the operands of the
6124 comparison. ARG1 must be a TREE_REAL_CST.
6126 The function returns the constant folded tree if a simplification
6127 can be made, and NULL_TREE otherwise. */
6130 fold_mathfn_compare (location_t loc
,
6131 enum built_in_function fcode
, enum tree_code code
,
6132 tree type
, tree arg0
, tree arg1
)
6136 if (BUILTIN_SQRT_P (fcode
))
6138 tree arg
= CALL_EXPR_ARG (arg0
, 0);
6139 machine_mode mode
= TYPE_MODE (TREE_TYPE (arg0
));
6141 c
= TREE_REAL_CST (arg1
);
6142 if (REAL_VALUE_NEGATIVE (c
))
6144 /* sqrt(x) < y is always false, if y is negative. */
6145 if (code
== EQ_EXPR
|| code
== LT_EXPR
|| code
== LE_EXPR
)
6146 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
6148 /* sqrt(x) > y is always true, if y is negative and we
6149 don't care about NaNs, i.e. negative values of x. */
6150 if (code
== NE_EXPR
|| !HONOR_NANS (mode
))
6151 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg
);
6153 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6154 return fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6155 build_real (TREE_TYPE (arg
), dconst0
));
6157 else if (code
== GT_EXPR
|| code
== GE_EXPR
)
6161 REAL_ARITHMETIC (c2
, MULT_EXPR
, c
, c
);
6162 real_convert (&c2
, mode
, &c2
);
6164 if (REAL_VALUE_ISINF (c2
))
6166 /* sqrt(x) > y is x == +Inf, when y is very large. */
6167 if (HONOR_INFINITIES (mode
))
6168 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg
,
6169 build_real (TREE_TYPE (arg
), c2
));
6171 /* sqrt(x) > y is always false, when y is very large
6172 and we don't care about infinities. */
6173 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
6176 /* sqrt(x) > c is the same as x > c*c. */
6177 return fold_build2_loc (loc
, code
, type
, arg
,
6178 build_real (TREE_TYPE (arg
), c2
));
6180 else if (code
== LT_EXPR
|| code
== LE_EXPR
)
6184 REAL_ARITHMETIC (c2
, MULT_EXPR
, c
, c
);
6185 real_convert (&c2
, mode
, &c2
);
6187 if (REAL_VALUE_ISINF (c2
))
6189 /* sqrt(x) < y is always true, when y is a very large
6190 value and we don't care about NaNs or Infinities. */
6191 if (! HONOR_NANS (mode
) && ! HONOR_INFINITIES (mode
))
6192 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg
);
6194 /* sqrt(x) < y is x != +Inf when y is very large and we
6195 don't care about NaNs. */
6196 if (! HONOR_NANS (mode
))
6197 return fold_build2_loc (loc
, NE_EXPR
, type
, arg
,
6198 build_real (TREE_TYPE (arg
), c2
));
6200 /* sqrt(x) < y is x >= 0 when y is very large and we
6201 don't care about Infinities. */
6202 if (! HONOR_INFINITIES (mode
))
6203 return fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6204 build_real (TREE_TYPE (arg
), dconst0
));
6206 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6207 arg
= save_expr (arg
);
6208 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
6209 fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6210 build_real (TREE_TYPE (arg
),
6212 fold_build2_loc (loc
, NE_EXPR
, type
, arg
,
6213 build_real (TREE_TYPE (arg
),
6217 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6218 if (! HONOR_NANS (mode
))
6219 return fold_build2_loc (loc
, code
, type
, arg
,
6220 build_real (TREE_TYPE (arg
), c2
));
6222 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6223 arg
= save_expr (arg
);
6224 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
6225 fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6226 build_real (TREE_TYPE (arg
),
6228 fold_build2_loc (loc
, code
, type
, arg
,
6229 build_real (TREE_TYPE (arg
),
6237 /* Subroutine of fold() that optimizes comparisons against Infinities,
6238 either +Inf or -Inf.
6240 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6241 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6242 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6244 The function returns the constant folded tree if a simplification
6245 can be made, and NULL_TREE otherwise. */
6248 fold_inf_compare (location_t loc
, enum tree_code code
, tree type
,
6249 tree arg0
, tree arg1
)
6252 REAL_VALUE_TYPE max
;
6256 mode
= TYPE_MODE (TREE_TYPE (arg0
));
6258 /* For negative infinity swap the sense of the comparison. */
6259 neg
= REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
));
6261 code
= swap_tree_comparison (code
);
6266 /* x > +Inf is always false, if with ignore sNANs. */
6267 if (HONOR_SNANS (mode
))
6269 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
6272 /* x <= +Inf is always true, if we don't case about NaNs. */
6273 if (! HONOR_NANS (mode
))
6274 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
6276 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6277 arg0
= save_expr (arg0
);
6278 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
, arg0
);
6282 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6283 real_maxval (&max
, neg
, mode
);
6284 return fold_build2_loc (loc
, neg
? LT_EXPR
: GT_EXPR
, type
,
6285 arg0
, build_real (TREE_TYPE (arg0
), max
));
6288 /* x < +Inf is always equal to x <= DBL_MAX. */
6289 real_maxval (&max
, neg
, mode
);
6290 return fold_build2_loc (loc
, neg
? GE_EXPR
: LE_EXPR
, type
,
6291 arg0
, build_real (TREE_TYPE (arg0
), max
));
6294 /* x != +Inf is always equal to !(x > DBL_MAX). */
6295 real_maxval (&max
, neg
, mode
);
6296 if (! HONOR_NANS (mode
))
6297 return fold_build2_loc (loc
, neg
? GE_EXPR
: LE_EXPR
, type
,
6298 arg0
, build_real (TREE_TYPE (arg0
), max
));
6300 temp
= fold_build2_loc (loc
, neg
? LT_EXPR
: GT_EXPR
, type
,
6301 arg0
, build_real (TREE_TYPE (arg0
), max
));
6302 return fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, temp
);
6311 /* Subroutine of fold() that optimizes comparisons of a division by
6312 a nonzero integer constant against an integer constant, i.e.
6315 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6316 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6317 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6319 The function returns the constant folded tree if a simplification
6320 can be made, and NULL_TREE otherwise. */
6323 fold_div_compare (location_t loc
,
6324 enum tree_code code
, tree type
, tree arg0
, tree arg1
)
6326 tree prod
, tmp
, hi
, lo
;
6327 tree arg00
= TREE_OPERAND (arg0
, 0);
6328 tree arg01
= TREE_OPERAND (arg0
, 1);
6329 signop sign
= TYPE_SIGN (TREE_TYPE (arg0
));
6330 bool neg_overflow
= false;
6333 /* We have to do this the hard way to detect unsigned overflow.
6334 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6335 wide_int val
= wi::mul (arg01
, arg1
, sign
, &overflow
);
6336 prod
= force_fit_type (TREE_TYPE (arg00
), val
, -1, overflow
);
6337 neg_overflow
= false;
6339 if (sign
== UNSIGNED
)
6341 tmp
= int_const_binop (MINUS_EXPR
, arg01
,
6342 build_int_cst (TREE_TYPE (arg01
), 1));
6345 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6346 val
= wi::add (prod
, tmp
, sign
, &overflow
);
6347 hi
= force_fit_type (TREE_TYPE (arg00
), val
,
6348 -1, overflow
| TREE_OVERFLOW (prod
));
6350 else if (tree_int_cst_sgn (arg01
) >= 0)
6352 tmp
= int_const_binop (MINUS_EXPR
, arg01
,
6353 build_int_cst (TREE_TYPE (arg01
), 1));
6354 switch (tree_int_cst_sgn (arg1
))
6357 neg_overflow
= true;
6358 lo
= int_const_binop (MINUS_EXPR
, prod
, tmp
);
6363 lo
= fold_negate_const (tmp
, TREE_TYPE (arg0
));
6368 hi
= int_const_binop (PLUS_EXPR
, prod
, tmp
);
6378 /* A negative divisor reverses the relational operators. */
6379 code
= swap_tree_comparison (code
);
6381 tmp
= int_const_binop (PLUS_EXPR
, arg01
,
6382 build_int_cst (TREE_TYPE (arg01
), 1));
6383 switch (tree_int_cst_sgn (arg1
))
6386 hi
= int_const_binop (MINUS_EXPR
, prod
, tmp
);
6391 hi
= fold_negate_const (tmp
, TREE_TYPE (arg0
));
6396 neg_overflow
= true;
6397 lo
= int_const_binop (PLUS_EXPR
, prod
, tmp
);
6409 if (TREE_OVERFLOW (lo
) && TREE_OVERFLOW (hi
))
6410 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg00
);
6411 if (TREE_OVERFLOW (hi
))
6412 return fold_build2_loc (loc
, GE_EXPR
, type
, arg00
, lo
);
6413 if (TREE_OVERFLOW (lo
))
6414 return fold_build2_loc (loc
, LE_EXPR
, type
, arg00
, hi
);
6415 return build_range_check (loc
, type
, arg00
, 1, lo
, hi
);
6418 if (TREE_OVERFLOW (lo
) && TREE_OVERFLOW (hi
))
6419 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg00
);
6420 if (TREE_OVERFLOW (hi
))
6421 return fold_build2_loc (loc
, LT_EXPR
, type
, arg00
, lo
);
6422 if (TREE_OVERFLOW (lo
))
6423 return fold_build2_loc (loc
, GT_EXPR
, type
, arg00
, hi
);
6424 return build_range_check (loc
, type
, arg00
, 0, lo
, hi
);
6427 if (TREE_OVERFLOW (lo
))
6429 tmp
= neg_overflow
? integer_zero_node
: integer_one_node
;
6430 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6432 return fold_build2_loc (loc
, LT_EXPR
, type
, arg00
, lo
);
6435 if (TREE_OVERFLOW (hi
))
6437 tmp
= neg_overflow
? integer_zero_node
: integer_one_node
;
6438 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6440 return fold_build2_loc (loc
, LE_EXPR
, type
, arg00
, hi
);
6443 if (TREE_OVERFLOW (hi
))
6445 tmp
= neg_overflow
? integer_one_node
: integer_zero_node
;
6446 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6448 return fold_build2_loc (loc
, GT_EXPR
, type
, arg00
, hi
);
6451 if (TREE_OVERFLOW (lo
))
6453 tmp
= neg_overflow
? integer_one_node
: integer_zero_node
;
6454 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6456 return fold_build2_loc (loc
, GE_EXPR
, type
, arg00
, lo
);
6466 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6467 equality/inequality test, then return a simplified form of the test
6468 using a sign testing. Otherwise return NULL. TYPE is the desired
6472 fold_single_bit_test_into_sign_test (location_t loc
,
6473 enum tree_code code
, tree arg0
, tree arg1
,
6476 /* If this is testing a single bit, we can optimize the test. */
6477 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
6478 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
6479 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
6481 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6482 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6483 tree arg00
= sign_bit_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg0
, 1));
6485 if (arg00
!= NULL_TREE
6486 /* This is only a win if casting to a signed type is cheap,
6487 i.e. when arg00's type is not a partial mode. */
6488 && TYPE_PRECISION (TREE_TYPE (arg00
))
6489 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00
))))
6491 tree stype
= signed_type_for (TREE_TYPE (arg00
));
6492 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
,
6494 fold_convert_loc (loc
, stype
, arg00
),
6495 build_int_cst (stype
, 0));
6502 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6503 equality/inequality test, then return a simplified form of
6504 the test using shifts and logical operations. Otherwise return
6505 NULL. TYPE is the desired result type. */
6508 fold_single_bit_test (location_t loc
, enum tree_code code
,
6509 tree arg0
, tree arg1
, tree result_type
)
6511 /* If this is testing a single bit, we can optimize the test. */
6512 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
6513 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
6514 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
6516 tree inner
= TREE_OPERAND (arg0
, 0);
6517 tree type
= TREE_TYPE (arg0
);
6518 int bitnum
= tree_log2 (TREE_OPERAND (arg0
, 1));
6519 machine_mode operand_mode
= TYPE_MODE (type
);
6521 tree signed_type
, unsigned_type
, intermediate_type
;
6524 /* First, see if we can fold the single bit test into a sign-bit
6526 tem
= fold_single_bit_test_into_sign_test (loc
, code
, arg0
, arg1
,
6531 /* Otherwise we have (A & C) != 0 where C is a single bit,
6532 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6533 Similarly for (A & C) == 0. */
6535 /* If INNER is a right shift of a constant and it plus BITNUM does
6536 not overflow, adjust BITNUM and INNER. */
6537 if (TREE_CODE (inner
) == RSHIFT_EXPR
6538 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
6539 && bitnum
< TYPE_PRECISION (type
)
6540 && wi::ltu_p (TREE_OPERAND (inner
, 1),
6541 TYPE_PRECISION (type
) - bitnum
))
6543 bitnum
+= tree_to_uhwi (TREE_OPERAND (inner
, 1));
6544 inner
= TREE_OPERAND (inner
, 0);
6547 /* If we are going to be able to omit the AND below, we must do our
6548 operations as unsigned. If we must use the AND, we have a choice.
6549 Normally unsigned is faster, but for some machines signed is. */
6550 #ifdef LOAD_EXTEND_OP
6551 ops_unsigned
= (LOAD_EXTEND_OP (operand_mode
) == SIGN_EXTEND
6552 && !flag_syntax_only
) ? 0 : 1;
6557 signed_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 0);
6558 unsigned_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 1);
6559 intermediate_type
= ops_unsigned
? unsigned_type
: signed_type
;
6560 inner
= fold_convert_loc (loc
, intermediate_type
, inner
);
6563 inner
= build2 (RSHIFT_EXPR
, intermediate_type
,
6564 inner
, size_int (bitnum
));
6566 one
= build_int_cst (intermediate_type
, 1);
6568 if (code
== EQ_EXPR
)
6569 inner
= fold_build2_loc (loc
, BIT_XOR_EXPR
, intermediate_type
, inner
, one
);
6571 /* Put the AND last so it can combine with more things. */
6572 inner
= build2 (BIT_AND_EXPR
, intermediate_type
, inner
, one
);
6574 /* Make sure to return the proper type. */
6575 inner
= fold_convert_loc (loc
, result_type
, inner
);
6582 /* Check whether we are allowed to reorder operands arg0 and arg1,
6583 such that the evaluation of arg1 occurs before arg0. */
6586 reorder_operands_p (const_tree arg0
, const_tree arg1
)
6588 if (! flag_evaluation_order
)
6590 if (TREE_CONSTANT (arg0
) || TREE_CONSTANT (arg1
))
6592 return ! TREE_SIDE_EFFECTS (arg0
)
6593 && ! TREE_SIDE_EFFECTS (arg1
);
6596 /* Test whether it is preferable two swap two operands, ARG0 and
6597 ARG1, for example because ARG0 is an integer constant and ARG1
6598 isn't. If REORDER is true, only recommend swapping if we can
6599 evaluate the operands in reverse order. */
6602 tree_swap_operands_p (const_tree arg0
, const_tree arg1
, bool reorder
)
6604 if (CONSTANT_CLASS_P (arg1
))
6606 if (CONSTANT_CLASS_P (arg0
))
6609 STRIP_SIGN_NOPS (arg0
);
6610 STRIP_SIGN_NOPS (arg1
);
6612 if (TREE_CONSTANT (arg1
))
6614 if (TREE_CONSTANT (arg0
))
6617 if (reorder
&& flag_evaluation_order
6618 && (TREE_SIDE_EFFECTS (arg0
) || TREE_SIDE_EFFECTS (arg1
)))
6621 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6622 for commutative and comparison operators. Ensuring a canonical
6623 form allows the optimizers to find additional redundancies without
6624 having to explicitly check for both orderings. */
6625 if (TREE_CODE (arg0
) == SSA_NAME
6626 && TREE_CODE (arg1
) == SSA_NAME
6627 && SSA_NAME_VERSION (arg0
) > SSA_NAME_VERSION (arg1
))
6630 /* Put SSA_NAMEs last. */
6631 if (TREE_CODE (arg1
) == SSA_NAME
)
6633 if (TREE_CODE (arg0
) == SSA_NAME
)
6636 /* Put variables last. */
6645 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6646 ARG0 is extended to a wider type. */
6649 fold_widened_comparison (location_t loc
, enum tree_code code
,
6650 tree type
, tree arg0
, tree arg1
)
6652 tree arg0_unw
= get_unwidened (arg0
, NULL_TREE
);
6654 tree shorter_type
, outer_type
;
6658 if (arg0_unw
== arg0
)
6660 shorter_type
= TREE_TYPE (arg0_unw
);
6662 #ifdef HAVE_canonicalize_funcptr_for_compare
6663 /* Disable this optimization if we're casting a function pointer
6664 type on targets that require function pointer canonicalization. */
6665 if (HAVE_canonicalize_funcptr_for_compare
6666 && TREE_CODE (shorter_type
) == POINTER_TYPE
6667 && TREE_CODE (TREE_TYPE (shorter_type
)) == FUNCTION_TYPE
)
6671 if (TYPE_PRECISION (TREE_TYPE (arg0
)) <= TYPE_PRECISION (shorter_type
))
6674 arg1_unw
= get_unwidened (arg1
, NULL_TREE
);
6676 /* If possible, express the comparison in the shorter mode. */
6677 if ((code
== EQ_EXPR
|| code
== NE_EXPR
6678 || TYPE_UNSIGNED (TREE_TYPE (arg0
)) == TYPE_UNSIGNED (shorter_type
))
6679 && (TREE_TYPE (arg1_unw
) == shorter_type
6680 || ((TYPE_PRECISION (shorter_type
)
6681 >= TYPE_PRECISION (TREE_TYPE (arg1_unw
)))
6682 && (TYPE_UNSIGNED (shorter_type
)
6683 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw
))))
6684 || (TREE_CODE (arg1_unw
) == INTEGER_CST
6685 && (TREE_CODE (shorter_type
) == INTEGER_TYPE
6686 || TREE_CODE (shorter_type
) == BOOLEAN_TYPE
)
6687 && int_fits_type_p (arg1_unw
, shorter_type
))))
6688 return fold_build2_loc (loc
, code
, type
, arg0_unw
,
6689 fold_convert_loc (loc
, shorter_type
, arg1_unw
));
6691 if (TREE_CODE (arg1_unw
) != INTEGER_CST
6692 || TREE_CODE (shorter_type
) != INTEGER_TYPE
6693 || !int_fits_type_p (arg1_unw
, shorter_type
))
6696 /* If we are comparing with the integer that does not fit into the range
6697 of the shorter type, the result is known. */
6698 outer_type
= TREE_TYPE (arg1_unw
);
6699 min
= lower_bound_in_type (outer_type
, shorter_type
);
6700 max
= upper_bound_in_type (outer_type
, shorter_type
);
6702 above
= integer_nonzerop (fold_relational_const (LT_EXPR
, type
,
6704 below
= integer_nonzerop (fold_relational_const (LT_EXPR
, type
,
6711 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
6716 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
6722 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
6724 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
6729 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
6731 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
6740 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6741 ARG0 just the signedness is changed. */
6744 fold_sign_changed_comparison (location_t loc
, enum tree_code code
, tree type
,
6745 tree arg0
, tree arg1
)
6748 tree inner_type
, outer_type
;
6750 if (!CONVERT_EXPR_P (arg0
))
6753 outer_type
= TREE_TYPE (arg0
);
6754 arg0_inner
= TREE_OPERAND (arg0
, 0);
6755 inner_type
= TREE_TYPE (arg0_inner
);
6757 #ifdef HAVE_canonicalize_funcptr_for_compare
6758 /* Disable this optimization if we're casting a function pointer
6759 type on targets that require function pointer canonicalization. */
6760 if (HAVE_canonicalize_funcptr_for_compare
6761 && TREE_CODE (inner_type
) == POINTER_TYPE
6762 && TREE_CODE (TREE_TYPE (inner_type
)) == FUNCTION_TYPE
)
6766 if (TYPE_PRECISION (inner_type
) != TYPE_PRECISION (outer_type
))
6769 if (TREE_CODE (arg1
) != INTEGER_CST
6770 && !(CONVERT_EXPR_P (arg1
)
6771 && TREE_TYPE (TREE_OPERAND (arg1
, 0)) == inner_type
))
6774 if (TYPE_UNSIGNED (inner_type
) != TYPE_UNSIGNED (outer_type
)
6779 if (POINTER_TYPE_P (inner_type
) != POINTER_TYPE_P (outer_type
))
6782 if (TREE_CODE (arg1
) == INTEGER_CST
)
6783 arg1
= force_fit_type (inner_type
, wi::to_widest (arg1
), 0,
6784 TREE_OVERFLOW (arg1
));
6786 arg1
= fold_convert_loc (loc
, inner_type
, arg1
);
6788 return fold_build2_loc (loc
, code
, type
, arg0_inner
, arg1
);
6792 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6793 means A >= Y && A != MAX, but in this case we know that
6794 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6797 fold_to_nonsharp_ineq_using_bound (location_t loc
, tree ineq
, tree bound
)
6799 tree a
, typea
, type
= TREE_TYPE (ineq
), a1
, diff
, y
;
6801 if (TREE_CODE (bound
) == LT_EXPR
)
6802 a
= TREE_OPERAND (bound
, 0);
6803 else if (TREE_CODE (bound
) == GT_EXPR
)
6804 a
= TREE_OPERAND (bound
, 1);
6808 typea
= TREE_TYPE (a
);
6809 if (!INTEGRAL_TYPE_P (typea
)
6810 && !POINTER_TYPE_P (typea
))
6813 if (TREE_CODE (ineq
) == LT_EXPR
)
6815 a1
= TREE_OPERAND (ineq
, 1);
6816 y
= TREE_OPERAND (ineq
, 0);
6818 else if (TREE_CODE (ineq
) == GT_EXPR
)
6820 a1
= TREE_OPERAND (ineq
, 0);
6821 y
= TREE_OPERAND (ineq
, 1);
6826 if (TREE_TYPE (a1
) != typea
)
6829 if (POINTER_TYPE_P (typea
))
6831 /* Convert the pointer types into integer before taking the difference. */
6832 tree ta
= fold_convert_loc (loc
, ssizetype
, a
);
6833 tree ta1
= fold_convert_loc (loc
, ssizetype
, a1
);
6834 diff
= fold_binary_loc (loc
, MINUS_EXPR
, ssizetype
, ta1
, ta
);
6837 diff
= fold_binary_loc (loc
, MINUS_EXPR
, typea
, a1
, a
);
6839 if (!diff
|| !integer_onep (diff
))
6842 return fold_build2_loc (loc
, GE_EXPR
, type
, a
, y
);
6845 /* Fold a sum or difference of at least one multiplication.
6846 Returns the folded tree or NULL if no simplification could be made. */
6849 fold_plusminus_mult_expr (location_t loc
, enum tree_code code
, tree type
,
6850 tree arg0
, tree arg1
)
6852 tree arg00
, arg01
, arg10
, arg11
;
6853 tree alt0
= NULL_TREE
, alt1
= NULL_TREE
, same
;
6855 /* (A * C) +- (B * C) -> (A+-B) * C.
6856 (A * C) +- A -> A * (C+-1).
6857 We are most concerned about the case where C is a constant,
6858 but other combinations show up during loop reduction. Since
6859 it is not difficult, try all four possibilities. */
6861 if (TREE_CODE (arg0
) == MULT_EXPR
)
6863 arg00
= TREE_OPERAND (arg0
, 0);
6864 arg01
= TREE_OPERAND (arg0
, 1);
6866 else if (TREE_CODE (arg0
) == INTEGER_CST
)
6868 arg00
= build_one_cst (type
);
6873 /* We cannot generate constant 1 for fract. */
6874 if (ALL_FRACT_MODE_P (TYPE_MODE (type
)))
6877 arg01
= build_one_cst (type
);
6879 if (TREE_CODE (arg1
) == MULT_EXPR
)
6881 arg10
= TREE_OPERAND (arg1
, 0);
6882 arg11
= TREE_OPERAND (arg1
, 1);
6884 else if (TREE_CODE (arg1
) == INTEGER_CST
)
6886 arg10
= build_one_cst (type
);
6887 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
6888 the purpose of this canonicalization. */
6889 if (wi::neg_p (arg1
, TYPE_SIGN (TREE_TYPE (arg1
)))
6890 && negate_expr_p (arg1
)
6891 && code
== PLUS_EXPR
)
6893 arg11
= negate_expr (arg1
);
6901 /* We cannot generate constant 1 for fract. */
6902 if (ALL_FRACT_MODE_P (TYPE_MODE (type
)))
6905 arg11
= build_one_cst (type
);
6909 if (operand_equal_p (arg01
, arg11
, 0))
6910 same
= arg01
, alt0
= arg00
, alt1
= arg10
;
6911 else if (operand_equal_p (arg00
, arg10
, 0))
6912 same
= arg00
, alt0
= arg01
, alt1
= arg11
;
6913 else if (operand_equal_p (arg00
, arg11
, 0))
6914 same
= arg00
, alt0
= arg01
, alt1
= arg10
;
6915 else if (operand_equal_p (arg01
, arg10
, 0))
6916 same
= arg01
, alt0
= arg00
, alt1
= arg11
;
6918 /* No identical multiplicands; see if we can find a common
6919 power-of-two factor in non-power-of-two multiplies. This
6920 can help in multi-dimensional array access. */
6921 else if (tree_fits_shwi_p (arg01
)
6922 && tree_fits_shwi_p (arg11
))
6924 HOST_WIDE_INT int01
, int11
, tmp
;
6927 int01
= tree_to_shwi (arg01
);
6928 int11
= tree_to_shwi (arg11
);
6930 /* Move min of absolute values to int11. */
6931 if (absu_hwi (int01
) < absu_hwi (int11
))
6933 tmp
= int01
, int01
= int11
, int11
= tmp
;
6934 alt0
= arg00
, arg00
= arg10
, arg10
= alt0
;
6941 if (exact_log2 (absu_hwi (int11
)) > 0 && int01
% int11
== 0
6942 /* The remainder should not be a constant, otherwise we
6943 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
6944 increased the number of multiplications necessary. */
6945 && TREE_CODE (arg10
) != INTEGER_CST
)
6947 alt0
= fold_build2_loc (loc
, MULT_EXPR
, TREE_TYPE (arg00
), arg00
,
6948 build_int_cst (TREE_TYPE (arg00
),
6953 maybe_same
= alt0
, alt0
= alt1
, alt1
= maybe_same
;
6958 return fold_build2_loc (loc
, MULT_EXPR
, type
,
6959 fold_build2_loc (loc
, code
, type
,
6960 fold_convert_loc (loc
, type
, alt0
),
6961 fold_convert_loc (loc
, type
, alt1
)),
6962 fold_convert_loc (loc
, type
, same
));
6967 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6968 specified by EXPR into the buffer PTR of length LEN bytes.
6969 Return the number of bytes placed in the buffer, or zero
6973 native_encode_int (const_tree expr
, unsigned char *ptr
, int len
, int off
)
6975 tree type
= TREE_TYPE (expr
);
6976 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
6977 int byte
, offset
, word
, words
;
6978 unsigned char value
;
6980 if ((off
== -1 && total_bytes
> len
)
6981 || off
>= total_bytes
)
6985 words
= total_bytes
/ UNITS_PER_WORD
;
6987 for (byte
= 0; byte
< total_bytes
; byte
++)
6989 int bitpos
= byte
* BITS_PER_UNIT
;
6990 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
6992 value
= wi::extract_uhwi (wi::to_widest (expr
), bitpos
, BITS_PER_UNIT
);
6994 if (total_bytes
> UNITS_PER_WORD
)
6996 word
= byte
/ UNITS_PER_WORD
;
6997 if (WORDS_BIG_ENDIAN
)
6998 word
= (words
- 1) - word
;
6999 offset
= word
* UNITS_PER_WORD
;
7000 if (BYTES_BIG_ENDIAN
)
7001 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7003 offset
+= byte
% UNITS_PER_WORD
;
7006 offset
= BYTES_BIG_ENDIAN
? (total_bytes
- 1) - byte
: byte
;
7008 && offset
- off
< len
)
7009 ptr
[offset
- off
] = value
;
7011 return MIN (len
, total_bytes
- off
);
7015 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7016 specified by EXPR into the buffer PTR of length LEN bytes.
7017 Return the number of bytes placed in the buffer, or zero
7021 native_encode_fixed (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7023 tree type
= TREE_TYPE (expr
);
7024 machine_mode mode
= TYPE_MODE (type
);
7025 int total_bytes
= GET_MODE_SIZE (mode
);
7026 FIXED_VALUE_TYPE value
;
7027 tree i_value
, i_type
;
7029 if (total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
7032 i_type
= lang_hooks
.types
.type_for_size (GET_MODE_BITSIZE (mode
), 1);
7034 if (NULL_TREE
== i_type
7035 || TYPE_PRECISION (i_type
) != total_bytes
)
7038 value
= TREE_FIXED_CST (expr
);
7039 i_value
= double_int_to_tree (i_type
, value
.data
);
7041 return native_encode_int (i_value
, ptr
, len
, off
);
7045 /* Subroutine of native_encode_expr. Encode the REAL_CST
7046 specified by EXPR into the buffer PTR of length LEN bytes.
7047 Return the number of bytes placed in the buffer, or zero
7051 native_encode_real (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7053 tree type
= TREE_TYPE (expr
);
7054 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7055 int byte
, offset
, word
, words
, bitpos
;
7056 unsigned char value
;
7058 /* There are always 32 bits in each long, no matter the size of
7059 the hosts long. We handle floating point representations with
7063 if ((off
== -1 && total_bytes
> len
)
7064 || off
>= total_bytes
)
7068 words
= (32 / BITS_PER_UNIT
) / UNITS_PER_WORD
;
7070 real_to_target (tmp
, TREE_REAL_CST_PTR (expr
), TYPE_MODE (type
));
7072 for (bitpos
= 0; bitpos
< total_bytes
* BITS_PER_UNIT
;
7073 bitpos
+= BITS_PER_UNIT
)
7075 byte
= (bitpos
/ BITS_PER_UNIT
) & 3;
7076 value
= (unsigned char) (tmp
[bitpos
/ 32] >> (bitpos
& 31));
7078 if (UNITS_PER_WORD
< 4)
7080 word
= byte
/ UNITS_PER_WORD
;
7081 if (WORDS_BIG_ENDIAN
)
7082 word
= (words
- 1) - word
;
7083 offset
= word
* UNITS_PER_WORD
;
7084 if (BYTES_BIG_ENDIAN
)
7085 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7087 offset
+= byte
% UNITS_PER_WORD
;
7090 offset
= BYTES_BIG_ENDIAN
? 3 - byte
: byte
;
7091 offset
= offset
+ ((bitpos
/ BITS_PER_UNIT
) & ~3);
7093 && offset
- off
< len
)
7094 ptr
[offset
- off
] = value
;
7096 return MIN (len
, total_bytes
- off
);
7099 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7100 specified by EXPR into the buffer PTR of length LEN bytes.
7101 Return the number of bytes placed in the buffer, or zero
7105 native_encode_complex (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7110 part
= TREE_REALPART (expr
);
7111 rsize
= native_encode_expr (part
, ptr
, len
, off
);
7115 part
= TREE_IMAGPART (expr
);
7117 off
= MAX (0, off
- GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part
))));
7118 isize
= native_encode_expr (part
, ptr
+rsize
, len
-rsize
, off
);
7122 return rsize
+ isize
;
7126 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7127 specified by EXPR into the buffer PTR of length LEN bytes.
7128 Return the number of bytes placed in the buffer, or zero
7132 native_encode_vector (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7139 count
= VECTOR_CST_NELTS (expr
);
7140 itype
= TREE_TYPE (TREE_TYPE (expr
));
7141 size
= GET_MODE_SIZE (TYPE_MODE (itype
));
7142 for (i
= 0; i
< count
; i
++)
7149 elem
= VECTOR_CST_ELT (expr
, i
);
7150 int res
= native_encode_expr (elem
, ptr
+offset
, len
-offset
, off
);
7151 if ((off
== -1 && res
!= size
)
7164 /* Subroutine of native_encode_expr. Encode the STRING_CST
7165 specified by EXPR into the buffer PTR of length LEN bytes.
7166 Return the number of bytes placed in the buffer, or zero
7170 native_encode_string (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7172 tree type
= TREE_TYPE (expr
);
7173 HOST_WIDE_INT total_bytes
;
7175 if (TREE_CODE (type
) != ARRAY_TYPE
7176 || TREE_CODE (TREE_TYPE (type
)) != INTEGER_TYPE
7177 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type
))) != BITS_PER_UNIT
7178 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type
)))
7180 total_bytes
= tree_to_shwi (TYPE_SIZE_UNIT (type
));
7181 if ((off
== -1 && total_bytes
> len
)
7182 || off
>= total_bytes
)
7186 if (TREE_STRING_LENGTH (expr
) - off
< MIN (total_bytes
, len
))
7189 if (off
< TREE_STRING_LENGTH (expr
))
7191 written
= MIN (len
, TREE_STRING_LENGTH (expr
) - off
);
7192 memcpy (ptr
, TREE_STRING_POINTER (expr
) + off
, written
);
7194 memset (ptr
+ written
, 0,
7195 MIN (total_bytes
- written
, len
- written
));
7198 memcpy (ptr
, TREE_STRING_POINTER (expr
) + off
, MIN (total_bytes
, len
));
7199 return MIN (total_bytes
- off
, len
);
7203 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7204 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7205 buffer PTR of length LEN bytes. If OFF is not -1 then start
7206 the encoding at byte offset OFF and encode at most LEN bytes.
7207 Return the number of bytes placed in the buffer, or zero upon failure. */
7210 native_encode_expr (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7212 switch (TREE_CODE (expr
))
7215 return native_encode_int (expr
, ptr
, len
, off
);
7218 return native_encode_real (expr
, ptr
, len
, off
);
7221 return native_encode_fixed (expr
, ptr
, len
, off
);
7224 return native_encode_complex (expr
, ptr
, len
, off
);
7227 return native_encode_vector (expr
, ptr
, len
, off
);
7230 return native_encode_string (expr
, ptr
, len
, off
);
7238 /* Subroutine of native_interpret_expr. Interpret the contents of
7239 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7240 If the buffer cannot be interpreted, return NULL_TREE. */
7243 native_interpret_int (tree type
, const unsigned char *ptr
, int len
)
7245 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7247 if (total_bytes
> len
7248 || total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
7251 wide_int result
= wi::from_buffer (ptr
, total_bytes
);
7253 return wide_int_to_tree (type
, result
);
7257 /* Subroutine of native_interpret_expr. Interpret the contents of
7258 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7259 If the buffer cannot be interpreted, return NULL_TREE. */
7262 native_interpret_fixed (tree type
, const unsigned char *ptr
, int len
)
7264 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7266 FIXED_VALUE_TYPE fixed_value
;
7268 if (total_bytes
> len
7269 || total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
7272 result
= double_int::from_buffer (ptr
, total_bytes
);
7273 fixed_value
= fixed_from_double_int (result
, TYPE_MODE (type
));
7275 return build_fixed (type
, fixed_value
);
7279 /* Subroutine of native_interpret_expr. Interpret the contents of
7280 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7281 If the buffer cannot be interpreted, return NULL_TREE. */
7284 native_interpret_real (tree type
, const unsigned char *ptr
, int len
)
7286 machine_mode mode
= TYPE_MODE (type
);
7287 int total_bytes
= GET_MODE_SIZE (mode
);
7288 int byte
, offset
, word
, words
, bitpos
;
7289 unsigned char value
;
7290 /* There are always 32 bits in each long, no matter the size of
7291 the hosts long. We handle floating point representations with
7296 total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7297 if (total_bytes
> len
|| total_bytes
> 24)
7299 words
= (32 / BITS_PER_UNIT
) / UNITS_PER_WORD
;
7301 memset (tmp
, 0, sizeof (tmp
));
7302 for (bitpos
= 0; bitpos
< total_bytes
* BITS_PER_UNIT
;
7303 bitpos
+= BITS_PER_UNIT
)
7305 byte
= (bitpos
/ BITS_PER_UNIT
) & 3;
7306 if (UNITS_PER_WORD
< 4)
7308 word
= byte
/ UNITS_PER_WORD
;
7309 if (WORDS_BIG_ENDIAN
)
7310 word
= (words
- 1) - word
;
7311 offset
= word
* UNITS_PER_WORD
;
7312 if (BYTES_BIG_ENDIAN
)
7313 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7315 offset
+= byte
% UNITS_PER_WORD
;
7318 offset
= BYTES_BIG_ENDIAN
? 3 - byte
: byte
;
7319 value
= ptr
[offset
+ ((bitpos
/ BITS_PER_UNIT
) & ~3)];
7321 tmp
[bitpos
/ 32] |= (unsigned long)value
<< (bitpos
& 31);
7324 real_from_target (&r
, tmp
, mode
);
7325 return build_real (type
, r
);
7329 /* Subroutine of native_interpret_expr. Interpret the contents of
7330 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7331 If the buffer cannot be interpreted, return NULL_TREE. */
7334 native_interpret_complex (tree type
, const unsigned char *ptr
, int len
)
7336 tree etype
, rpart
, ipart
;
7339 etype
= TREE_TYPE (type
);
7340 size
= GET_MODE_SIZE (TYPE_MODE (etype
));
7343 rpart
= native_interpret_expr (etype
, ptr
, size
);
7346 ipart
= native_interpret_expr (etype
, ptr
+size
, size
);
7349 return build_complex (type
, rpart
, ipart
);
7353 /* Subroutine of native_interpret_expr. Interpret the contents of
7354 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7355 If the buffer cannot be interpreted, return NULL_TREE. */
7358 native_interpret_vector (tree type
, const unsigned char *ptr
, int len
)
7364 etype
= TREE_TYPE (type
);
7365 size
= GET_MODE_SIZE (TYPE_MODE (etype
));
7366 count
= TYPE_VECTOR_SUBPARTS (type
);
7367 if (size
* count
> len
)
7370 elements
= XALLOCAVEC (tree
, count
);
7371 for (i
= count
- 1; i
>= 0; i
--)
7373 elem
= native_interpret_expr (etype
, ptr
+(i
*size
), size
);
7378 return build_vector (type
, elements
);
7382 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7383 the buffer PTR of length LEN as a constant of type TYPE. For
7384 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7385 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7386 return NULL_TREE. */
7389 native_interpret_expr (tree type
, const unsigned char *ptr
, int len
)
7391 switch (TREE_CODE (type
))
7397 case REFERENCE_TYPE
:
7398 return native_interpret_int (type
, ptr
, len
);
7401 return native_interpret_real (type
, ptr
, len
);
7403 case FIXED_POINT_TYPE
:
7404 return native_interpret_fixed (type
, ptr
, len
);
7407 return native_interpret_complex (type
, ptr
, len
);
7410 return native_interpret_vector (type
, ptr
, len
);
7417 /* Returns true if we can interpret the contents of a native encoding
7421 can_native_interpret_type_p (tree type
)
7423 switch (TREE_CODE (type
))
7429 case REFERENCE_TYPE
:
7430 case FIXED_POINT_TYPE
:
7440 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7441 TYPE at compile-time. If we're unable to perform the conversion
7442 return NULL_TREE. */
7445 fold_view_convert_expr (tree type
, tree expr
)
7447 /* We support up to 512-bit values (for V8DFmode). */
7448 unsigned char buffer
[64];
7451 /* Check that the host and target are sane. */
7452 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8)
7455 len
= native_encode_expr (expr
, buffer
, sizeof (buffer
));
7459 return native_interpret_expr (type
, buffer
, len
);
7462 /* Build an expression for the address of T. Folds away INDIRECT_REF
7463 to avoid confusing the gimplify process. */
7466 build_fold_addr_expr_with_type_loc (location_t loc
, tree t
, tree ptrtype
)
7468 /* The size of the object is not relevant when talking about its address. */
7469 if (TREE_CODE (t
) == WITH_SIZE_EXPR
)
7470 t
= TREE_OPERAND (t
, 0);
7472 if (TREE_CODE (t
) == INDIRECT_REF
)
7474 t
= TREE_OPERAND (t
, 0);
7476 if (TREE_TYPE (t
) != ptrtype
)
7477 t
= build1_loc (loc
, NOP_EXPR
, ptrtype
, t
);
7479 else if (TREE_CODE (t
) == MEM_REF
7480 && integer_zerop (TREE_OPERAND (t
, 1)))
7481 return TREE_OPERAND (t
, 0);
7482 else if (TREE_CODE (t
) == MEM_REF
7483 && TREE_CODE (TREE_OPERAND (t
, 0)) == INTEGER_CST
)
7484 return fold_binary (POINTER_PLUS_EXPR
, ptrtype
,
7485 TREE_OPERAND (t
, 0),
7486 convert_to_ptrofftype (TREE_OPERAND (t
, 1)));
7487 else if (TREE_CODE (t
) == VIEW_CONVERT_EXPR
)
7489 t
= build_fold_addr_expr_loc (loc
, TREE_OPERAND (t
, 0));
7491 if (TREE_TYPE (t
) != ptrtype
)
7492 t
= fold_convert_loc (loc
, ptrtype
, t
);
7495 t
= build1_loc (loc
, ADDR_EXPR
, ptrtype
, t
);
7500 /* Build an expression for the address of T. */
7503 build_fold_addr_expr_loc (location_t loc
, tree t
)
7505 tree ptrtype
= build_pointer_type (TREE_TYPE (t
));
7507 return build_fold_addr_expr_with_type_loc (loc
, t
, ptrtype
);
7510 static bool vec_cst_ctor_to_array (tree
, tree
*);
7512 /* Fold a unary expression of code CODE and type TYPE with operand
7513 OP0. Return the folded expression if folding is successful.
7514 Otherwise, return NULL_TREE. */
7517 fold_unary_loc (location_t loc
, enum tree_code code
, tree type
, tree op0
)
7521 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
7523 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
7524 && TREE_CODE_LENGTH (code
) == 1);
7526 tem
= generic_simplify (loc
, code
, type
, op0
);
7533 if (CONVERT_EXPR_CODE_P (code
)
7534 || code
== FLOAT_EXPR
|| code
== ABS_EXPR
|| code
== NEGATE_EXPR
)
7536 /* Don't use STRIP_NOPS, because signedness of argument type
7538 STRIP_SIGN_NOPS (arg0
);
7542 /* Strip any conversions that don't change the mode. This
7543 is safe for every expression, except for a comparison
7544 expression because its signedness is derived from its
7547 Note that this is done as an internal manipulation within
7548 the constant folder, in order to find the simplest
7549 representation of the arguments so that their form can be
7550 studied. In any cases, the appropriate type conversions
7551 should be put back in the tree that will get out of the
7557 if (TREE_CODE_CLASS (code
) == tcc_unary
)
7559 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
7560 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7561 fold_build1_loc (loc
, code
, type
,
7562 fold_convert_loc (loc
, TREE_TYPE (op0
),
7563 TREE_OPERAND (arg0
, 1))));
7564 else if (TREE_CODE (arg0
) == COND_EXPR
)
7566 tree arg01
= TREE_OPERAND (arg0
, 1);
7567 tree arg02
= TREE_OPERAND (arg0
, 2);
7568 if (! VOID_TYPE_P (TREE_TYPE (arg01
)))
7569 arg01
= fold_build1_loc (loc
, code
, type
,
7570 fold_convert_loc (loc
,
7571 TREE_TYPE (op0
), arg01
));
7572 if (! VOID_TYPE_P (TREE_TYPE (arg02
)))
7573 arg02
= fold_build1_loc (loc
, code
, type
,
7574 fold_convert_loc (loc
,
7575 TREE_TYPE (op0
), arg02
));
7576 tem
= fold_build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7579 /* If this was a conversion, and all we did was to move into
7580 inside the COND_EXPR, bring it back out. But leave it if
7581 it is a conversion from integer to integer and the
7582 result precision is no wider than a word since such a
7583 conversion is cheap and may be optimized away by combine,
7584 while it couldn't if it were outside the COND_EXPR. Then return
7585 so we don't get into an infinite recursion loop taking the
7586 conversion out and then back in. */
7588 if ((CONVERT_EXPR_CODE_P (code
)
7589 || code
== NON_LVALUE_EXPR
)
7590 && TREE_CODE (tem
) == COND_EXPR
7591 && TREE_CODE (TREE_OPERAND (tem
, 1)) == code
7592 && TREE_CODE (TREE_OPERAND (tem
, 2)) == code
7593 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 1))
7594 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 2))
7595 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))
7596 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)))
7597 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7599 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))))
7600 && TYPE_PRECISION (TREE_TYPE (tem
)) <= BITS_PER_WORD
)
7601 || flag_syntax_only
))
7602 tem
= build1_loc (loc
, code
, type
,
7604 TREE_TYPE (TREE_OPERAND
7605 (TREE_OPERAND (tem
, 1), 0)),
7606 TREE_OPERAND (tem
, 0),
7607 TREE_OPERAND (TREE_OPERAND (tem
, 1), 0),
7608 TREE_OPERAND (TREE_OPERAND (tem
, 2),
7616 case NON_LVALUE_EXPR
:
7617 if (!maybe_lvalue_p (op0
))
7618 return fold_convert_loc (loc
, type
, op0
);
7623 case FIX_TRUNC_EXPR
:
7624 if (COMPARISON_CLASS_P (op0
))
7626 /* If we have (type) (a CMP b) and type is an integral type, return
7627 new expression involving the new type. Canonicalize
7628 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7630 Do not fold the result as that would not simplify further, also
7631 folding again results in recursions. */
7632 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
7633 return build2_loc (loc
, TREE_CODE (op0
), type
,
7634 TREE_OPERAND (op0
, 0),
7635 TREE_OPERAND (op0
, 1));
7636 else if (!INTEGRAL_TYPE_P (type
) && !VOID_TYPE_P (type
)
7637 && TREE_CODE (type
) != VECTOR_TYPE
)
7638 return build3_loc (loc
, COND_EXPR
, type
, op0
,
7639 constant_boolean_node (true, type
),
7640 constant_boolean_node (false, type
));
7643 /* Handle (T *)&A.B.C for A being of type T and B and C
7644 living at offset zero. This occurs frequently in
7645 C++ upcasting and then accessing the base. */
7646 if (TREE_CODE (op0
) == ADDR_EXPR
7647 && POINTER_TYPE_P (type
)
7648 && handled_component_p (TREE_OPERAND (op0
, 0)))
7650 HOST_WIDE_INT bitsize
, bitpos
;
7653 int unsignedp
, volatilep
;
7654 tree base
= TREE_OPERAND (op0
, 0);
7655 base
= get_inner_reference (base
, &bitsize
, &bitpos
, &offset
,
7656 &mode
, &unsignedp
, &volatilep
, false);
7657 /* If the reference was to a (constant) zero offset, we can use
7658 the address of the base if it has the same base type
7659 as the result type and the pointer type is unqualified. */
7660 if (! offset
&& bitpos
== 0
7661 && (TYPE_MAIN_VARIANT (TREE_TYPE (type
))
7662 == TYPE_MAIN_VARIANT (TREE_TYPE (base
)))
7663 && TYPE_QUALS (type
) == TYPE_UNQUALIFIED
)
7664 return fold_convert_loc (loc
, type
,
7665 build_fold_addr_expr_loc (loc
, base
));
7668 if (TREE_CODE (op0
) == MODIFY_EXPR
7669 && TREE_CONSTANT (TREE_OPERAND (op0
, 1))
7670 /* Detect assigning a bitfield. */
7671 && !(TREE_CODE (TREE_OPERAND (op0
, 0)) == COMPONENT_REF
7673 (TREE_OPERAND (TREE_OPERAND (op0
, 0), 1))))
7675 /* Don't leave an assignment inside a conversion
7676 unless assigning a bitfield. */
7677 tem
= fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 1));
7678 /* First do the assignment, then return converted constant. */
7679 tem
= build2_loc (loc
, COMPOUND_EXPR
, TREE_TYPE (tem
), op0
, tem
);
7680 TREE_NO_WARNING (tem
) = 1;
7681 TREE_USED (tem
) = 1;
7685 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7686 constants (if x has signed type, the sign bit cannot be set
7687 in c). This folds extension into the BIT_AND_EXPR.
7688 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7689 very likely don't have maximal range for their precision and this
7690 transformation effectively doesn't preserve non-maximal ranges. */
7691 if (TREE_CODE (type
) == INTEGER_TYPE
7692 && TREE_CODE (op0
) == BIT_AND_EXPR
7693 && TREE_CODE (TREE_OPERAND (op0
, 1)) == INTEGER_CST
)
7695 tree and_expr
= op0
;
7696 tree and0
= TREE_OPERAND (and_expr
, 0);
7697 tree and1
= TREE_OPERAND (and_expr
, 1);
7700 if (TYPE_UNSIGNED (TREE_TYPE (and_expr
))
7701 || (TYPE_PRECISION (type
)
7702 <= TYPE_PRECISION (TREE_TYPE (and_expr
))))
7704 else if (TYPE_PRECISION (TREE_TYPE (and1
))
7705 <= HOST_BITS_PER_WIDE_INT
7706 && tree_fits_uhwi_p (and1
))
7708 unsigned HOST_WIDE_INT cst
;
7710 cst
= tree_to_uhwi (and1
);
7711 cst
&= HOST_WIDE_INT_M1U
7712 << (TYPE_PRECISION (TREE_TYPE (and1
)) - 1);
7713 change
= (cst
== 0);
7714 #ifdef LOAD_EXTEND_OP
7716 && !flag_syntax_only
7717 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0
)))
7720 tree uns
= unsigned_type_for (TREE_TYPE (and0
));
7721 and0
= fold_convert_loc (loc
, uns
, and0
);
7722 and1
= fold_convert_loc (loc
, uns
, and1
);
7728 tem
= force_fit_type (type
, wi::to_widest (and1
), 0,
7729 TREE_OVERFLOW (and1
));
7730 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
7731 fold_convert_loc (loc
, type
, and0
), tem
);
7735 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7736 when one of the new casts will fold away. Conservatively we assume
7737 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7738 if (POINTER_TYPE_P (type
)
7739 && TREE_CODE (arg0
) == POINTER_PLUS_EXPR
7740 && (!TYPE_RESTRICT (type
) || TYPE_RESTRICT (TREE_TYPE (arg0
)))
7741 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
7742 || TREE_CODE (TREE_OPERAND (arg0
, 0)) == NOP_EXPR
7743 || TREE_CODE (TREE_OPERAND (arg0
, 1)) == NOP_EXPR
))
7745 tree arg00
= TREE_OPERAND (arg0
, 0);
7746 tree arg01
= TREE_OPERAND (arg0
, 1);
7748 return fold_build_pointer_plus_loc
7749 (loc
, fold_convert_loc (loc
, type
, arg00
), arg01
);
7752 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7753 of the same precision, and X is an integer type not narrower than
7754 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7755 if (INTEGRAL_TYPE_P (type
)
7756 && TREE_CODE (op0
) == BIT_NOT_EXPR
7757 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
7758 && CONVERT_EXPR_P (TREE_OPERAND (op0
, 0))
7759 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (op0
)))
7761 tem
= TREE_OPERAND (TREE_OPERAND (op0
, 0), 0);
7762 if (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7763 && TYPE_PRECISION (type
) <= TYPE_PRECISION (TREE_TYPE (tem
)))
7764 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
7765 fold_convert_loc (loc
, type
, tem
));
7768 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7769 type of X and Y (integer types only). */
7770 if (INTEGRAL_TYPE_P (type
)
7771 && TREE_CODE (op0
) == MULT_EXPR
7772 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
7773 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
7775 /* Be careful not to introduce new overflows. */
7777 if (TYPE_OVERFLOW_WRAPS (type
))
7780 mult_type
= unsigned_type_for (type
);
7782 if (TYPE_PRECISION (mult_type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
7784 tem
= fold_build2_loc (loc
, MULT_EXPR
, mult_type
,
7785 fold_convert_loc (loc
, mult_type
,
7786 TREE_OPERAND (op0
, 0)),
7787 fold_convert_loc (loc
, mult_type
,
7788 TREE_OPERAND (op0
, 1)));
7789 return fold_convert_loc (loc
, type
, tem
);
7793 tem
= fold_convert_const (code
, type
, arg0
);
7794 return tem
? tem
: NULL_TREE
;
7796 case ADDR_SPACE_CONVERT_EXPR
:
7797 if (integer_zerop (arg0
))
7798 return fold_convert_const (code
, type
, arg0
);
7801 case FIXED_CONVERT_EXPR
:
7802 tem
= fold_convert_const (code
, type
, arg0
);
7803 return tem
? tem
: NULL_TREE
;
7805 case VIEW_CONVERT_EXPR
:
7806 if (TREE_CODE (op0
) == MEM_REF
)
7807 return fold_build2_loc (loc
, MEM_REF
, type
,
7808 TREE_OPERAND (op0
, 0), TREE_OPERAND (op0
, 1));
7810 return fold_view_convert_expr (type
, op0
);
7813 tem
= fold_negate_expr (loc
, arg0
);
7815 return fold_convert_loc (loc
, type
, tem
);
7819 if (TREE_CODE (arg0
) == INTEGER_CST
|| TREE_CODE (arg0
) == REAL_CST
)
7820 return fold_abs_const (arg0
, type
);
7821 /* Convert fabs((double)float) into (double)fabsf(float). */
7822 else if (TREE_CODE (arg0
) == NOP_EXPR
7823 && TREE_CODE (type
) == REAL_TYPE
)
7825 tree targ0
= strip_float_extensions (arg0
);
7827 return fold_convert_loc (loc
, type
,
7828 fold_build1_loc (loc
, ABS_EXPR
,
7832 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7833 else if (TREE_CODE (arg0
) == ABS_EXPR
)
7836 /* Strip sign ops from argument. */
7837 if (TREE_CODE (type
) == REAL_TYPE
)
7839 tem
= fold_strip_sign_ops (arg0
);
7841 return fold_build1_loc (loc
, ABS_EXPR
, type
,
7842 fold_convert_loc (loc
, type
, tem
));
7847 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
7848 return fold_convert_loc (loc
, type
, arg0
);
7849 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
7851 tree itype
= TREE_TYPE (type
);
7852 tree rpart
= fold_convert_loc (loc
, itype
, TREE_OPERAND (arg0
, 0));
7853 tree ipart
= fold_convert_loc (loc
, itype
, TREE_OPERAND (arg0
, 1));
7854 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
,
7855 negate_expr (ipart
));
7857 if (TREE_CODE (arg0
) == COMPLEX_CST
)
7859 tree itype
= TREE_TYPE (type
);
7860 tree rpart
= fold_convert_loc (loc
, itype
, TREE_REALPART (arg0
));
7861 tree ipart
= fold_convert_loc (loc
, itype
, TREE_IMAGPART (arg0
));
7862 return build_complex (type
, rpart
, negate_expr (ipart
));
7864 if (TREE_CODE (arg0
) == CONJ_EXPR
)
7865 return fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
7869 if (TREE_CODE (arg0
) == INTEGER_CST
)
7870 return fold_not_const (arg0
, type
);
7871 /* Convert ~ (-A) to A - 1. */
7872 else if (INTEGRAL_TYPE_P (type
) && TREE_CODE (arg0
) == NEGATE_EXPR
)
7873 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
7874 fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0)),
7875 build_int_cst (type
, 1));
7876 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7877 else if (INTEGRAL_TYPE_P (type
)
7878 && ((TREE_CODE (arg0
) == MINUS_EXPR
7879 && integer_onep (TREE_OPERAND (arg0
, 1)))
7880 || (TREE_CODE (arg0
) == PLUS_EXPR
7881 && integer_all_onesp (TREE_OPERAND (arg0
, 1)))))
7882 return fold_build1_loc (loc
, NEGATE_EXPR
, type
,
7883 fold_convert_loc (loc
, type
,
7884 TREE_OPERAND (arg0
, 0)));
7885 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7886 else if (TREE_CODE (arg0
) == BIT_XOR_EXPR
7887 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
7888 fold_convert_loc (loc
, type
,
7889 TREE_OPERAND (arg0
, 0)))))
7890 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
, tem
,
7891 fold_convert_loc (loc
, type
,
7892 TREE_OPERAND (arg0
, 1)));
7893 else if (TREE_CODE (arg0
) == BIT_XOR_EXPR
7894 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
7895 fold_convert_loc (loc
, type
,
7896 TREE_OPERAND (arg0
, 1)))))
7897 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
,
7898 fold_convert_loc (loc
, type
,
7899 TREE_OPERAND (arg0
, 0)), tem
);
7900 /* Perform BIT_NOT_EXPR on each element individually. */
7901 else if (TREE_CODE (arg0
) == VECTOR_CST
)
7905 unsigned count
= VECTOR_CST_NELTS (arg0
), i
;
7907 elements
= XALLOCAVEC (tree
, count
);
7908 for (i
= 0; i
< count
; i
++)
7910 elem
= VECTOR_CST_ELT (arg0
, i
);
7911 elem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (type
), elem
);
7912 if (elem
== NULL_TREE
)
7917 return build_vector (type
, elements
);
7922 case TRUTH_NOT_EXPR
:
7923 /* Note that the operand of this must be an int
7924 and its values must be 0 or 1.
7925 ("true" is a fixed value perhaps depending on the language,
	 but we don't handle values other than 1 correctly yet.)  */
      tem = fold_truth_not_expr (loc, arg0);
      if (!tem)
	return NULL_TREE;
      return fold_convert_loc (loc, type, tem);

    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert_loc (loc, type, TREE_REALPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
				 fold_build1_loc (loc, REALPART_EXPR, itype,
						  TREE_OPERAND (arg0, 0)),
				 fold_build1_loc (loc, REALPART_EXPR, itype,
						  TREE_OPERAND (arg0, 1)));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1_loc (loc, REALPART_EXPR, itype,
				 TREE_OPERAND (arg0, 0));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
		fn = mathfn_built_in (type, BUILT_IN_COS);
		if (fn)
		  return build_call_expr_loc (loc, fn, 1,
					      CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      return NULL_TREE;

    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return build_zero_cst (type);
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
				 fold_build1_loc (loc, IMAGPART_EXPR, itype,
						  TREE_OPERAND (arg0, 0)),
				 fold_build1_loc (loc, IMAGPART_EXPR, itype,
						  TREE_OPERAND (arg0, 1)));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1_loc (loc, IMAGPART_EXPR, itype,
				 TREE_OPERAND (arg0, 0));
	  return fold_convert_loc (loc, type, negate_expr (tem));
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
		fn = mathfn_built_in (type, BUILT_IN_SIN);
		if (fn)
		  return build_call_expr_loc (loc, fn, 1,
					      CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      return NULL_TREE;

    case INDIRECT_REF:
      /* Fold *&X to X if X is an lvalue.  */
      if (TREE_CODE (op0) == ADDR_EXPR)
	{
	  tree op00 = TREE_OPERAND (op0, 0);
	  if ((TREE_CODE (op00) == VAR_DECL
	       || TREE_CODE (op00) == PARM_DECL
	       || TREE_CODE (op00) == RESULT_DECL)
	      && !TREE_READONLY (op00))
	    return op00;
	}
      return NULL_TREE;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;
	enum tree_code subcode;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 2);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR))
	  elts += nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else
	  subcode = FLOAT_EXPR;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
	unsigned int nelts, i;
	tree *elts;
	enum tree_code subcode;

	if (TREE_CODE (op0) != VECTOR_CST)
	  return NULL_TREE;
	nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (op0));

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (op0, elts))
	  return NULL_TREE;

	switch (code)
	  {
	  case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
	  case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
	  case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
	  default: gcc_unreachable ();
	  }

	for (i = 1; i < nelts; i++)
	  {
	    elts[0] = const_binop (subcode, elts[0], elts[i]);
	    if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
	      return NULL_TREE;
	  }

	return elts[0];
      }

    default:
      return NULL_TREE;
    } /* switch (code) */
}

/* If the operation was a conversion do _not_ mark a resulting constant
   with TREE_OVERFLOW if the original constant was not.  These conversions
   have implementation defined behavior and retaining the TREE_OVERFLOW
   flag here would confuse later passes such as VRP.  */

tree
fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
				tree type, tree op0)
{
  tree res = fold_unary_loc (loc, code, type, op0);
  if (res
      && TREE_CODE (res) == INTEGER_CST
      && TREE_CODE (op0) == INTEGER_CST
      && CONVERT_EXPR_CODE_P (code))
    TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);

  return res;
}
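
/* Illustrative sketch added for exposition; not part of the original
   fold-const.c.  A narrowing conversion of a constant has
   implementation-defined behavior rather than being an overflow, so
   the folded result must not gain a TREE_OVERFLOW flag that the
   operand did not have.  The function name below is hypothetical.  */
#if 0
signed char
example_narrowing_conversion (void)
{
  /* Folding (signed char) 511 yields -1 on the usual two's complement
     targets; the resulting INTEGER_CST copies the (clear) overflow
     flag of 511, so later passes such as VRP see a clean constant.  */
  return (signed char) 511;
}
#endif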
/* Fold a binary bitwise/truth expression of code CODE and type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting expression.
   ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_truth_andor (location_t loc, enum tree_code code, tree type,
		  tree arg0, tree arg1, tree op0, tree op1)
{
  tree tem;

  /* We only do these simplifications if we are optimizing.  */
  if (!optimize)
    return NULL_TREE;

  /* Check for things like (A || B) && (A || C).  We can convert this
     to A || (B && C).  Note that either operator can be any of the four
     truth and/or operations and the transformation will still be
     valid.  Also note that we only care about order for the
     ANDIF and ORIF operators.  If B contains side effects, this
     might change the truth-value of A.  */
  if (TREE_CODE (arg0) == TREE_CODE (arg1)
      && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
	  || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
	  || TREE_CODE (arg0) == TRUTH_AND_EXPR
	  || TREE_CODE (arg0) == TRUTH_OR_EXPR)
      && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
    {
      tree a00 = TREE_OPERAND (arg0, 0);
      tree a01 = TREE_OPERAND (arg0, 1);
      tree a10 = TREE_OPERAND (arg1, 0);
      tree a11 = TREE_OPERAND (arg1, 1);
      int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
			  || TREE_CODE (arg0) == TRUTH_AND_EXPR)
			 && (code == TRUTH_AND_EXPR
			     || code == TRUTH_OR_EXPR));

      if (operand_equal_p (a00, a10, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
				fold_build2_loc (loc, code, type, a01, a11));
      else if (commutative && operand_equal_p (a00, a11, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
				fold_build2_loc (loc, code, type, a01, a10));
      else if (commutative && operand_equal_p (a01, a10, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
				fold_build2_loc (loc, code, type, a00, a11));

      /* This case is tricky because we must either have commutative
	 operators or else A10 must not have side-effects.  */

      else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
	       && operand_equal_p (a01, a11, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type,
				fold_build2_loc (loc, code, type, a00, a10),
				a01);
    }

  /* See if we can build a range comparison.  */
  if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
    return tem;

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
      if (tem)
	return fold_build2_loc (loc, code, type, tem, arg1);
    }

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
      if (tem)
	return fold_build2_loc (loc, code, type, arg0, tem);
    }

  /* Check for the possibility of merging component references.  If our
     lhs is another similar operation, try to merge its rhs with our
     rhs.  Then try to merge our lhs and rhs.  */
  if (TREE_CODE (arg0) == code
      && 0 != (tem = fold_truth_andor_1 (loc, code, type,
					 TREE_OPERAND (arg0, 1), arg1)))
    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

  if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
    return tem;

  if (LOGICAL_OP_NON_SHORT_CIRCUIT
      && (code == TRUTH_AND_EXPR
	  || code == TRUTH_ANDIF_EXPR
	  || code == TRUTH_OR_EXPR
	  || code == TRUTH_ORIF_EXPR))
    {
      enum tree_code ncode, icode;

      ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
	      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
      icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;

      /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
	 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
	 We don't want to pack more than two leaves into a non-IF AND/OR
	 expression.
	 If the tree code of the left-hand operand isn't an AND/OR-IF code
	 and not equal to IF-CODE, then we don't want to add the right-hand
	 operand.  If the inner right-hand side of the left-hand operand has
	 side-effects, or isn't simple, then we can't add to it, as
	 otherwise we might destroy the if-sequence.  */
      if (TREE_CODE (arg0) == icode
	  && simple_operand_p_2 (arg1)
	  /* Needed for sequence points to handle trappings, and
	     side-effects.  */
	  && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
	{
	  tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
				 arg1);
	  return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
				  tem);
	}
      /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
	 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
      else if (TREE_CODE (arg1) == icode
	       && simple_operand_p_2 (arg0)
	       /* Needed for sequence points to handle trappings, and
		  side-effects.  */
	       && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_build2_loc (loc, ncode, type,
				 arg0, TREE_OPERAND (arg1, 0));
	  return fold_build2_loc (loc, icode, type, tem,
				  TREE_OPERAND (arg1, 1));
	}
      /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
	 into (A OR B).
	 For sequence point consistency, we need to check for trapping,
	 and side-effects.  */
      else if (code == icode && simple_operand_p_2 (arg0)
	       && simple_operand_p_2 (arg1))
	return fold_build2_loc (loc, ncode, type, arg0, arg1);
    }

  return NULL_TREE;
}
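
/* Illustrative sketch added for exposition; not part of the original
   fold-const.c.  The factoring transformation above at source level;
   the function name is hypothetical.  */
#if 0
int
example_truth_andor (int a, int b, int c)
{
  /* (a || b) && (a || c) folds to a || (b && c) here because b and c
     have no side effects; both forms evaluate a first and agree for
     all eight truth assignments of a, b and c.  */
  return (a || b) && (a || c);
}
#endif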
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code compl_code;

  if (code == MIN_EXPR)
    compl_code = MAX_EXPR;
  else if (code == MAX_EXPR)
    compl_code = MIN_EXPR;
  else
    gcc_unreachable ();

  /* MIN (MAX (a, b), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));

  /* MIN (MAX (b, a), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
      && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));

  /* MIN (a, MAX (a, b)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));

  /* MIN (a, MAX (b, a)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));

  return NULL_TREE;
}
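
/* Illustrative sketch added for exposition; not part of the original
   fold-const.c.  One of the MIN/MAX identities above at source level;
   the function name is hypothetical.  */
#if 0
int
example_minmax (int a, int b)
{
  int t = a > b ? a : b;	/* MAX (a, b) */
  /* MIN (MAX (a, b), b) is always b, so the whole expression folds to
     b (a is still evaluated if it has side effects, which is what
     omit_one_operand_loc arranges).  */
  return t < b ? t : b;
}
#endif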
/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
   valid if signed overflow is undefined.  */

static tree
maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
				 tree arg0, tree arg1,
				 bool *strict_overflow_p)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
  int sgn0;
  bool swap = false;

  /* Match A +- CST code arg1 and CST code arg1.  We can change the
     first form only if overflow is undefined.  */
  if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
	 /* In principle pointers also have undefined overflow behavior,
	    but that causes problems elsewhere.  */
	 && !POINTER_TYPE_P (TREE_TYPE (arg0))
	 && (code0 == MINUS_EXPR
	     || code0 == PLUS_EXPR)
	 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	|| code0 == INTEGER_CST))
    return NULL_TREE;

  /* Identify the constant in arg0 and its sign.  */
  if (code0 == INTEGER_CST)
    cst0 = arg0;
  else
    cst0 = TREE_OPERAND (arg0, 1);
  sgn0 = tree_int_cst_sgn (cst0);

  /* Overflowed constants and zero will cause problems.  */
  if (integer_zerop (cst0)
      || TREE_OVERFLOW (cst0))
    return NULL_TREE;

  /* See if we can reduce the magnitude of the constant in
     arg0 by changing the comparison code.  */
  if (code0 == INTEGER_CST)
    {
      /* CST <= arg1  ->  CST-1 < arg1.  */
      if (code == LE_EXPR && sgn0 == 1)
	code = LT_EXPR;
      /* -CST < arg1  ->  -CST-1 <= arg1.  */
      else if (code == LT_EXPR && sgn0 == -1)
	code = LE_EXPR;
      /* CST > arg1  ->  CST-1 >= arg1.  */
      else if (code == GT_EXPR && sgn0 == 1)
	code = GE_EXPR;
      /* -CST >= arg1  ->  -CST-1 > arg1.  */
      else if (code == GE_EXPR && sgn0 == -1)
	code = GT_EXPR;
      else
	return NULL_TREE;
      /* arg1 code' CST' might be more canonical.  */
      swap = true;
    }
  else
    {
      /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
      if (code == LT_EXPR
	  && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = LE_EXPR;
      /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
      else if (code == GT_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = GE_EXPR;
      /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
      else if (code == LE_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = LT_EXPR;
      /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
      else if (code == GE_EXPR
	       && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = GT_EXPR;
      else
	return NULL_TREE;
      *strict_overflow_p = true;
    }

  /* Now build the constant reduced in magnitude.  But not if that
     would produce one outside of its type's range.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
      && ((sgn0 == 1
	   && TYPE_MIN_VALUE (TREE_TYPE (cst0))
	   && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
	  || (sgn0 == -1
	      && TYPE_MAX_VALUE (TREE_TYPE (cst0))
	      && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
    /* We cannot swap the comparison here as that would cause us to
       endlessly recurse.  */
    return NULL_TREE;

  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
		       cst0, build_int_cst (TREE_TYPE (cst0), 1));
  if (code0 != INTEGER_CST)
    t = fold_build2_loc (loc, code0, TREE_TYPE (arg0),
			 TREE_OPERAND (arg0, 0), t);
  t = fold_convert (TREE_TYPE (arg1), t);

  /* If swapping might yield a more canonical form, do so.  */
  if (swap)
    return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
  else
    return fold_build2_loc (loc, code, type, t, arg1);
}
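
/* Illustrative sketch added for exposition; not part of the original
   fold-const.c.  The magnitude reduction above at source level; the
   function name is hypothetical.  */
#if 0
int
example_canonicalize (int x, int y)
{
  /* With sgn0 == 1 and code LE_EXPR, "5 <= x" is canonicalized to
     "4 < x" and then swapped to "x > 4".  Likewise, assuming signed
     overflow is undefined, "x - 5 < y" becomes "x - 4 <= y".  */
  return 5 <= x;
}
#endif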
/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
   overflow further.  Try to decrease the magnitude of constants involved
   by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
   and put sole constants at the second argument position.
   Returns the canonicalized tree if changed, otherwise NULL_TREE.  */

static tree
maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
			       tree arg0, tree arg1)
{
  tree t;
  bool strict_overflow_p;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when reducing constant in comparison");

  /* Try canonicalization by simplifying arg0.  */
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
				       &strict_overflow_p);
  if (t)
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
      return t;
    }

  /* Try canonicalization by simplifying arg1 using the swapped
     comparison.  */
  code = swap_tree_comparison (code);
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
				       &strict_overflow_p);
  if (t && strict_overflow_p)
    fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
  return t;
}
/* Return whether BASE + OFFSET + BITPOS may wrap around the address
   space.  This is used to avoid issuing overflow warnings for
   expressions like &p->x which can not wrap.  */

static bool
pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
{
  if (!POINTER_TYPE_P (TREE_TYPE (base)))
    return true;

  if (bitpos < 0)
    return true;

  wide_int wi_offset;
  int precision = TYPE_PRECISION (TREE_TYPE (base));
  if (offset == NULL_TREE)
    wi_offset = wi::zero (precision);
  else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
    return true;
  else
    wi_offset = offset;

  bool overflow;
  wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
  wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
  if (overflow)
    return true;

  if (!wi::fits_uhwi_p (total))
    return true;

  HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
  if (size <= 0)
    return true;

  /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
     array.  */
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      HOST_WIDE_INT base_size;

      base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
      if (base_size > 0 && size < base_size)
	size = base_size;
    }

  return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
}
/* Return the HOST_WIDE_INT least significant bits of T, a sizetype
   kind INTEGER_CST.  This makes sure to properly sign-extend the
   constant.  */

static HOST_WIDE_INT
size_low_cst (const_tree t)
{
  HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
  int prec = TYPE_PRECISION (TREE_TYPE (t));
  if (prec < HOST_BITS_PER_WIDE_INT)
    return sext_hwi (w, prec);
  return w;
}
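
/* Illustrative sketch added for exposition; not part of the original
   fold-const.c.  Why the sign extension matters: for a 32-bit offset
   type on a 64-bit host, the low element of the constant 0xffffffff
   must read back as -1, not 4294967295, or the bit-position
   arithmetic in fold_comparison goes wrong.  The helper below is a
   hypothetical stand-alone equivalent of sext_hwi for a 64-bit
   HOST_WIDE_INT.  */
#if 0
#include <stdint.h>
static int64_t
example_sext (int64_t w, int prec)
{
  if (prec < 64)
    {
      uint64_t u = (uint64_t) w << (64 - prec);
      /* Right shift of a negative value is implementation-defined in
	 C; on the usual targets it is an arithmetic shift.  */
      return (int64_t) u >> (64 - prec);
    }
  return w;
}
#endif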
/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary should call fold_binary rather than this function
   directly.  Fold a comparison with tree code CODE and type TYPE
   with operands OP0 and OP1.  Return the folded comparison or
   NULL_TREE.  */

static tree
fold_comparison (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
  tree arg0, arg1, tem;

  arg0 = op0;
  arg1 = op1;

  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1.  */
  if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (equality_code || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && TREE_CODE (arg1) == INTEGER_CST
      && !TREE_OVERFLOW (arg1))
    {
      const enum tree_code
	reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
      tree variable = TREE_OPERAND (arg0, 0);
      tree new_const = int_const_binop (reverse_op, const2, const1);

      /* If the constant operation overflowed this can be
	 simplified as a comparison against INT_MAX/INT_MIN.  */
      if (TREE_OVERFLOW (new_const)
	  && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
	{
	  int const1_sgn = tree_int_cst_sgn (const1);
	  enum tree_code code2 = code;

	  /* Get the sign of the constant on the lhs if the
	     operation were VARIABLE + CONST1.  */
	  if (TREE_CODE (arg0) == MINUS_EXPR)
	    const1_sgn = -const1_sgn;

	  /* The sign of the constant determines if we overflowed
	     INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
	     Canonicalize to the INT_MIN overflow by swapping the comparison
	     if necessary.  */
	  if (const1_sgn == -1)
	    code2 = swap_tree_comparison (code);

	  /* We now can look at the canonicalized case
	       VARIABLE + 1  CODE2  INT_MIN
	     and decide on the result.  */
	  switch (code2)
	    {
	    case EQ_EXPR:
	    case LT_EXPR:
	    case LE_EXPR:
	      return
		omit_one_operand_loc (loc, type, boolean_false_node, variable);

	    case NE_EXPR:
	    case GE_EXPR:
	    case GT_EXPR:
	      return
		omit_one_operand_loc (loc, type, boolean_true_node, variable);

	    default:
	      break;
	    }
	}

      if (!equality_code)
	fold_overflow_warning ("assuming signed overflow does not occur "
			       "when changing X +- C1 cmp C2 to "
			       "X cmp C2 -+ C1",
			       WARN_STRICT_OVERFLOW_COMPARISON);
      return fold_build2_loc (loc, code, type, variable, new_const);
    }

  /* Transform comparisons of the form X - Y CMP 0 to X CMP Y.  */
  if (TREE_CODE (arg0) == MINUS_EXPR
      && (equality_code || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
      && integer_zerop (arg1))
    {
      /* ??? The transformation is valid for the other operators if overflow
	 is undefined for the type, but performing it here badly interacts
	 with the transformation in fold_cond_expr_with_comparison which
	 attempts to synthesize ABS_EXPR.  */
      if (!equality_code)
	fold_overflow_warning ("assuming signed overflow does not occur "
			       "when changing X - Y cmp 0 to X cmp Y",
			       WARN_STRICT_OVERFLOW_COMPARISON);
      return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
			      TREE_OPERAND (arg0, 1));
    }
  /* For comparisons of pointers we can decompose it to a compile time
     comparison of the base objects and the offsets into the object.
     This requires at least one operand being an ADDR_EXPR or a
     POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == ADDR_EXPR
	  || TREE_CODE (arg1) == ADDR_EXPR
	  || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
    {
      tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
      HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
      machine_mode mode;
      int volatilep, unsignedp;
      bool indirect_base0 = false, indirect_base1 = false;

      /* Get base and offset for the access.  Strip ADDR_EXPR for
	 get_inner_reference, but put it back by stripping INDIRECT_REF
	 off the base object if possible.  indirect_baseN will be true
	 if baseN is not an address but refers to the object itself.  */
      base0 = arg0;
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
				       &bitsize, &bitpos0, &offset0, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base0) == INDIRECT_REF)
	    base0 = TREE_OPERAND (base0, 0);
	  else
	    indirect_base0 = true;
	}
      else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	{
	  base0 = TREE_OPERAND (arg0, 0);
	  STRIP_SIGN_NOPS (base0);
	  if (TREE_CODE (base0) == ADDR_EXPR)
	    {
	      base0 = TREE_OPERAND (base0, 0);
	      indirect_base0 = true;
	    }
	  offset0 = TREE_OPERAND (arg0, 1);
	  if (tree_fits_shwi_p (offset0))
	    {
	      HOST_WIDE_INT off = size_low_cst (offset0);
	      if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
				   * BITS_PER_UNIT)
		  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
		{
		  bitpos0 = off * BITS_PER_UNIT;
		  offset0 = NULL_TREE;
		}
	    }
	}

      base1 = arg1;
      if (TREE_CODE (arg1) == ADDR_EXPR)
	{
	  base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
				       &bitsize, &bitpos1, &offset1, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base1) == INDIRECT_REF)
	    base1 = TREE_OPERAND (base1, 0);
	  else
	    indirect_base1 = true;
	}
      else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	{
	  base1 = TREE_OPERAND (arg1, 0);
	  STRIP_SIGN_NOPS (base1);
	  if (TREE_CODE (base1) == ADDR_EXPR)
	    {
	      base1 = TREE_OPERAND (base1, 0);
	      indirect_base1 = true;
	    }
	  offset1 = TREE_OPERAND (arg1, 1);
	  if (tree_fits_shwi_p (offset1))
	    {
	      HOST_WIDE_INT off = size_low_cst (offset1);
	      if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
				   * BITS_PER_UNIT)
		  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
		{
		  bitpos1 = off * BITS_PER_UNIT;
		  offset1 = NULL_TREE;
		}
	    }
	}

      /* A local variable can never be pointed to by
	 the default SSA name of an incoming parameter.  */
      if ((TREE_CODE (arg0) == ADDR_EXPR
	   && indirect_base0
	   && TREE_CODE (base0) == VAR_DECL
	   && auto_var_in_fn_p (base0, current_function_decl)
	   && !indirect_base1
	   && TREE_CODE (base1) == SSA_NAME
	   && SSA_NAME_IS_DEFAULT_DEF (base1)
	   && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
	  || (TREE_CODE (arg1) == ADDR_EXPR
	      && indirect_base1
	      && TREE_CODE (base1) == VAR_DECL
	      && auto_var_in_fn_p (base1, current_function_decl)
	      && !indirect_base0
	      && TREE_CODE (base0) == SSA_NAME
	      && SSA_NAME_IS_DEFAULT_DEF (base0)
	      && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
	{
	  if (code == NE_EXPR)
	    return constant_boolean_node (1, type);
	  else if (code == EQ_EXPR)
	    return constant_boolean_node (0, type);
	}
      /* If we have equivalent bases we might be able to simplify.  */
      else if (indirect_base0 == indirect_base1
	       && operand_equal_p (base0, base1, 0))
	{
	  /* We can fold this expression to a constant if the non-constant
	     offset parts are equal.  */
	  if ((offset0 == offset1
	       || (offset0 && offset1
		   && operand_equal_p (offset0, offset1, 0)))
	      && (equality_code
		  || (indirect_base0 && DECL_P (base0))
		  || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      if (!equality_code
		  && bitpos0 != bitpos1
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_CONDITIONAL);

	      switch (code)
		{
		case EQ_EXPR:
		  return constant_boolean_node (bitpos0 == bitpos1, type);
		case NE_EXPR:
		  return constant_boolean_node (bitpos0 != bitpos1, type);
		case LT_EXPR:
		  return constant_boolean_node (bitpos0 < bitpos1, type);
		case LE_EXPR:
		  return constant_boolean_node (bitpos0 <= bitpos1, type);
		case GE_EXPR:
		  return constant_boolean_node (bitpos0 >= bitpos1, type);
		case GT_EXPR:
		  return constant_boolean_node (bitpos0 > bitpos1, type);
		default:;
		}
	    }
	  /* We can simplify the comparison to a comparison of the variable
	     offset parts if the constant offset parts are equal.
	     Be careful to use signed sizetype here because otherwise we
	     mess with array offsets in the wrong way.  This is possible
	     because pointer arithmetic is restricted to remain within an
	     object and overflow on pointer differences is undefined as of
	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
	  else if (bitpos0 == bitpos1
		   && (equality_code
		       || (indirect_base0 && DECL_P (base0))
		       || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      /* By converting to signed sizetype we cover middle-end pointer
		 arithmetic which operates on unsigned pointer types of size
		 type size and ARRAY_REF offsets which are properly sign or
		 zero extended from their type in case it is narrower than
		 sizetype.  */
	      if (offset0 == NULL_TREE)
		offset0 = build_int_cst (ssizetype, 0);
	      else
		offset0 = fold_convert_loc (loc, ssizetype, offset0);
	      if (offset1 == NULL_TREE)
		offset1 = build_int_cst (ssizetype, 0);
	      else
		offset1 = fold_convert_loc (loc, ssizetype, offset1);

	      if (!equality_code
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_COMPARISON);

	      return fold_build2_loc (loc, code, type, offset0, offset1);
	    }
	}
      /* For non-equal bases we can simplify if they are addresses
	 of local binding decls or constants.  */
      else if (indirect_base0 && indirect_base1
	       /* We know that !operand_equal_p (base0, base1, 0)
		  because the if condition was false.  But make
		  sure two decls are not the same.  */
	       && base0 != base1
	       && TREE_CODE (arg0) == ADDR_EXPR
	       && TREE_CODE (arg1) == ADDR_EXPR
	       && (((TREE_CODE (base0) == VAR_DECL
		     || TREE_CODE (base0) == PARM_DECL)
		    && (targetm.binds_local_p (base0)
			|| CONSTANT_CLASS_P (base1)))
		   || CONSTANT_CLASS_P (base0))
	       && (((TREE_CODE (base1) == VAR_DECL
		     || TREE_CODE (base1) == PARM_DECL)
		    && (targetm.binds_local_p (base1)
			|| CONSTANT_CLASS_P (base0)))
		   || CONSTANT_CLASS_P (base1)))
	{
	  if (code == EQ_EXPR)
	    return omit_two_operands_loc (loc, type, boolean_false_node,
					  arg0, arg1);
	  else if (code == NE_EXPR)
	    return omit_two_operands_loc (loc, type, boolean_true_node,
					  arg0, arg1);
	}
      /* For equal offsets we can simplify to a comparison of the
	 base addresses.  */
      else if (bitpos0 == bitpos1
	       && (indirect_base0
		   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
	       && (indirect_base1
		   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
	       && ((offset0 == offset1)
		   || (offset0 && offset1
		       && operand_equal_p (offset0, offset1, 0))))
	{
	  if (indirect_base0)
	    base0 = build_fold_addr_expr_loc (loc, base0);
	  if (indirect_base1)
	    base1 = build_fold_addr_expr_loc (loc, base1);
	  return fold_build2_loc (loc, code, type, base0, base1);
	}
    }
  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one and has the same sign.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = TREE_OPERAND (arg1, 1);
      tree variable1 = TREE_OPERAND (arg0, 0);
      tree variable2 = TREE_OPERAND (arg1, 0);
      tree cst;
      const char * const warnmsg = G_("assuming signed overflow does not "
				      "occur when combining constants around "
				      "a comparison");

      /* Put the constant on the side where it doesn't overflow and is
	 of lower absolute value and of the same sign as before.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const2, const1);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
	  && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  variable1,
				  fold_build2_loc (loc, TREE_CODE (arg1),
						   TREE_TYPE (arg1),
						   variable2, cst));
	}

      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const1, const2);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
	  && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  fold_build2_loc (loc, TREE_CODE (arg0),
						   TREE_TYPE (arg0),
						   variable1, cst),
				  variable2);
	}
    }
  /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
     signed arithmetic case.  That form is created by the compiler
     often enough for folding it to be of value.  One example is in
     computing loop trip counts after Operator Strength Reduction.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && TREE_CODE (arg0) == MULT_EXPR
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && integer_zerop (arg1))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;			/* zero */
      tree variable1 = TREE_OPERAND (arg0, 0);
      enum tree_code cmp_code = code;

      /* Handle unfolded multiplication by zero.  */
      if (integer_zerop (const1))
	return fold_build2_loc (loc, cmp_code, type, const1, const2);

      fold_overflow_warning (("assuming signed overflow does not occur when "
			      "eliminating multiplication in comparison "
			      "with zero"),
			     WARN_STRICT_OVERFLOW_COMPARISON);

      /* If const1 is negative we swap the sense of the comparison.  */
      if (tree_int_cst_sgn (const1) < 0)
	cmp_code = swap_tree_comparison (cmp_code);

      return fold_build2_loc (loc, cmp_code, type, variable1, const2);
    }

  tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
  if (tem)
    return tem;
  if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
    {
      tree targ0 = strip_float_extensions (arg0);
      tree targ1 = strip_float_extensions (arg1);
      tree newtype = TREE_TYPE (targ0);

      if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	newtype = TREE_TYPE (targ1);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, newtype, targ0),
				fold_convert_loc (loc, newtype, targ1));

      /* (-a) CMP (-b) -> b CMP a  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
				TREE_OPERAND (arg0, 0));

      if (TREE_CODE (arg1) == REAL_CST)
	{
	  REAL_VALUE_TYPE cst;
	  cst = TREE_REAL_CST (arg1);

	  /* (-a) CMP CST -> a swap(CMP) (-CST)  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR)
	    return fold_build2_loc (loc, swap_tree_comparison (code), type,
				    TREE_OPERAND (arg0, 0),
				    build_real (TREE_TYPE (arg1),
						real_value_negate (&cst)));

	  /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
	  /* a CMP (-0) -> a CMP 0  */
	  if (REAL_VALUE_MINUS_ZERO (cst))
	    return fold_build2_loc (loc, code, type, arg0,
				    build_real (TREE_TYPE (arg1), dconst0));

	  /* x != NaN is always true, other ops are always false.  */
	  if (REAL_VALUE_ISNAN (cst)
	      && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
	      return omit_one_operand_loc (loc, type, tem, arg0);
	    }

	  /* Fold comparisons against infinity.  */
	  if (REAL_VALUE_ISINF (cst)
	      && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = fold_inf_compare (loc, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}

      /* If this is a comparison of a real constant with a PLUS_EXPR
	 or a MINUS_EXPR of a real constant, we can convert it into a
	 comparison with a revised real constant as long as no overflow
	 occurs when unsafe_math_optimizations are enabled.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == REAL_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      arg1, TREE_OPERAND (arg0, 1)))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Likewise, we can simplify a comparison of a real constant with
	 a MINUS_EXPR whose first operand is also a real constant, i.e.
	 (c1 - x) < c2 becomes x > c1-c2.  Reordering is allowed on
	 floating-point types only if -fassociative-math is set.  */
      if (flag_associative_math
	  && TREE_CODE (arg1) == REAL_CST
	  && TREE_CODE (arg0) == MINUS_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
	  && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
				      arg1))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, swap_tree_comparison (code), type,
				TREE_OPERAND (arg0, 1), tem);

      /* Fold comparisons against built-in math functions.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && flag_unsafe_math_optimizations
	  && ! flag_errno_math)
	{
	  enum built_in_function fcode = builtin_mathfn_code (arg0);

	  if (fcode != END_BUILTINS)
	    {
	      tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}
    }
  if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
      && CONVERT_EXPR_P (arg0))
    {
      /* If we are widening one operand of an integer comparison,
	 see if the other operand is similarly being widened.  Perhaps we
	 can do the comparison in the narrower type.  */
      tem = fold_widened_comparison (loc, code, type, arg0, arg1);
      if (tem)
	return tem;

      /* Or if we are changing signedness.  */
      tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
      if (tem)
	return tem;
    }

  /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
     constant, we can simplify it.  */
  if (TREE_CODE (arg1) == INTEGER_CST
      && (TREE_CODE (arg0) == MIN_EXPR
	  || TREE_CODE (arg0) == MAX_EXPR)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
    {
      tem = optimize_minmax_comparison (loc, code, type, op0, op1);
      if (tem)
	return tem;
    }
  /* Simplify comparison of something with itself.  (For IEEE
     floating-point, we can only do some of these simplifications.)  */
  if (operand_equal_p (arg0, arg1, 0))
    {
      switch (code)
	{
	case EQ_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (element_mode (arg0)))
	    return constant_boolean_node (1, type);
	  break;

	case GE_EXPR:
	case LE_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (element_mode (arg0)))
	    return constant_boolean_node (1, type);
	  return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);

	case NE_EXPR:
	  /* For NE, we can only do this simplification if integer
	     or we don't honor IEEE floating point NaNs.  */
	  if (FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && HONOR_NANS (element_mode (arg0)))
	    break;
	  /* ... fall through ...  */
	case GT_EXPR:
	case LT_EXPR:
	  return constant_boolean_node (0, type);
	default:
	  gcc_unreachable ();
	}
    }
  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like  ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;
      int save_p = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
	  /* Don't handle degenerate cases here; they should already
	     have been handled anyway.  */
	  && cval1 != 0 && cval2 != 0
	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
	{
	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

	  /* We can't just pass T to eval_subst in case cval1 or cval2
	     was the same as ARG1.  */

	  tree high_result
	    = fold_build2_loc (loc, code, type,
			       eval_subst (loc, arg0, cval1, maxval,
					   cval2, minval),
			       arg1);
	  tree equal_result
	    = fold_build2_loc (loc, code, type,
			       eval_subst (loc, arg0, cval1, maxval,
					   cval2, maxval),
			       arg1);
	  tree low_result
	    = fold_build2_loc (loc, code, type,
			       eval_subst (loc, arg0, cval1, minval,
					   cval2, maxval),
			       arg1);

	  /* All three of these results should be 0 or 1.  Confirm they are.
	     Then use those values to select the proper code to use.  */

	  if (TREE_CODE (high_result) == INTEGER_CST
	      && TREE_CODE (equal_result) == INTEGER_CST
	      && TREE_CODE (low_result) == INTEGER_CST)
	    {
	      /* Make a 3-bit mask with the high-order bit being the
		 value for `>', the next for '=', and the low for '<'.  */
	      switch ((integer_onep (high_result) * 4)
		      + (integer_onep (equal_result) * 2)
		      + integer_onep (low_result))
		{
		case 0:
		  /* Always false.  */
		  return omit_one_operand_loc (loc, type,
					       integer_zero_node, arg0);
		case 1:
		  code = LT_EXPR;
		  break;
		case 2:
		  code = EQ_EXPR;
		  break;
		case 3:
		  code = LE_EXPR;
		  break;
		case 4:
		  code = GT_EXPR;
		  break;
		case 5:
		  code = NE_EXPR;
		  break;
		case 6:
		  code = GE_EXPR;
		  break;
		case 7:
		  /* Always true.  */
		  return omit_one_operand_loc (loc, type,
					       integer_one_node, arg0);
		}

	      if (save_p)
		{
		  tem = save_expr (build2 (code, type, cval1, cval2));
		  SET_EXPR_LOCATION (tem, loc);
		  return tem;
		}
	      return fold_build2_loc (loc, code, type, cval1, cval2);
	    }
	}
    }
  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
     into a single range test.  */
  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
      && TREE_CODE (arg1) == INTEGER_CST
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !integer_zerop (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (arg1))
    {
      tem = fold_div_compare (loc, code, type, arg0, arg1);
      if (tem != NULL_TREE)
	return tem;
    }

  /* Fold ~X op ~Y as Y op X.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == BIT_NOT_EXPR)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, code, type,
			      fold_convert_loc (loc, cmp_type,
						TREE_OPERAND (arg1, 0)),
			      TREE_OPERAND (arg0, 0));
    }

  /* Fold ~X op C as X op' ~C, where op' is the swapped comparison.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, swap_tree_comparison (code), type,
			      TREE_OPERAND (arg0, 0),
			      fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
					       fold_convert_loc (loc, cmp_type,
								 arg1)));
    }

  return NULL_TREE;
}
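
/* Illustrative sketch added for exposition; not part of the original
   fold-const.c.  The first transformation in fold_comparison at
   source level; the function name is hypothetical.  */
#if 0
int
example_compare (int x)
{
  /* With signed overflow undefined, "x + 3 < 10" becomes "x < 7"
     (new_const = 10 - 3); had the constant operation overflowed, the
     comparison would instead be decided against INT_MAX/INT_MIN.  */
  return x + 3 < 10;
}
#endif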
/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */

static tree
fold_mult_zconjz (location_t loc, tree type, tree expr)
{
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      expr = save_expr (expr);
      rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
      ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
    }

  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2_loc (loc, PLUS_EXPR, itype,
			 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
			 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
  return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
			  build_zero_cst (itype));
}
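
/* Illustrative sketch added for exposition; not part of the original
   fold-const.c.  The identity used above: for z = a + bi,
   conj(z) = a - bi, so z * conj(z) = a*a + b*b + 0i, which is what the
   COMPLEX_EXPR with a zero imaginary part expresses.  */
#if 0
#include <complex.h>
double
example_zconjz (double _Complex z)
{
  /* Folds to creal(z)*creal(z) + cimag(z)*cimag(z).  */
  return creal (z * conj (z));
}
#endif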
/* Subroutine of fold_binary.  If P is the value of EXPR, computes
   power-of-two M and (arbitrary) N such that M divides (P-N).  This condition
   guarantees that P and N have the same least significant log2(M) bits.
   N is not otherwise constrained.  In particular, N is not normalized to
   0 <= N < M as is common.  In general, the precise value of P is unknown.
   M is chosen as large as possible such that constant N can be determined.

   Returns M and sets *RESIDUE to N.

   If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
   account.  This is not always possible due to PR 35705.  */

static unsigned HOST_WIDE_INT
get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
				 bool allow_func_align)
{
  enum tree_code code;

  *residue = 0;

  code = TREE_CODE (expr);
  if (code == ADDR_EXPR)
    {
      unsigned int bitalign;
      get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
      *residue /= BITS_PER_UNIT;
      return bitalign / BITS_PER_UNIT;
    }
  else if (code == POINTER_PLUS_EXPR)
    {
      tree op0, op1;
      unsigned HOST_WIDE_INT modulus;
      enum tree_code inner_code;

      op0 = TREE_OPERAND (expr, 0);
      STRIP_NOPS (op0);
      modulus = get_pointer_modulus_and_residue (op0, residue,
						 allow_func_align);

      op1 = TREE_OPERAND (expr, 1);
      STRIP_NOPS (op1);
      inner_code = TREE_CODE (op1);
      if (inner_code == INTEGER_CST)
	{
	  *residue += TREE_INT_CST_LOW (op1);
	  return modulus;
	}
      else if (inner_code == MULT_EXPR)
	{
	  op1 = TREE_OPERAND (op1, 1);
	  if (TREE_CODE (op1) == INTEGER_CST)
	    {
	      unsigned HOST_WIDE_INT align;

	      /* Compute the greatest power-of-2 divisor of op1.  */
	      align = TREE_INT_CST_LOW (op1);
	      align &= -align;

	      /* If align is non-zero and less than *modulus, replace
		 *modulus with align.  If align is 0, then either op1 is 0
		 or the greatest power-of-2 divisor of op1 doesn't fit in an
		 unsigned HOST_WIDE_INT.  In either case, no additional
		 constraint is imposed.  */
	      if (align)
		modulus = MIN (modulus, align);

	      return modulus;
	    }
	}
    }

  /* If we get here, we were unable to determine anything useful about the
     expression.  */
  return 1;
}
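
/* Illustrative sketch added for exposition; not part of the original
   fold-const.c.  The intended meaning of the (modulus, residue) pair;
   the alignment values are examples only.  */
#if 0
/* If "buf" is known to be 16-byte aligned, then for the pointer
   expression buf + 5 the function returns modulus M = 16 with
   *residue N = 5: every possible value P of the pointer satisfies
   M | (P - N), i.e. P and 5 agree in their low log2(16) = 4 bits.  */
char buf[32] __attribute__ ((aligned (16)));
char *example_ptr = buf + 5;
#endif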
/* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
   CONSTRUCTOR ARG into array ELTS and return true if successful.  */

static bool
vec_cst_ctor_to_array (tree arg, tree *elts)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;

  if (TREE_CODE (arg) == VECTOR_CST)
    {
      for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
	elts[i] = VECTOR_CST_ELT (arg, i);
    }
  else if (TREE_CODE (arg) == CONSTRUCTOR)
    {
      constructor_elt *elt;

      FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
	if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
	  return false;
	else
	  elts[i] = elt->value;
    }
  else
    return false;

  for (; i < nelts; i++)
    elts[i]
      = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);

  return true;
}
/* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
   selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
   NULL_TREE otherwise.  */

static tree
fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
  tree *elts;
  bool need_ctor = false;

  gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
	      && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
  if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
      || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
    return NULL_TREE;

  elts = XALLOCAVEC (tree, nelts * 3);
  if (!vec_cst_ctor_to_array (arg0, elts)
      || !vec_cst_ctor_to_array (arg1, elts + nelts))
    return NULL_TREE;

  for (i = 0; i < nelts; i++)
    {
      if (!CONSTANT_CLASS_P (elts[sel[i]]))
	need_ctor = true;
      elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
    }

  if (need_ctor)
    {
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, nelts);
      for (i = 0; i < nelts; i++)
	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
      return build_constructor (type, v);
    }

  return build_vector (type, &elts[2 * nelts]);
}
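
/* Illustrative sketch added for exposition; not part of the original
   fold-const.c.  SEL indexes into the concatenation of ARG0 and ARG1;
   the values below are examples only.  */
#if 0
/* With arg0 = {1, 2, 3, 4}, arg1 = {5, 6, 7, 8} and sel = {0, 4, 1, 5}
   the folded result is {1, 5, 2, 6}; because every selected element is
   a constant, a VECTOR_CST (not a CONSTRUCTOR) is built.  */
typedef int v4si __attribute__ ((vector_size (16)));
v4si
example_perm (void)
{
  v4si a = {1, 2, 3, 4}, b = {5, 6, 7, 8};
  return __builtin_shuffle (a, b, (v4si) {0, 4, 1, 5});
}
#endif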
/* Try to fold a pointer difference of type TYPE between two address
   expressions of array references AREF0 and AREF1 using location LOC.
   Return a simplified expression for the difference or NULL_TREE.  */

static tree
fold_addr_of_array_ref_difference (location_t loc, tree type,
				   tree aref0, tree aref1)
{
  tree base0 = TREE_OPERAND (aref0, 0);
  tree base1 = TREE_OPERAND (aref1, 0);
  tree base_offset = build_int_cst (type, 0);

  /* If the bases are array references as well, recurse.  If the bases
     are pointer indirections compute the difference of the pointers.
     If the bases are equal, we are set.  */
  if ((TREE_CODE (base0) == ARRAY_REF
       && TREE_CODE (base1) == ARRAY_REF
       && (base_offset
	   = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
      || (INDIRECT_REF_P (base0)
	  && INDIRECT_REF_P (base1)
	  && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
					     TREE_OPERAND (base0, 0),
					     TREE_OPERAND (base1, 0))))
      || operand_equal_p (base0, base1, 0))
    {
      tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
      tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
      tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
      tree diff = build2 (MINUS_EXPR, type, op0, op1);
      return fold_build2_loc (loc, PLUS_EXPR, type,
			      base_offset,
			      fold_build2_loc (loc, MULT_EXPR, type,
					       diff, esz));
    }
  return NULL_TREE;
}
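
/* Illustrative sketch added for exposition; not part of the original
   fold-const.c.  The folded difference at source level; the function
   name is hypothetical and a 4-byte int is assumed.  */
#if 0
#include <stddef.h>
int a[100];
ptrdiff_t
example_aref_diff (long i, long j)
{
  /* The byte difference of the two addresses folds to
     (i - j) * sizeof (int), i.e. (i - j) * 4 here; the frontend then
     divides by the element size again for a plain pointer
     subtraction.  */
  return (char *) &a[i] - (char *) &a[j];
}
#endif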
/* If the real or vector real constant CST of type TYPE has an exact
   inverse, return it, else return NULL.  */

static tree
exact_inverse (tree type, tree cst)
{
  REAL_VALUE_TYPE r;
  tree unit_type, *elts;
  machine_mode mode;
  unsigned vec_nelts, i;

  switch (TREE_CODE (cst))
    {
    case REAL_CST:
      r = TREE_REAL_CST (cst);

      if (exact_real_inverse (TYPE_MODE (type), &r))
	return build_real (type, r);

      return NULL_TREE;

    case VECTOR_CST:
      vec_nelts = VECTOR_CST_NELTS (cst);
      elts = XALLOCAVEC (tree, vec_nelts);
      unit_type = TREE_TYPE (type);
      mode = TYPE_MODE (unit_type);

      for (i = 0; i < vec_nelts; i++)
	{
	  r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
	  if (!exact_real_inverse (mode, &r))
	    return NULL_TREE;
	  elts[i] = build_real (unit_type, r);
	}

      return build_vector (type, elts);

    default:
      return NULL_TREE;
    }
}
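
/* Illustrative sketch added for exposition; not part of the original
   fold-const.c.  Which constants have exact inverses: 1/4.0 is
   exactly 0.25 in binary floating point, so x / 4.0 can be rewritten
   as x * 0.25, while 1/3.0 is not exactly representable, so
   exact_inverse returns NULL for 3.0 and the division is kept.  */
#if 0
double
example_div (double x)
{
  return x / 4.0;	/* may be folded to x * 0.25 */
}
#endif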
/* Mask out the tz least significant bits of X of type TYPE where
   tz is the number of trailing zeroes in Y.  */
static wide_int
mask_with_tz (tree type, const wide_int &x, const wide_int &y)
{
  int tz = wi::ctz (y);
  if (tz > 0)
    return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
  return x;
}
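
/* Illustrative sketch added for exposition; not part of the original
   fold-const.c.  With y = 8 = 0b1000 there are tz = 3 trailing
   zeroes, so x = 0b10111 is masked to 0b10000; the low tz bits of x
   cannot survive an AND with y in any case.  The function name is
   hypothetical.  */
#if 0
unsigned
example_mask_with_tz (unsigned x)
{
  return x & ~7u;	/* keep only the bits that can matter in x & 8 */
}
#endif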
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					strict_overflow_p);

    case CALL_EXPR:
      {
	tree fndecl = get_callee_fndecl (t);
	if (!fndecl)
	  return false;
	if (flag_delete_null_pointer_checks && !flag_check_new
	    && DECL_IS_OPERATOR_NEW (fndecl)
	    && !TREE_NOTHROW (fndecl))
	  return true;
	if (flag_delete_null_pointer_checks
	    && lookup_attribute ("returns_nonnull",
				 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
	  return true;
	return alloca_call_p (t);
      }

    default:
      break;
    }
  return false;
}
/* Return true when T is an address and is known to be nonzero.
   Handle warnings about undefined signed overflow.  */

bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-zero"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  LOC is the location of the resulting expression.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

tree
fold_binary_loc (location_t loc,
		 enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree arg0, arg1, tem;
  tree t1 = NULL_TREE;
  bool strict_overflow_p;
  unsigned int prec;

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 2
	      && op0 != NULL_TREE
	      && op1 != NULL_TREE);

  arg0 = op0;
  arg1 = op1;

  /* Strip any conversions that don't change the mode.  This is
     safe for every expression, except for a comparison expression
     because its signedness is derived from its operands.  So, in
     the latter case, only strip conversions that don't change the
     signedness.  MIN_EXPR/MAX_EXPR also need signedness of arguments
     preserved.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation
     of the arguments so that their form can be studied.  In any
     cases, the appropriate type conversions should be put back in
     the tree that will get out of the constant folder.  */

  if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
    {
      STRIP_SIGN_NOPS (arg0);
      STRIP_SIGN_NOPS (arg1);
    }
  else
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
    }

  /* Note that TREE_CONSTANT isn't enough: static var addresses are
     constant but we can't do arithmetic on them.  */
  if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
    {
      if (kind == tcc_binary)
	{
	  /* Make sure type and arg0 have the same saturating flag.  */
	  gcc_assert (TYPE_SATURATING (type)
		      == TYPE_SATURATING (TREE_TYPE (arg0)));
	  tem = const_binop (code, arg0, arg1);
	}
      else if (kind == tcc_comparison)
	tem = fold_relational_const (code, type, arg0, arg1);
      else
	tem = NULL_TREE;

      if (tem != NULL_TREE)
	{
	  if (TREE_TYPE (tem) != type)
	    tem = fold_convert_loc (loc, type, tem);
	  return tem;
	}
    }

  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, code, type, op1, op0);

  /* Likewise if this is a comparison, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (kind == tcc_comparison
      && tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);

  tem = generic_simplify (loc, code, type, op0, op1);
  if (tem)
    return tem;

  /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     one of the operands is a comparison and the other is a comparison, a
     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
     code below would make the expression more complex.  Change it to a
     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && TREE_CODE (type) != VECTOR_TYPE
      && ((truth_value_p (TREE_CODE (arg0))
	   && (truth_value_p (TREE_CODE (arg1))
	       || (TREE_CODE (arg1) == BIT_AND_EXPR
		   && integer_onep (TREE_OPERAND (arg1, 1)))))
	  || (truth_value_p (TREE_CODE (arg1))
	      && (truth_value_p (TREE_CODE (arg0))
		  || (TREE_CODE (arg0) == BIT_AND_EXPR
		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
			     : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
			     : TRUTH_XOR_EXPR,
			     boolean_type_node,
			     fold_convert_loc (loc, boolean_type_node, arg0),
			     fold_convert_loc (loc, boolean_type_node, arg1));

      if (code == EQ_EXPR)
	tem = invert_truthvalue_loc (loc, tem);

      return fold_convert_loc (loc, type, tem);
    }
  if (TREE_CODE_CLASS (code) == tcc_binary
      || TREE_CODE_CLASS (code) == tcc_comparison)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
	{
	  tem = fold_build2_loc (loc, code, type,
				 fold_convert_loc (loc, TREE_TYPE (op0),
						   TREE_OPERAND (arg0, 1)),
				 op1);
	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
			     tem);
	}
      if (TREE_CODE (arg1) == COMPOUND_EXPR
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_build2_loc (loc, code, type, op0,
				 fold_convert_loc (loc, TREE_TYPE (op1),
						   TREE_OPERAND (arg1, 1)));
	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
			     tem);
	}

      if (TREE_CODE (arg0) == COND_EXPR
	  || TREE_CODE (arg0) == VEC_COND_EXPR
	  || COMPARISON_CLASS_P (arg0))
	{
	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
						     arg0, arg1,
						     /*cond_first_p=*/1);
	  if (tem != NULL_TREE)
	    return tem;
	}

      if (TREE_CODE (arg1) == COND_EXPR
	  || TREE_CODE (arg1) == VEC_COND_EXPR
	  || COMPARISON_CLASS_P (arg1))
	{
	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
						     arg1, arg0,
						     /*cond_first_p=*/0);
	  if (tem != NULL_TREE)
	    return tem;
	}
    }

  switch (code)
    {
    case MEM_REF:
      /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2].  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
	{
	  tree iref = TREE_OPERAND (arg0, 0);
	  return fold_build2 (MEM_REF, type,
			      TREE_OPERAND (iref, 0),
			      int_const_binop (PLUS_EXPR, arg1,
					       TREE_OPERAND (iref, 1)));
	}

      /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2].  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && handled_component_p (TREE_OPERAND (arg0, 0)))
	{
	  tree base;
	  HOST_WIDE_INT coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
						&coffset);
	  if (!base)
	    return NULL_TREE;
	  return fold_build2 (MEM_REF, type,
			      build_fold_addr_expr (base),
			      int_const_binop (PLUS_EXPR, arg1,
					       size_int (coffset)));
	}

      return NULL_TREE;
    case POINTER_PLUS_EXPR:
      /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
	  && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
	return fold_convert_loc (loc, type,
				 fold_build2_loc (loc, PLUS_EXPR, sizetype,
						  fold_convert_loc (loc, sizetype,
								    arg1),
						  fold_convert_loc (loc, sizetype,
								    arg0)));

      /* PTR_CST +p CST -> CST1.  */
      if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
				fold_convert_loc (loc, type, arg1));

      return NULL_TREE;

    case PLUS_EXPR:
      if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
	{
	  /* X + (X / CST) * -CST is X % CST.  */
	  if (TREE_CODE (arg1) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
	      && operand_equal_p (arg0,
				  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
	    {
	      tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
	      tree cst1 = TREE_OPERAND (arg1, 1);
	      tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
					  cst1, cst0);
	      if (sum && integer_zerop (sum))
		return fold_convert_loc (loc, type,
					 fold_build2_loc (loc, TRUNC_MOD_EXPR,
							  TREE_TYPE (arg0), arg0,
							  cst0));
	    }
	}

      /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
	 one.  Make sure the type is not saturating and has the signedness of
	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
	   || TREE_CODE (arg1) == MULT_EXPR)
	  && !TYPE_SATURATING (type)
	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
	{
	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
	  if (tem)
	    return tem;
	}
9908 if (! FLOAT_TYPE_P (type
))
9910 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9911 with a constant, and the two constants have no bits in common,
9912 we should treat this as a BIT_IOR_EXPR since this may produce more
9914 if (TREE_CODE (arg0
) == BIT_AND_EXPR
9915 && TREE_CODE (arg1
) == BIT_AND_EXPR
9916 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
9917 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
9918 && wi::bit_and (TREE_OPERAND (arg0
, 1),
9919 TREE_OPERAND (arg1
, 1)) == 0)
9921 code
= BIT_IOR_EXPR
;
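          /* Illustrative note (editorial addition): with disjoint mask
             constants, e.g.
                 (x & 0xF0) + (y & 0x0F)
             no carry can propagate between the two masked halves, so the
             addition is retried as a BIT_IOR_EXPR by the goto above.  */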
          /* Reassociate (plus (plus (mult) (foo)) (mult)) as
             (plus (plus (mult) (mult)) (foo)) so that we can
             take advantage of the factoring cases below.  */
          if (TYPE_OVERFLOW_WRAPS (type)
              && (((TREE_CODE (arg0) == PLUS_EXPR
                    || TREE_CODE (arg0) == MINUS_EXPR)
                   && TREE_CODE (arg1) == MULT_EXPR)
                  || ((TREE_CODE (arg1) == PLUS_EXPR
                       || TREE_CODE (arg1) == MINUS_EXPR)
                      && TREE_CODE (arg0) == MULT_EXPR)))
            {
              tree parg0, parg1, parg, marg;
              enum tree_code pcode;

              if (TREE_CODE (arg1) == MULT_EXPR)
                parg = arg0, marg = arg1;
              else
                parg = arg1, marg = arg0;
              pcode = TREE_CODE (parg);
              parg0 = TREE_OPERAND (parg, 0);
              parg1 = TREE_OPERAND (parg, 1);
              STRIP_NOPS (parg0);
              STRIP_NOPS (parg1);

              if (TREE_CODE (parg0) == MULT_EXPR
                  && TREE_CODE (parg1) != MULT_EXPR)
                return fold_build2_loc (loc, pcode, type,
                                        fold_build2_loc (loc, PLUS_EXPR, type,
                                                         fold_convert_loc (loc, type,
                                                                           parg0),
                                                         fold_convert_loc (loc, type,
                                                                           marg)),
                                        fold_convert_loc (loc, type, parg1));
              if (TREE_CODE (parg0) != MULT_EXPR
                  && TREE_CODE (parg1) == MULT_EXPR)
                return
                  fold_build2_loc (loc, PLUS_EXPR, type,
                                   fold_convert_loc (loc, type, parg0),
                                   fold_build2_loc (loc, pcode, type,
                                                    fold_convert_loc (loc, type, marg),
                                                    fold_convert_loc (loc, type,
                                                                      parg1)));
            }
        }
      else
        {
          /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
             to __complex__ ( x, y ).  This is not the same for SNaNs or
             if signed zeros are involved.  */
          if (!HONOR_SNANS (element_mode (arg0))
              && !HONOR_SIGNED_ZEROS (element_mode (arg0))
              && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
            {
              tree rtype = TREE_TYPE (TREE_TYPE (arg0));
              tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
              tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
              bool arg0rz = false, arg0iz = false;
              if ((arg0r && (arg0rz = real_zerop (arg0r)))
                  || (arg0i && (arg0iz = real_zerop (arg0i))))
                {
                  tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
                  tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
                  if (arg0rz && arg1i && real_zerop (arg1i))
                    {
                      tree rp = arg1r ? arg1r
                        : build1 (REALPART_EXPR, rtype, arg1);
                      tree ip = arg0i ? arg0i
                        : build1 (IMAGPART_EXPR, rtype, arg0);
                      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                    }
                  else if (arg0iz && arg1r && real_zerop (arg1r))
                    {
                      tree rp = arg0r ? arg0r
                        : build1 (REALPART_EXPR, rtype, arg0);
                      tree ip = arg1i ? arg1i
                        : build1 (IMAGPART_EXPR, rtype, arg1);
                      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                    }
                }
            }
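          /* Illustrative note (editorial addition): for _Complex double
             values with known zero components, e.g.
                 __complex__ (x, 0.0) + __complex__ (0.0, y)
             the code above pairs the non-zero real and imaginary parts into
             __complex__ (x, y); it is skipped when SNaNs or signed zeros
             must be honored, since 0.0 + (-0.0) is not -0.0.  */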
          if (flag_unsafe_math_optimizations
              && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
              && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
              && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
            return tem;

          /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
             We associate floats only if the user has specified
             -fassociative-math.  */
          if (flag_associative_math
              && TREE_CODE (arg1) == PLUS_EXPR
              && TREE_CODE (arg0) != MULT_EXPR)
            {
              tree tree10 = TREE_OPERAND (arg1, 0);
              tree tree11 = TREE_OPERAND (arg1, 1);
              if (TREE_CODE (tree11) == MULT_EXPR
                  && TREE_CODE (tree10) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
                  return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
                }
            }
          /* Convert (b*c + d*e) + a into b*c + (d*e + a).
             We associate floats only if the user has specified
             -fassociative-math.  */
          if (flag_associative_math
              && TREE_CODE (arg0) == PLUS_EXPR
              && TREE_CODE (arg1) != MULT_EXPR)
            {
              tree tree00 = TREE_OPERAND (arg0, 0);
              tree tree01 = TREE_OPERAND (arg0, 1);
              if (TREE_CODE (tree01) == MULT_EXPR
                  && TREE_CODE (tree00) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
                  return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
                }
            }
        }

    bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
         is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
         is a rotate of A by B bits.  */
      {
        enum tree_code code0, code1;
        tree rtype;
        code0 = TREE_CODE (arg0);
        code1 = TREE_CODE (arg1);
        if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
             || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
            && operand_equal_p (TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg1, 0), 0)
            && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
                TYPE_UNSIGNED (rtype))
            /* Only create rotates in complete modes.  Other cases are not
               expanded properly.  */
            && (element_precision (rtype)
                == element_precision (TYPE_MODE (rtype))))
          {
            tree tree01, tree11;
            enum tree_code code01, code11;

            tree01 = TREE_OPERAND (arg0, 1);
            tree11 = TREE_OPERAND (arg1, 1);
            STRIP_NOPS (tree01);
            STRIP_NOPS (tree11);
            code01 = TREE_CODE (tree01);
            code11 = TREE_CODE (tree11);
            if (code01 == INTEGER_CST
                && code11 == INTEGER_CST
                && (wi::to_widest (tree01) + wi::to_widest (tree11)
                    == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
              {
                tem = build2_loc (loc, LROTATE_EXPR,
                                  TREE_TYPE (TREE_OPERAND (arg0, 0)),
                                  TREE_OPERAND (arg0, 0),
                                  code0 == LSHIFT_EXPR ? tree01 : tree11);
                return fold_convert_loc (loc, type, tem);
              }
            else if (code11 == MINUS_EXPR)
              {
                tree tree110, tree111;
                tree110 = TREE_OPERAND (tree11, 0);
                tree111 = TREE_OPERAND (tree11, 1);
                STRIP_NOPS (tree110);
                STRIP_NOPS (tree111);
                if (TREE_CODE (tree110) == INTEGER_CST
                    && 0 == compare_tree_int (tree110,
                                              element_precision
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree01, tree111, 0))
                  return
                    fold_convert_loc (loc, type,
                                      build2 ((code0 == LSHIFT_EXPR
                                               ? LROTATE_EXPR
                                               : RROTATE_EXPR),
                                              TREE_TYPE (TREE_OPERAND (arg0, 0)),
                                              TREE_OPERAND (arg0, 0), tree01));
              }
            else if (code01 == MINUS_EXPR)
              {
                tree tree010, tree011;
                tree010 = TREE_OPERAND (tree01, 0);
                tree011 = TREE_OPERAND (tree01, 1);
                STRIP_NOPS (tree010);
                STRIP_NOPS (tree011);
                if (TREE_CODE (tree010) == INTEGER_CST
                    && 0 == compare_tree_int (tree010,
                                              element_precision
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree11, tree011, 0))
                  return fold_convert_loc
                    (loc, type,
                     build2 ((code0 != LSHIFT_EXPR
                              ? LROTATE_EXPR
                              : RROTATE_EXPR),
                             TREE_TYPE (TREE_OPERAND (arg0, 0)),
                             TREE_OPERAND (arg0, 0), tree11));
              }
          }
      }
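      /* Illustrative note (editorial addition): for a 32-bit unsigned x,
             (x << 8) + (x >> 24)
         satisfies 8 + 24 == element_precision, so it is rewritten as a
         single LROTATE_EXPR (rotate left by 8).  The MINUS_EXPR forms
         catch the variable-count idiom (x << n) + (x >> (32 - n)).  */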
    associate:
      /* In most languages, can't associate operations on floats through
         parentheses.  Rather than remember where the parentheses were, we
         don't associate floats at all, unless the user has specified
         -fassociative-math.
         And, we need to make sure type is not saturating.  */

      if ((! FLOAT_TYPE_P (type) || flag_associative_math)
          && !TYPE_SATURATING (type))
        {
          tree var0, con0, lit0, minus_lit0;
          tree var1, con1, lit1, minus_lit1;
          tree atype = type;
          bool ok = true;

          /* Split both trees into variables, constants, and literals.  Then
             associate each group together, the constants with literals,
             then the result with variables.  This increases the chances of
             literals being recombined later and of generating relocatable
             expressions for the sum of a constant and literal.  */
          var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
          var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
                             code == MINUS_EXPR);

          /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
          if (code == MINUS_EXPR)
            code = PLUS_EXPR;

          /* With undefined overflow prefer doing association in a type
             which wraps on overflow, if that is one of the operand types.  */
          if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
              || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
            {
              if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
                  && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
                atype = TREE_TYPE (arg0);
              else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
                       && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
                atype = TREE_TYPE (arg1);
              gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
            }

          /* With undefined overflow we can only associate constants with one
             variable, and constants whose association doesn't overflow.  */
          if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
              || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
            {
              if (var0 && var1)
                {
                  tree tmp0 = var0;
                  tree tmp1 = var1;

                  if (TREE_CODE (tmp0) == NEGATE_EXPR)
                    tmp0 = TREE_OPERAND (tmp0, 0);
                  if (CONVERT_EXPR_P (tmp0)
                      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
                      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
                          <= TYPE_PRECISION (atype)))
                    tmp0 = TREE_OPERAND (tmp0, 0);
                  if (TREE_CODE (tmp1) == NEGATE_EXPR)
                    tmp1 = TREE_OPERAND (tmp1, 0);
                  if (CONVERT_EXPR_P (tmp1)
                      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
                      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
                          <= TYPE_PRECISION (atype)))
                    tmp1 = TREE_OPERAND (tmp1, 0);
                  /* The only case we can still associate with two variables
                     is if they are the same, modulo negation and bit-pattern
                     preserving conversions.  */
                  if (!operand_equal_p (tmp0, tmp1, 0))
                    ok = false;
                }
            }

          /* Only do something if we found more than two objects.  Otherwise,
             nothing has changed and we risk infinite recursion.  */
          if (ok
              && (2 < ((var0 != 0) + (var1 != 0)
                       + (con0 != 0) + (con1 != 0)
                       + (lit0 != 0) + (lit1 != 0)
                       + (minus_lit0 != 0) + (minus_lit1 != 0))))
            {
              bool any_overflows = false;
              if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
              if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
              if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
              if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
              var0 = associate_trees (loc, var0, var1, code, atype);
              con0 = associate_trees (loc, con0, con1, code, atype);
              lit0 = associate_trees (loc, lit0, lit1, code, atype);
              minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
                                            code, atype);

              /* Preserve the MINUS_EXPR if the negative part of the literal is
                 greater than the positive part.  Otherwise, the multiplicative
                 folding code (i.e. extract_muldiv) may be fooled in case
                 unsigned constants are subtracted, like in the following
                 example: ((X*2 + 4) - 8U)/2.  */
              if (minus_lit0 && lit0)
                {
                  if (TREE_CODE (lit0) == INTEGER_CST
                      && TREE_CODE (minus_lit0) == INTEGER_CST
                      && tree_int_cst_lt (lit0, minus_lit0))
                    {
                      minus_lit0 = associate_trees (loc, minus_lit0, lit0,
                                                    MINUS_EXPR, atype);
                      lit0 = 0;
                    }
                  else
                    {
                      lit0 = associate_trees (loc, lit0, minus_lit0,
                                              MINUS_EXPR, atype);
                      minus_lit0 = 0;
                    }
                }

              /* Don't introduce overflows through reassociation.  */
              if (!any_overflows
                  && ((lit0 && TREE_OVERFLOW (lit0))
                      || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
                return NULL_TREE;

              if (minus_lit0)
                {
                  if (con0 == 0)
                    return
                      fold_convert_loc (loc, type,
                                        associate_trees (loc, var0, minus_lit0,
                                                         MINUS_EXPR, atype));
                  else
                    {
                      con0 = associate_trees (loc, con0, minus_lit0,
                                              MINUS_EXPR, atype);
                      return
                        fold_convert_loc (loc, type,
                                          associate_trees (loc, var0, con0,
                                                           PLUS_EXPR, atype));
                    }
                }

              con0 = associate_trees (loc, con0, lit0, code, atype);
              return
                fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
                                                              code, atype));
            }
        }

      return NULL_TREE;
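      /* Illustrative note (editorial addition): the associate code splits
         e.g. (x + 1) + 2 into a variable part "x" and literals 1 and 2,
         recombines the literals, and rebuilds x + 3; the rebuild is only
         attempted when more than two objects were found, to avoid
         infinite recursion.  */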
    case MINUS_EXPR:
      /* Pointer simplifications for subtraction, simple reassociations.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
        {
          /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
          if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
              && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
            {
              tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
              tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
              tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
              tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
              return fold_build2_loc (loc, PLUS_EXPR, type,
                                      fold_build2_loc (loc, MINUS_EXPR, type,
                                                       arg00, arg10),
                                      fold_build2_loc (loc, MINUS_EXPR, type,
                                                       arg01, arg11));
            }
          /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1
             simplifies.  */
          else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
            {
              tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
              tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
              tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
                                          fold_convert_loc (loc, type, arg1));
              if (tmp)
                return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
            }
          /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
             simplifies.  */
          else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
            {
              tree arg10 = fold_convert_loc (loc, type,
                                             TREE_OPERAND (arg1, 0));
              tree arg11 = fold_convert_loc (loc, type,
                                             TREE_OPERAND (arg1, 1));
              tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
                                          fold_convert_loc (loc, type, arg0),
                                          arg10);
              if (tmp)
                return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
            }
        }
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && negate_expr_p (arg1)
          && reorder_operands_p (arg0, arg1))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg1)),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)));

      /* Convert -A - 1 to ~A.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && integer_each_onep (arg1)
          && !TYPE_OVERFLOW_TRAPS (type))
        return fold_build1_loc (loc, BIT_NOT_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)));

      /* Convert -1 - A to ~A.  */
      if (TREE_CODE (type) != COMPLEX_TYPE
          && integer_all_onesp (arg0))
        return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);

      /* X - (X / Y) * Y is X % Y.  */
      if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
          && TREE_CODE (arg1) == MULT_EXPR
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
          && operand_equal_p (arg0,
                              TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
                              TREE_OPERAND (arg1, 1), 0))
        return
          fold_convert_loc (loc, type,
                            fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
                                             arg0, TREE_OPERAND (arg1, 1)));
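      /* Illustrative note (editorial addition): for integer x and y,
             x - (x / y) * y
         is exactly the truncated remainder, so it folds to x % y.  */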
      if (! FLOAT_TYPE_P (type))
        {
          /* Fold A - (A & B) into ~B & A.  */
          if (!TREE_SIDE_EFFECTS (arg0)
              && TREE_CODE (arg1) == BIT_AND_EXPR)
            {
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
                {
                  tree arg10 = fold_convert_loc (loc, type,
                                                 TREE_OPERAND (arg1, 0));
                  return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                          fold_build1_loc (loc, BIT_NOT_EXPR,
                                                           type, arg10),
                                          fold_convert_loc (loc, type, arg0));
                }
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
                {
                  tree arg11 = fold_convert_loc (loc,
                                                 type, TREE_OPERAND (arg1, 1));
                  return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                          fold_build1_loc (loc, BIT_NOT_EXPR,
                                                           type, arg11),
                                          fold_convert_loc (loc, type, arg0));
                }
            }

          /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
             any power of 2 minus 1.  */
          if (TREE_CODE (arg0) == BIT_AND_EXPR
              && TREE_CODE (arg1) == BIT_AND_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 0), 0))
            {
              tree mask0 = TREE_OPERAND (arg0, 1);
              tree mask1 = TREE_OPERAND (arg1, 1);
              tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);

              if (operand_equal_p (tem, mask1, 0))
                {
                  tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
                                         TREE_OPERAND (arg0, 0), mask1);
                  return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
                }
            }
        }

      /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
         __complex__ ( x, -y ).  This is not the same for SNaNs or if
         signed zeros are involved.  */
      if (!HONOR_SNANS (element_mode (arg0))
          && !HONOR_SIGNED_ZEROS (element_mode (arg0))
          && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
        {
          tree rtype = TREE_TYPE (TREE_TYPE (arg0));
          tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
          tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
          bool arg0rz = false, arg0iz = false;
          if ((arg0r && (arg0rz = real_zerop (arg0r)))
              || (arg0i && (arg0iz = real_zerop (arg0i))))
            {
              tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
              tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
              if (arg0rz && arg1i && real_zerop (arg1i))
                {
                  tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
                                             arg1r ? arg1r
                                             : build1 (REALPART_EXPR, rtype, arg1));
                  tree ip = arg0i ? arg0i
                    : build1 (IMAGPART_EXPR, rtype, arg0);
                  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                }
              else if (arg0iz && arg1r && real_zerop (arg1r))
                {
                  tree rp = arg0r ? arg0r
                    : build1 (REALPART_EXPR, rtype, arg0);
                  tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
                                             arg1i ? arg1i
                                             : build1 (IMAGPART_EXPR, rtype, arg1));
                  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                }
            }
        }

      /* A - B -> A + (-B) if B is easily negatable.  */
      if (negate_expr_p (arg1)
          && !TYPE_OVERFLOW_SANITIZED (type)
          && ((FLOAT_TYPE_P (type)
               /* Avoid this transformation if B is a positive REAL_CST.  */
               && (TREE_CODE (arg1) != REAL_CST
                   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
              || INTEGRAL_TYPE_P (type)))
        return fold_build2_loc (loc, PLUS_EXPR, type,
                                fold_convert_loc (loc, type, arg0),
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg1)));

      /* Try folding difference of addresses.  */
      {
        HOST_WIDE_INT diff;

        if ((TREE_CODE (arg0) == ADDR_EXPR
             || TREE_CODE (arg1) == ADDR_EXPR)
            && ptr_difference_const (arg0, arg1, &diff))
          return build_int_cst_type (type, diff);
      }

      /* Fold &a[i] - &a[j] to i-j.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
          && TREE_CODE (arg1) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
        {
          tree tem = fold_addr_of_array_ref_difference (loc, type,
                                                        TREE_OPERAND (arg0, 0),
                                                        TREE_OPERAND (arg1, 0));
          if (tem)
            return tem;
        }
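      /* Illustrative note (editorial addition): for an array "int a[N]",
         the fold above reduces &a[i] - &a[j] to an expression in the
         indexes i and j, letting the helper handle the element-size
         scaling instead of keeping the address arithmetic.  */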
      if (FLOAT_TYPE_P (type)
          && flag_unsafe_math_optimizations
          && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
          && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
          && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
        return tem;

      /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
         one.  Make sure the type is not saturating and has the signedness of
         the stripped operands, as fold_plusminus_mult_expr will re-associate.
         ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
           || TREE_CODE (arg1) == MULT_EXPR)
          && !TYPE_SATURATING (type)
          && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
          && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
          && (!FLOAT_TYPE_P (type) || flag_associative_math))
        {
          tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
          if (tem)
            return tem;
        }

      goto associate;

    case MULT_EXPR:
      /* (-A) * (-B) -> A * B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold_build2_loc (loc, MULT_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)),
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold_build2_loc (loc, MULT_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg0)),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));

      if (! FLOAT_TYPE_P (type))
        {
          /* Transform x * -C into -x * C if x is easily negatable.  */
          if (TREE_CODE (arg1) == INTEGER_CST
              && tree_int_cst_sgn (arg1) == -1
              && negate_expr_p (arg0)
              && (tem = negate_expr (arg1)) != arg1
              && !TREE_OVERFLOW (tem))
            return fold_build2_loc (loc, MULT_EXPR, type,
                                    fold_convert_loc (loc, type,
                                                      negate_expr (arg0)),
                                    tem);

          /* (a * (1 << b)) is (a << b)  */
          if (TREE_CODE (arg1) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg1, 0)))
            return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
                                    TREE_OPERAND (arg1, 1));
          if (TREE_CODE (arg0) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg0, 0)))
            return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
                                    TREE_OPERAND (arg0, 1));

          /* (A + A) * C -> A * 2 * C  */
          if (TREE_CODE (arg0) == PLUS_EXPR
              && TREE_CODE (arg1) == INTEGER_CST
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg0, 1), 0))
            return fold_build2_loc (loc, MULT_EXPR, type,
                                    omit_one_operand_loc (loc, type,
                                                          TREE_OPERAND (arg0, 0),
                                                          TREE_OPERAND (arg0, 1)),
                                    fold_build2_loc (loc, MULT_EXPR, type,
                                                     build_int_cst (type, 2), arg1));
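          /* Illustrative note (editorial addition): when both addends are
             the same expression, e.g. (a + a) * 3, the fold above rewrites
             it as a * (2 * 3); omit_one_operand_loc keeps any side effects
             of the duplicated operand alive.  */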
          /* ((T) (X /[ex] C)) * C cancels out if the conversion is
             sign-changing only.  */
          if (TREE_CODE (arg1) == INTEGER_CST
              && TREE_CODE (arg0) == EXACT_DIV_EXPR
              && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
            return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));

          strict_overflow_p = false;
          if (TREE_CODE (arg1) == INTEGER_CST
              && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                             &strict_overflow_p)))
            {
              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying "
                                        "multiplication"),
                                       WARN_STRICT_OVERFLOW_MISC);
              return fold_convert_loc (loc, type, tem);
            }

          /* Optimize z * conj(z) for integer complex numbers.  */
          if (TREE_CODE (arg0) == CONJ_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
            return fold_mult_zconjz (loc, type, arg1);
          if (TREE_CODE (arg1) == CONJ_EXPR
              && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
            return fold_mult_zconjz (loc, type, arg0);
        }
      else
        {
          /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may change
             the result for floating point types due to rounding so it is
             applied only if -fassociative-math was specified.  */
          if (flag_associative_math
              && TREE_CODE (arg0) == RDIV_EXPR
              && TREE_CODE (arg1) == REAL_CST
              && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
            {
              tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
                                      arg1);
              if (tem)
                return fold_build2_loc (loc, RDIV_EXPR, type, tem,
                                        TREE_OPERAND (arg0, 1));
            }

          /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
          if (operand_equal_p (arg0, arg1, 0))
            {
              tree tem = fold_strip_sign_ops (arg0);
              if (tem != NULL_TREE)
                {
                  tem = fold_convert_loc (loc, type, tem);
                  return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
                }
            }

          /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
             This is not the same for NaNs or if signed zeros are
             involved.  */
          if (!HONOR_NANS (element_mode (arg0))
              && !HONOR_SIGNED_ZEROS (element_mode (arg0))
              && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
              && TREE_CODE (arg1) == COMPLEX_CST
              && real_zerop (TREE_REALPART (arg1)))
            {
              tree rtype = TREE_TYPE (TREE_TYPE (arg0));
              if (real_onep (TREE_IMAGPART (arg1)))
                return
                  fold_build2_loc (loc, COMPLEX_EXPR, type,
                                   negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
                                                                 rtype, arg0)),
                                   fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
              else if (real_minus_onep (TREE_IMAGPART (arg1)))
                return
                  fold_build2_loc (loc, COMPLEX_EXPR, type,
                                   fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
                                   negate_expr (fold_build1_loc (loc, REALPART_EXPR,
                                                                 rtype, arg0)));
            }

          /* Optimize z * conj(z) for floating point complex numbers.
             Guarded by flag_unsafe_math_optimizations as non-finite
             imaginary components don't produce scalar results.  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg0) == CONJ_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
            return fold_mult_zconjz (loc, type, arg1);
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg1) == CONJ_EXPR
              && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
            return fold_mult_zconjz (loc, type, arg0);

          if (flag_unsafe_math_optimizations)
            {
              enum built_in_function fcode0 = builtin_mathfn_code (arg0);
              enum built_in_function fcode1 = builtin_mathfn_code (arg1);

              /* Optimizations of root(...)*root(...).  */
              if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
                {
                  tree rootfn, arg;
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);

                  /* Optimize sqrt(x)*sqrt(x) as x.  */
                  if (BUILTIN_SQRT_P (fcode0)
                      && operand_equal_p (arg00, arg10, 0)
                      && ! HONOR_SNANS (element_mode (type)))
                    return arg00;

                  /* Optimize root(x)*root(y) as root(x*y).  */
                  rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
                  return build_call_expr_loc (loc, rootfn, 1, arg);
                }

              /* Optimize expN(x)*expN(y) as expN(x+y).  */
              if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
                {
                  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
                                              CALL_EXPR_ARG (arg0, 0),
                                              CALL_EXPR_ARG (arg1, 0));
                  return build_call_expr_loc (loc, expfn, 1, arg);
                }
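              /* Illustrative note (editorial addition): under
                 -funsafe-math-optimizations the call folds above merge, e.g.
                     sqrt (x) * sqrt (x)  ->  x
                     sqrt (x) * sqrt (y)  ->  sqrt (x * y)
                     exp (x) * exp (y)    ->  exp (x + y)
                 which is not exact in IEEE arithmetic, hence the flag.  */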
              /* Optimizations of pow(...)*pow(...).  */
              if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
                  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
                  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
                {
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg01 = CALL_EXPR_ARG (arg0, 1);
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);
                  tree arg11 = CALL_EXPR_ARG (arg1, 1);

                  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
                  if (operand_equal_p (arg01, arg11, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      tree arg = fold_build2_loc (loc, MULT_EXPR, type,
                                                  arg00, arg10);
                      return build_call_expr_loc (loc, powfn, 2, arg, arg01);
                    }

                  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
                  if (operand_equal_p (arg00, arg10, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
                                                  arg01, arg11);
                      return build_call_expr_loc (loc, powfn, 2, arg00, arg);
                    }
                }

              /* Optimize tan(x)*cos(x) as sin(x).  */
              if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
                   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
                   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
                   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
                   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
                   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
                  && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                      CALL_EXPR_ARG (arg1, 0), 0))
                {
                  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);

                  if (sinfn != NULL_TREE)
                    return build_call_expr_loc (loc, sinfn, 1,
                                                CALL_EXPR_ARG (arg0, 0));
                }

              /* Optimize x*pow(x,c) as pow(x,c+1).  */
              if (fcode1 == BUILT_IN_POW
                  || fcode1 == BUILT_IN_POWF
                  || fcode1 == BUILT_IN_POWL)
                {
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);
                  tree arg11 = CALL_EXPR_ARG (arg1, 1);
                  if (TREE_CODE (arg11) == REAL_CST
                      && !TREE_OVERFLOW (arg11)
                      && operand_equal_p (arg0, arg10, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
                      REAL_VALUE_TYPE c;
                      tree arg;

                      c = TREE_REAL_CST (arg11);
                      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                      arg = build_real (type, c);
                      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
                    }
                }

              /* Optimize pow(x,c)*x as pow(x,c+1).  */
              if (fcode0 == BUILT_IN_POW
                  || fcode0 == BUILT_IN_POWF
                  || fcode0 == BUILT_IN_POWL)
                {
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg01 = CALL_EXPR_ARG (arg0, 1);
                  if (TREE_CODE (arg01) == REAL_CST
                      && !TREE_OVERFLOW (arg01)
                      && operand_equal_p (arg1, arg00, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      REAL_VALUE_TYPE c;
                      tree arg;

                      c = TREE_REAL_CST (arg01);
                      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                      arg = build_real (type, c);
                      return build_call_expr_loc (loc, powfn, 2, arg1, arg);
                    }
                }

              /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x.  */
              if (!in_gimple_form
                  && optimize
                  && operand_equal_p (arg0, arg1, 0))
                {
                  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

                  if (powfn)
                    {
                      tree arg = build_real (type, dconst2);
                      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
                    }
                }
            }
        }
      goto associate;

    case BIT_IOR_EXPR:
    bit_ior:
      /* ~X | X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          t1 = build_zero_cst (type);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg1);
        }

      /* X | ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          t1 = build_zero_cst (type);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg0);
        }

      /* Canonicalize (X & C1) | C2.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          int width = TYPE_PRECISION (type), w;
          wide_int c1 = TREE_OPERAND (arg0, 1);
          wide_int c2 = arg1;

          /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
          if ((c1 & c2) == c1)
            return omit_one_operand_loc (loc, type, arg1,
                                         TREE_OPERAND (arg0, 0));

          wide_int msk = wi::mask (width, false,
                                   TYPE_PRECISION (TREE_TYPE (arg1)));

          /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
          if (msk.and_not (c1 | c2) == 0)
            return fold_build2_loc (loc, BIT_IOR_EXPR, type,
                                    TREE_OPERAND (arg0, 0), arg1);

          /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
             unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
             mode which allows further optimizations.  */
          c1 &= msk;
          c2 &= msk;
          wide_int c3 = c1.and_not (c2);
          for (w = BITS_PER_UNIT; w <= width; w <<= 1)
            {
              wide_int mask = wi::mask (w, false,
                                        TYPE_PRECISION (type));
              if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
                {
                  c3 = mask;
                  break;
                }
            }

          if (c3 != c1)
            return fold_build2_loc (loc, BIT_IOR_EXPR, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, type,
                                                     TREE_OPERAND (arg0, 0),
                                                     wide_int_to_tree (type,
                                                                       c3)),
                                    arg1);
        }
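      /* Illustrative note (editorial addition): for a 32-bit int x,
             (x & 0x03) | 0x0F   folds to   0x0F      (C1 & C2 == C1), and
             (x & 0x33) | 0x0F   folds to   (x & 0x30) | 0x0F,
         dropping the C1 bits already provided by C2, unless widening C1
         would itself form a cheaper mode mask such as 0xFF.  */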
      /* (X & ~Y) | (~X & Y) is X ^ Y */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == BIT_AND_EXPR)
        {
          tree a0, a1, l0, l1, n0, n1;

          a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
          a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));

          l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));

          n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
          n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);

          if ((operand_equal_p (n0, a0, 0)
               && operand_equal_p (n1, a1, 0))
              || (operand_equal_p (n0, a1, 0)
                  && operand_equal_p (n1, a0, 0)))
            return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
        }

      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
        return t1;

      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

         This results in more efficient code for machines without a NAND
         instruction.  Combine will canonicalize to the first form
         which will allow use of NAND instructions provided by the
         backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        {
          return
            fold_build1_loc (loc, BIT_NOT_EXPR, type,
                             build2 (BIT_AND_EXPR, type,
                                     fold_convert_loc (loc, type,
                                                       TREE_OPERAND (arg0, 0)),
                                     fold_convert_loc (loc, type,
                                                       TREE_OPERAND (arg1, 0))));
        }

      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_XOR_EXPR:
      /* ~X ^ X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          t1 = build_zero_cst (type);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg1);
        }

      /* X ^ ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          t1 = build_zero_cst (type);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg0);
        }

      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
         with a constant, and the two constants have no bits in common,
         we should treat this as a BIT_IOR_EXPR since this may produce more
         simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
          && wi::bit_and (TREE_OPERAND (arg0, 1),
                          TREE_OPERAND (arg1, 1)) == 0)
        {
          code = BIT_IOR_EXPR;
          goto bit_ior;
        }

      /* (X | Y) ^ X -> Y & ~ X  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          tree t2 = TREE_OPERAND (arg0, 1);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
                                arg1);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                                fold_convert_loc (loc, type, t2),
                                fold_convert_loc (loc, type, t1));
          return t1;
        }

      /* (Y | X) ^ X -> Y & ~ X  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tree t2 = TREE_OPERAND (arg0, 0);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
                                arg1);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                                fold_convert_loc (loc, type, t2),
                                fold_convert_loc (loc, type, t1));
          return t1;
        }

      /* X ^ (X | Y) -> Y & ~ X  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
        {
          tree t2 = TREE_OPERAND (arg1, 1);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
                                arg0);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                                fold_convert_loc (loc, type, t2),
                                fold_convert_loc (loc, type, t1));
          return t1;
        }

      /* X ^ (Y | X) -> Y & ~ X  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
        {
          tree t2 = TREE_OPERAND (arg1, 0);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
                                arg0);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                                fold_convert_loc (loc, type, t2),
                                fold_convert_loc (loc, type, t1));
          return t1;
        }

      /* Convert ~X ^ ~Y to X ^ Y.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        return fold_build2_loc (loc, code, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));

      /* Convert ~X ^ C to X ^ ~C.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == INTEGER_CST)
        return fold_build2_loc (loc, code, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)),
                                fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));

      /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && INTEGRAL_TYPE_P (type)
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_onep (arg1))
        return fold_build2_loc (loc, EQ_EXPR, type, arg0,
                                build_zero_cst (TREE_TYPE (arg0)));
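      /* Illustrative note (editorial addition): flipping the low bit and
         testing it is the same as testing for evenness, so for an int x,
             (x & 1) ^ 1   folds to   (x & 1) == 0.  */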
      /* Fold (X & Y) ^ Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg1));
        }
      /* Fold (X & Y) ^ X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg1));
        }
      /* Fold X ^ (X & Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_convert_loc (loc, type, arg0),
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
        }
      /* Fold X ^ (Y & X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg0));
        }

      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_AND_EXPR:
      /* ~X & X, (X == 0) & X, and !X & X are always zero.  */
      if ((TREE_CODE (arg0) == BIT_NOT_EXPR
           || TREE_CODE (arg0) == TRUTH_NOT_EXPR
           || (TREE_CODE (arg0) == EQ_EXPR
               && integer_zerop (TREE_OPERAND (arg0, 1))))
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

      /* X & ~X , X & (X == 0), and X & !X are always zero.  */
      if ((TREE_CODE (arg1) == BIT_NOT_EXPR
           || TREE_CODE (arg1) == TRUTH_NOT_EXPR
           || (TREE_CODE (arg1) == EQ_EXPR
               && integer_zerop (TREE_OPERAND (arg1, 1))))
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && INTEGRAL_TYPE_P (type)
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_onep (arg1))
        {
          tree tem2;
          tem = TREE_OPERAND (arg0, 0);
          tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
          tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
                                  tem, tem2);
          return fold_build2_loc (loc, EQ_EXPR, type, tem2,
                                  build_zero_cst (TREE_TYPE (tem)));
        }
      /* Fold ~X & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && INTEGRAL_TYPE_P (type)
          && integer_onep (arg1))
        {
          tree tem2;
          tem = TREE_OPERAND (arg0, 0);
          tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
          tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
                                  tem, tem2);
          return fold_build2_loc (loc, EQ_EXPR, type, tem2,
                                  build_zero_cst (TREE_TYPE (tem)));
        }
      /* Fold !X & 1 as X == 0.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && integer_onep (arg1))
        {
          tem = TREE_OPERAND (arg0, 0);
          return fold_build2_loc (loc, EQ_EXPR, type, tem,
                                  build_zero_cst (TREE_TYPE (tem)));
        }

      /* Fold (X ^ Y) & Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg1));
        }
      /* Fold (X ^ Y) & X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg1));
        }
      /* Fold X & (X ^ Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_convert_loc (loc, type, arg0),
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
        }
      /* Fold X & (Y ^ X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg0));
        }

      /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
         multiple of 1 << CST.  */
      if (TREE_CODE (arg1) == INTEGER_CST)
        {
          wide_int cst1 = arg1;
          wide_int ncst1 = -cst1;
          if ((cst1 & ncst1) == ncst1
              && multiple_of_p (type, arg0,
                                wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
            return fold_convert_loc (loc, type, arg0);
        }
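      /* Illustrative note (editorial addition): if Y is a constant multiple
         of 1 << CST, the low CST bits of X * Y are already zero, so for
         unsigned x,
             (x * 8) & -8   folds to   x * 8.  */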
      /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
         bits from CST2.  */
      if (TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) == MULT_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          wide_int warg1 = arg1;
          wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));

          if (masked == 0)
            return omit_two_operands_loc (loc, type, build_zero_cst (type),
                                          arg0, arg1);
          else if (masked != warg1)
            {
              /* Avoid the transform if arg1 is a mask of some
                 mode which allows further optimizations.  */
              int pop = wi::popcount (warg1);
              if (!(pop >= BITS_PER_UNIT
                    && exact_log2 (pop) != -1
                    && wi::mask (pop, false, warg1.get_precision ()) == warg1))
                return fold_build2_loc (loc, code, type, op0,
                                        wide_int_to_tree (type, masked));
            }
        }

      /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
         ((A & N) + B) & M -> (A + B) & M
         Similarly if (N & M) == 0,
         ((A | N) + B) & M -> (A + B) & M
         and for - instead of + (or unary - instead of +)
         and/or ^ instead of |.
         If B is constant and (B & M) == 0, fold into A & M.  */
      if (TREE_CODE (arg1) == INTEGER_CST)
        {
          wide_int cst1 = arg1;
          if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
              && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
              && (TREE_CODE (arg0) == PLUS_EXPR
                  || TREE_CODE (arg0) == MINUS_EXPR
                  || TREE_CODE (arg0) == NEGATE_EXPR)
              && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
                  || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
            {
              tree pmop[2];
              int which = 0;
              wide_int cst0;

              /* Now we know that arg0 is (C + D) or (C - D) or
                 -C and arg1 (M) is == (1LL << cst) - 1.
                 Store C into PMOP[0] and D into PMOP[1].  */
              pmop[0] = TREE_OPERAND (arg0, 0);
              pmop[1] = NULL;
              if (TREE_CODE (arg0) != NEGATE_EXPR)
                {
                  pmop[1] = TREE_OPERAND (arg0, 1);
                  which = 1;
                }

              if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
                which = -1;

              for (; which >= 0; which--)
                switch (TREE_CODE (pmop[which]))
                  {
                  case BIT_AND_EXPR:
                  case BIT_IOR_EXPR:
                  case BIT_XOR_EXPR:
                    if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
                        != INTEGER_CST)
                      break;
                    cst0 = TREE_OPERAND (pmop[which], 1);
                    cst0 &= cst1;
                    if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
                      {
                        if (cst0 != cst1)
                          break;
                      }
                    else if (cst0 != 0)
                      break;
                    /* If C or D is of the form (A & N) where
                       (N & M) == M, or of the form (A | N) or
                       (A ^ N) where (N & M) == 0, replace it with A.  */
                    pmop[which] = TREE_OPERAND (pmop[which], 0);
                    break;
                  case INTEGER_CST:
                    /* If C or D is a N where (N & M) == 0, it can be
                       omitted (assumed 0).  */
                    if ((TREE_CODE (arg0) == PLUS_EXPR
                         || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
                        && (cst1 & pmop[which]) == 0)
                      pmop[which] = NULL;
                    break;
                  default:
                    break;
                  }

              /* Only build anything new if we optimized one or both arguments
                 above.  */
              if (pmop[0] != TREE_OPERAND (arg0, 0)
                  || (TREE_CODE (arg0) != NEGATE_EXPR
                      && pmop[1] != TREE_OPERAND (arg0, 1)))
                {
                  tree utype = TREE_TYPE (arg0);
                  if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
                    {
                      /* Perform the operations in a type that has defined
                         overflow behavior.  */
                      utype = unsigned_type_for (TREE_TYPE (arg0));
                      if (pmop[0] != NULL)
                        pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
                      if (pmop[1] != NULL)
                        pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
                    }

                  if (TREE_CODE (arg0) == NEGATE_EXPR)
                    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
                  else if (TREE_CODE (arg0) == PLUS_EXPR)
                    {
                      if (pmop[0] != NULL && pmop[1] != NULL)
                        tem = fold_build2_loc (loc, PLUS_EXPR, utype,
                                               pmop[0], pmop[1]);
                      else if (pmop[0] != NULL)
                        tem = pmop[0];
                      else if (pmop[1] != NULL)
                        tem = pmop[1];
                      else
                        return build_int_cst (type, 0);
                    }
                  else if (pmop[0] == NULL)
                    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
                  else
                    tem = fold_build2_loc (loc, MINUS_EXPR, utype,
                                           pmop[0], pmop[1]);
                  /* TEM is now the new binary +, - or unary - replacement.  */
                  tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
                                         fold_convert_loc (loc, utype, arg1));
                  return fold_convert_loc (loc, type, tem);
                }
            }
        }

      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
        return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
          && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
        {
          prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));

          wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
          if (mask == -1)
            return
              fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
        }

      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

         This results in more efficient code for machines without a NOR
         instruction.  Combine will canonicalize to the first form
         which will allow use of NOR instructions provided by the
         backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        {
          return fold_build1_loc (loc, BIT_NOT_EXPR, type,
                                  build2 (BIT_IOR_EXPR, type,
                                          fold_convert_loc (loc, type,
                                                            TREE_OPERAND (arg0, 0)),
                                          fold_convert_loc (loc, type,
                                                            TREE_OPERAND (arg1, 0))));
        }

      /* If arg0 is derived from the address of an object or function, we may
         be able to fold this expression using the object or function's
         alignment.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
        {
          unsigned HOST_WIDE_INT modulus, residue;
          unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);

          modulus = get_pointer_modulus_and_residue (arg0, &residue,
                                                     integer_onep (arg1));

          /* This works because modulus is a power of 2.  If this weren't the
             case, we'd have to replace it by its greatest power-of-2
             divisor: modulus & -modulus.  */
          if (low < modulus)
            return build_int_cst (type, residue & low);
        }

      /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
              (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
         if the new mask might be further optimized.  */
      if ((TREE_CODE (arg0) == LSHIFT_EXPR
           || TREE_CODE (arg0) == RSHIFT_EXPR)
          && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
          && TREE_CODE (arg1) == INTEGER_CST
          && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
          && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
          && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
              < TYPE_PRECISION (TREE_TYPE (arg0))))
        {
          unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
          unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
          unsigned HOST_WIDE_INT newmask, zerobits = 0;
          tree shift_type = TREE_TYPE (arg0);

          if (TREE_CODE (arg0) == LSHIFT_EXPR)
            zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
          else if (TREE_CODE (arg0) == RSHIFT_EXPR
                   && TYPE_PRECISION (TREE_TYPE (arg0))
                      == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
            {
              prec = TYPE_PRECISION (TREE_TYPE (arg0));
              tree arg00 = TREE_OPERAND (arg0, 0);
              /* See if more bits can be proven as zero because of
                 zero extension.  */
              if (TREE_CODE (arg00) == NOP_EXPR
                  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
                {
                  tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
                  if (TYPE_PRECISION (inner_type)
                      == GET_MODE_PRECISION (TYPE_MODE (inner_type))
                      && TYPE_PRECISION (inner_type) < prec)
                    {
                      prec = TYPE_PRECISION (inner_type);
                      /* See if we can shorten the right shift.  */
                      if (shiftc < prec)
                        shift_type = inner_type;
                      /* Otherwise X >> C1 is all zeros, so we'll optimize
                         it into (X, 0) later on by making sure zerobits
                         is all ones.  */
                    }
                }
              zerobits = ~(unsigned HOST_WIDE_INT) 0;
              if (shiftc < prec)
                {
                  zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
                  zerobits <<= prec - shiftc;
                }
              /* For arithmetic shift if sign bit could be set, zerobits
                 can contain actually sign bits, so no transformation is
                 possible, unless MASK masks them all away.  In that
                 case the shift needs to be converted into logical shift.  */
              if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
                  && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
                {
                  if ((mask & zerobits) == 0)
                    shift_type = unsigned_type_for (TREE_TYPE (arg0));
                  else
                    zerobits = 0;
                }
            }

          /* ((X << 16) & 0xff00) is (X, 0).  */
          if ((mask & zerobits) == mask)
            return omit_one_operand_loc (loc, type,
                                         build_int_cst (type, 0), arg0);

          newmask = mask | zerobits;
          if (newmask != mask && (newmask & (newmask + 1)) == 0)
            {
              /* Only do the transformation if NEWMASK is some integer
                 mode's mask.  */
              for (prec = BITS_PER_UNIT;
                   prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
                if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
                  break;
              if (prec < HOST_BITS_PER_WIDE_INT
                  || newmask == ~(unsigned HOST_WIDE_INT) 0)
                {
                  tree newmaskt;

                  if (shift_type != TREE_TYPE (arg0))
                    {
                      tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
                                             fold_convert_loc (loc, shift_type,
                                                               TREE_OPERAND (arg0, 0)),
                                             TREE_OPERAND (arg0, 1));
                      tem = fold_convert_loc (loc, type, tem);
                    }
                  else
                    tem = op0;
                  newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
                  if (!tree_int_cst_equal (newmaskt, arg1))
                    return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
                }
            }
        }

      goto associate;
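      /* Illustrative note (editorial addition): for a 32-bit unsigned x,
             (x << 16) & 0xff00
         is always zero, since the mask selects only bits the shift
         cleared, so it folds to (x, 0); when only some mask bits are
         known zero, the mask is widened toward a whole-mode mask so a
         cheaper constant can be used.  */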
11457 /* Don't touch a floating-point divide by zero unless the mode
11458 of the constant can represent infinity. */
11459 if (TREE_CODE (arg1
) == REAL_CST
11460 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1
)))
11461 && real_zerop (arg1
))
11464 /* (-A) / (-B) -> A / B */
11465 if (TREE_CODE (arg0
) == NEGATE_EXPR
&& negate_expr_p (arg1
))
11466 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11467 TREE_OPERAND (arg0
, 0),
11468 negate_expr (arg1
));
11469 if (TREE_CODE (arg1
) == NEGATE_EXPR
&& negate_expr_p (arg0
))
11470 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11471 negate_expr (arg0
),
11472 TREE_OPERAND (arg1
, 0));
11474 /* Convert A/B/C to A/(B*C). */
11475 if (flag_reciprocal_math
11476 && TREE_CODE (arg0
) == RDIV_EXPR
)
11477 return fold_build2_loc (loc
, RDIV_EXPR
, type
, TREE_OPERAND (arg0
, 0),
11478 fold_build2_loc (loc
, MULT_EXPR
, type
,
11479 TREE_OPERAND (arg0
, 1), arg1
));
11481 /* Convert A/(B/C) to (A/B)*C. */
11482 if (flag_reciprocal_math
11483 && TREE_CODE (arg1
) == RDIV_EXPR
)
11484 return fold_build2_loc (loc
, MULT_EXPR
, type
,
11485 fold_build2_loc (loc
, RDIV_EXPR
, type
, arg0
,
11486 TREE_OPERAND (arg1
, 0)),
11487 TREE_OPERAND (arg1
, 1));
11489 /* Convert C1/(X*C2) into (C1/C2)/X. */
11490 if (flag_reciprocal_math
11491 && TREE_CODE (arg1
) == MULT_EXPR
11492 && TREE_CODE (arg0
) == REAL_CST
11493 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == REAL_CST
)
11495 tree tem
= const_binop (RDIV_EXPR
, arg0
,
11496 TREE_OPERAND (arg1
, 1));
11498 return fold_build2_loc (loc
, RDIV_EXPR
, type
, tem
,
11499 TREE_OPERAND (arg1
, 0));
11502 if (flag_unsafe_math_optimizations
)
11504 enum built_in_function fcode0
= builtin_mathfn_code (arg0
);
11505 enum built_in_function fcode1
= builtin_mathfn_code (arg1
);
11507 /* Optimize sin(x)/cos(x) as tan(x). */
11508 if (((fcode0
== BUILT_IN_SIN
&& fcode1
== BUILT_IN_COS
)
11509 || (fcode0
== BUILT_IN_SINF
&& fcode1
== BUILT_IN_COSF
)
11510 || (fcode0
== BUILT_IN_SINL
&& fcode1
== BUILT_IN_COSL
))
11511 && operand_equal_p (CALL_EXPR_ARG (arg0
, 0),
11512 CALL_EXPR_ARG (arg1
, 0), 0))
11514 tree tanfn
= mathfn_built_in (type
, BUILT_IN_TAN
);
11516 if (tanfn
!= NULL_TREE
)
11517 return build_call_expr_loc (loc
, tanfn
, 1, CALL_EXPR_ARG (arg0
, 0));
11520 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11521 if (((fcode0
== BUILT_IN_COS
&& fcode1
== BUILT_IN_SIN
)
11522 || (fcode0
== BUILT_IN_COSF
&& fcode1
== BUILT_IN_SINF
)
11523 || (fcode0
== BUILT_IN_COSL
&& fcode1
== BUILT_IN_SINL
))
11524 && operand_equal_p (CALL_EXPR_ARG (arg0
, 0),
11525 CALL_EXPR_ARG (arg1
, 0), 0))
11527 tree tanfn
= mathfn_built_in (type
, BUILT_IN_TAN
);
11529 if (tanfn
!= NULL_TREE
)
11531 tree tmp
= build_call_expr_loc (loc
, tanfn
, 1,
11532 CALL_EXPR_ARG (arg0
, 0));
11533 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11534 build_real (type
, dconst1
), tmp
);
11538 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11539 NaNs or Infinities. */
11540 if (((fcode0
== BUILT_IN_SIN
&& fcode1
== BUILT_IN_TAN
)
11541 || (fcode0
== BUILT_IN_SINF
&& fcode1
== BUILT_IN_TANF
)
11542 || (fcode0
== BUILT_IN_SINL
&& fcode1
== BUILT_IN_TANL
)))
11544 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
11545 tree arg01
= CALL_EXPR_ARG (arg1
, 0);
11547 if (! HONOR_NANS (element_mode (arg00
))
11548 && ! HONOR_INFINITIES (element_mode (arg00
))
11549 && operand_equal_p (arg00
, arg01
, 0))
11551 tree cosfn
= mathfn_built_in (type
, BUILT_IN_COS
);
11553 if (cosfn
!= NULL_TREE
)
11554 return build_call_expr_loc (loc
, cosfn
, 1, arg00
);
11558 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11559 NaNs or Infinities. */
11560 if (((fcode0
== BUILT_IN_TAN
&& fcode1
== BUILT_IN_SIN
)
11561 || (fcode0
== BUILT_IN_TANF
&& fcode1
== BUILT_IN_SINF
)
11562 || (fcode0
== BUILT_IN_TANL
&& fcode1
== BUILT_IN_SINL
)))
11564 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
11565 tree arg01
= CALL_EXPR_ARG (arg1
, 0);
11567 if (! HONOR_NANS (element_mode (arg00
))
11568 && ! HONOR_INFINITIES (element_mode (arg00
))
11569 && operand_equal_p (arg00
, arg01
, 0))
11571 tree cosfn
= mathfn_built_in (type
, BUILT_IN_COS
);
11573 if (cosfn
!= NULL_TREE
)
11575 tree tmp
= build_call_expr_loc (loc
, cosfn
, 1, arg00
);
11576 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11577 build_real (type
, dconst1
),
11583 /* Optimize pow(x,c)/x as pow(x,c-1). */
11584 if (fcode0
== BUILT_IN_POW
11585 || fcode0
== BUILT_IN_POWF
11586 || fcode0
== BUILT_IN_POWL
)
11588 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
11589 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
11590 if (TREE_CODE (arg01
) == REAL_CST
11591 && !TREE_OVERFLOW (arg01
)
11592 && operand_equal_p (arg1
, arg00
, 0))
11594 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
11598 c
= TREE_REAL_CST (arg01
);
11599 real_arithmetic (&c
, MINUS_EXPR
, &c
, &dconst1
);
11600 arg
= build_real (type
, c
);
11601 return build_call_expr_loc (loc
, powfn
, 2, arg1
, arg
);
11605 /* Optimize a/root(b/c) into a*root(c/b). */
11606 if (BUILTIN_ROOT_P (fcode1
))
11608 tree rootarg
= CALL_EXPR_ARG (arg1
, 0);
11610 if (TREE_CODE (rootarg
) == RDIV_EXPR
)
11612 tree rootfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
11613 tree b
= TREE_OPERAND (rootarg
, 0);
11614 tree c
= TREE_OPERAND (rootarg
, 1);
11616 tree tmp
= fold_build2_loc (loc
, RDIV_EXPR
, type
, c
, b
);
11618 tmp
= build_call_expr_loc (loc
, rootfn
, 1, tmp
);
11619 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, tmp
);
          /* Optimize x/expN(y) into x*expN(-y).  */
          if (BUILTIN_EXPONENT_P (fcode1))
            {
              tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
              tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
              arg1 = build_call_expr_loc (loc, expfn, 1,
                                          fold_convert_loc (loc, type, arg));
              return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
            }
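          /* Illustration: x / exp (y) becomes x * exp (-y), trading a
             division for a multiplication; the same shape applies to
             the other exponential builtins matched by
             BUILTIN_EXPONENT_P.  */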
          /* Optimize x/pow(y,z) into x*pow(y,-z).  */
          if (fcode1 == BUILT_IN_POW
              || fcode1 == BUILT_IN_POWF
              || fcode1 == BUILT_IN_POWL)
            {
              tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
              tree arg10 = CALL_EXPR_ARG (arg1, 0);
              tree arg11 = CALL_EXPR_ARG (arg1, 1);
              tree neg11 = fold_convert_loc (loc, type,
                                             negate_expr (arg11));
              arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
              return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
            }
        }
      return NULL_TREE;
    case TRUNC_DIV_EXPR:
      /* Optimize (X & (-A)) / A where A is a power of 2,
         to X >> log2(A).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
          && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
        {
          tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
                                      arg1, TREE_OPERAND (arg0, 1));
          if (sum && integer_zerop (sum))
            {
              tree pow2 = build_int_cst (integer_type_node,
                                         wi::exact_log2 (arg1));
              return fold_build2_loc (loc, RSHIFT_EXPR, type,
                                      TREE_OPERAND (arg0, 0), pow2);
            }
        }
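      /* Illustration: for signed x, (x & -8) / 8 folds to x >> 3; the
         check above verifies that arg1 plus the mask sums to zero,
         i.e. that the mask is exactly -A for the divisor A.  */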
      /* Fall through */

    case FLOOR_DIV_EXPR:
      /* Simplify A / (B << N) where A and B are positive and B is
         a power of 2, to A >> (N + log2(B)).  */
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == LSHIFT_EXPR
          && (TYPE_UNSIGNED (type)
              || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
        {
          tree sval = TREE_OPERAND (arg1, 0);
          if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
            {
              tree sh_cnt = TREE_OPERAND (arg1, 1);
              tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
                                         wi::exact_log2 (sval));

              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying A / (B << N)"),
                                       WARN_STRICT_OVERFLOW_MISC);

              sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
                                        sh_cnt, pow2);
              return fold_build2_loc (loc, RSHIFT_EXPR, type,
                                      fold_convert_loc (loc, type, arg0),
                                      sh_cnt);
            }
        }
      /* Fall through */

    case ROUND_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_zerop (arg1))
        return NULL_TREE;

      /* Convert -A / -B to A / B when the type is signed and overflow is
         undefined.  */
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
          && TREE_CODE (arg0) == NEGATE_EXPR
          && negate_expr_p (arg1))
        {
          if (INTEGRAL_TYPE_P (type))
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when distributing negation across "
                                    "division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_build2_loc (loc, code, type,
                                  fold_convert_loc (loc, type,
                                                    TREE_OPERAND (arg0, 0)),
                                  fold_convert_loc (loc, type,
                                                    negate_expr (arg1)));
        }
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
          && TREE_CODE (arg1) == NEGATE_EXPR
          && negate_expr_p (arg0))
        {
          if (INTEGRAL_TYPE_P (type))
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when distributing negation across "
                                    "division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_build2_loc (loc, code, type,
                                  fold_convert_loc (loc, type,
                                                    negate_expr (arg0)),
                                  fold_convert_loc (loc, type,
                                                    TREE_OPERAND (arg1, 0)));
        }

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
         operation, EXACT_DIV_EXPR.

         Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
         At one time others generated faster code, it's not clear if they do
         after the last round of changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
          && multiple_of_p (type, arg0, arg1))
        return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                         &strict_overflow_p)))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_convert_loc (loc, type, tem);
        }

      return NULL_TREE;
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* X % -Y is the same as X % Y.  */
      if (code == TRUNC_MOD_EXPR
          && !TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == NEGATE_EXPR
          && !TYPE_OVERFLOW_TRAPS (type))
        return fold_build2_loc (loc, code, type,
                                fold_convert_loc (loc, type, arg0),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                         &strict_overflow_p)))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying modulus"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_convert_loc (loc, type, tem);
        }
      /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
         i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
      if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
          && (TYPE_UNSIGNED (type)
              || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
        {
          tree c = arg1;
          /* Also optimize A % (C << N) where C is a power of 2,
             to A & ((C << N) - 1).  */
          if (TREE_CODE (arg1) == LSHIFT_EXPR)
            c = TREE_OPERAND (arg1, 0);

          if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
            {
              tree mask
                = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
                                   build_int_cst (TREE_TYPE (arg1), 1));
              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying "
                                        "X % (power of two)"),
                                       WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                      fold_convert_loc (loc, type, arg0),
                                      fold_convert_loc (loc, type, mask));
            }
        }

      return NULL_TREE;
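      /* Illustration of the modulus fold above: for unsigned x,
         x % 16 becomes x & 15, and x % (4 << n) becomes
         x & ((4 << n) - 1), since the mask is built as arg1 - 1.  */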
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* Since negative shift count is not well-defined,
         don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
        return NULL_TREE;

      prec = element_precision (type);

      /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
      if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
          && tree_to_uhwi (arg1) < prec
          && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
          && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
        {
          unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
                              + tree_to_uhwi (arg1));

          /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
             being well defined.  */
          if (low >= prec)
            {
              if (code == LROTATE_EXPR || code == RROTATE_EXPR)
                low = low % prec;
              else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
                return omit_one_operand_loc (loc, type, build_zero_cst (type),
                                             TREE_OPERAND (arg0, 0));
              else
                low = prec - 1;
            }

          return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                                  build_int_cst (TREE_TYPE (arg1), low));
        }
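      /* Illustration: (x << 3) << 5 folds to x << 8.  If the combined
         count reaches the precision, a logical shift degenerates to
         zero and a rotate count is instead reduced modulo the
         precision.  */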
      /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
         into x & ((unsigned)-1 >> c) for unsigned types.  */
      if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
           || (TYPE_UNSIGNED (type)
               && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
          && tree_fits_uhwi_p (arg1)
          && tree_to_uhwi (arg1) < prec
          && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
          && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
        {
          HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
          HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
          tree lshift;
          tree arg00;

          if (low0 == low1)
            {
              arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));

              lshift = build_minus_one_cst (type);
              lshift = const_binop (code, lshift, arg1);

              return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
            }
        }
      /* If we have a rotate of a bit operation with the rotate count and
         the second operand of the bit operation both constant,
         permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
          && (TREE_CODE (arg0) == BIT_AND_EXPR
              || TREE_CODE (arg0) == BIT_IOR_EXPR
              || TREE_CODE (arg0) == BIT_XOR_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return fold_build2_loc (loc, TREE_CODE (arg0), type,
                                fold_build2_loc (loc, code, type,
                                                 TREE_OPERAND (arg0, 0),
                                                 arg1),
                                fold_build2_loc (loc, code, type,
                                                 TREE_OPERAND (arg0, 1),
                                                 arg1));
      /* Two consecutive rotates adding up to some integer
         multiple of the precision of the type can be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) == RROTATE_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
                             prec) == 0)
        return TREE_OPERAND (arg0, 0);
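      /* Illustration: for 32-bit int, rotating right by 13 and then
         by 19 adds up to the full precision, so the pair of rotates
         disappears entirely.  */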
      /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
         (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
         if the latter can be further optimized.  */
      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
          && TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree mask = fold_build2_loc (loc, code, type,
                                       fold_convert_loc (loc, type,
                                                         TREE_OPERAND (arg0,
                                                                       1)),
                                       arg1);
          tree shift = fold_build2_loc (loc, code, type,
                                        fold_convert_loc (loc, type,
                                                          TREE_OPERAND (arg0,
                                                                        0)),
                                        arg1);
          tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
          if (tem)
            return tem;
        }

      return NULL_TREE;
    case MIN_EXPR:
      tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
      if (tem)
        return tem;
      goto associate;

    case MAX_EXPR:
      tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
      if (tem)
        return tem;
      goto associate;
    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
        return fold_convert_loc (loc, type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
          /* Preserve sequence points.  */
          && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
         must be evaluated.  */
      if (integer_zerop (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
         case will be handled here.  */
      if (integer_zerop (arg0))
        return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
         means A >= Y && A != MAX, but in this case we know that
         A < X <= MAX.  */

      if (!TREE_SIDE_EFFECTS (arg0)
          && !TREE_SIDE_EFFECTS (arg1))
        {
          tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
          if (tem && !operand_equal_p (tem, arg0, 0))
            return fold_build2_loc (loc, code, type, tem, arg1);

          tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
          if (tem && !operand_equal_p (tem, arg1, 0))
            return fold_build2_loc (loc, code, type, arg0, tem);
        }

      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
          != NULL_TREE)
        return tem;

      return NULL_TREE;
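      /* Illustration: "f () && 0" cannot simply become 0, because
         f () must still be evaluated; omit_one_operand_loc above keeps
         the call for its side effects while yielding the constant
         result.  */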
    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or true.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return fold_convert_loc (loc, type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
          /* Preserve sequence points.  */
          && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant true, result is true, but we must
         evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
         TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X || X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg1);
      /* X || !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* (X && !Y) || (!X && Y) is X ^ Y */
      if (TREE_CODE (arg0) == TRUTH_AND_EXPR
          && TREE_CODE (arg1) == TRUTH_AND_EXPR)
        {
          tree a0, a1, l0, l1, n0, n1;

          a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
          a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));

          l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));

          n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
          n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);

          if ((operand_equal_p (n0, a0, 0)
               && operand_equal_p (n1, a1, 0))
              || (operand_equal_p (n0, a1, 0)
                  && operand_equal_p (n1, a0, 0)))
            return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
        }

      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
          != NULL_TREE)
        return tem;

      return NULL_TREE;
    case TRUTH_XOR_EXPR:
      /* If the second arg is constant zero, drop it.  */
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If the second arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg1))
        {
          tem = invert_truthvalue_loc (loc, arg0);
          return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
        }
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* !X ^ X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg1);

      /* X ^ !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      return NULL_TREE;
    case EQ_EXPR:
    case NE_EXPR:
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
        return tem;

      /* bool_var != 0 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
          && code == NE_EXPR)
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var == 1 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
          && code == EQ_EXPR)
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var != 1 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
          && code == NE_EXPR)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, TRUTH_NOT_EXPR,
                                                  TREE_TYPE (arg0), arg0));

      /* bool_var == 0 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
          && code == EQ_EXPR)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, TRUTH_NOT_EXPR,
                                                  TREE_TYPE (arg0), arg0));

      /* !exp != 0 becomes !exp */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
          && code == NE_EXPR)
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
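      /* Illustration: for a BOOLEAN_TYPE variable b, the folds above
         rewrite "b != 0" and "b == 1" to plain b, and "b != 1" and
         "b == 0" to !b.  */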
      /* If this is an equality comparison of the address of two non-weak,
         unaliased symbols neither of which are extern (since we do not
         have access to attributes for externs), then we know the result.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
          && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
          && ! lookup_attribute ("alias",
                                 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
          && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
          && TREE_CODE (arg1) == ADDR_EXPR
          && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
          && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
          && ! lookup_attribute ("alias",
                                 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
          && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
        {
          /* We know that we're looking at the address of two
             non-weak, unaliased, static _DECL nodes.

             It is both wasteful and incorrect to call operand_equal_p
             to compare the two ADDR_EXPR nodes.  It is wasteful in that
             all we need to do is test pointer equality for the arguments
             to the two ADDR_EXPR nodes.  It is incorrect to use
             operand_equal_p as that function is NOT equivalent to a
             C equality test.  It can in fact return false for two
             objects which would test as equal using the C equality
             operator.  */
          bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
          return constant_boolean_node (equal
                                        ? code == EQ_EXPR : code != EQ_EXPR,
                                        type);
        }
      /* Similarly for a NEGATE_EXPR.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
                                                        arg1)))
          && TREE_CODE (tem) == INTEGER_CST
          && !TREE_OVERFLOW (tem))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a BIT_XOR_EXPR;  X ^ C1 == C2 is X == (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                                fold_build2_loc (loc, BIT_XOR_EXPR,
                                                 TREE_TYPE (arg0),
                                                 fold_convert_loc (loc,
                                                                   TREE_TYPE (arg0),
                                                                   arg1),
                                                 TREE_OPERAND (arg0, 1)));
      /* Transform comparisons of the form X +- Y CMP X to Y CMP 0.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR
           || TREE_CODE (arg0) == POINTER_PLUS_EXPR
           || TREE_CODE (arg0) == MINUS_EXPR)
          && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
                                                                        0)),
                              arg1, 0)
          && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
              || POINTER_TYPE_P (TREE_TYPE (arg0))))
        {
          tree val = TREE_OPERAND (arg0, 1);
          return omit_two_operands_loc (loc, type,
                                        fold_build2_loc (loc, code, type,
                                                         val,
                                                         build_int_cst (TREE_TYPE (val),
                                                                        0)),
                                        TREE_OPERAND (arg0, 0), arg1);
        }

      /* Transform comparisons of the form C - X CMP X if C % 2 == 1.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
          && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
                                                                        1)),
                              arg1, 0)
          && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
        return omit_two_operands_loc (loc, type,
                                      code == NE_EXPR
                                      ? boolean_true_node : boolean_false_node,
                                      TREE_OPERAND (arg0, 1), arg1);
      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
      if (TREE_CODE (arg0) == ABS_EXPR
          && (integer_zerop (arg1) || real_zerop (arg1)))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
      /* If this is an EQ or NE comparison with zero and ARG0 is
         (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
         two operations, but the latter can be done in one less insn
         on machines that have only two-operand insns or on which a
         constant cannot be the first operand.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_zerop (arg1))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          if (TREE_CODE (arg00) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg00, 0)))
            {
              tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
                                          arg01, TREE_OPERAND (arg00, 1));
              tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
                                     build_int_cst (TREE_TYPE (arg0), 1));
              return fold_build2_loc (loc, code, type,
                                      fold_convert_loc (loc, TREE_TYPE (arg1),
                                                        tem),
                                      arg1);
            }
          else if (TREE_CODE (arg01) == LSHIFT_EXPR
                   && integer_onep (TREE_OPERAND (arg01, 0)))
            {
              tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
                                          arg00, TREE_OPERAND (arg01, 1));
              tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
                                     build_int_cst (TREE_TYPE (arg0), 1));
              return fold_build2_loc (loc, code, type,
                                      fold_convert_loc (loc, TREE_TYPE (arg1),
                                                        tem),
                                      arg1);
            }
        }
      /* If this is an NE or EQ comparison of zero against the result of a
         signed MOD operation whose second operand is a power of 2, make
         the MOD operation unsigned since it is simpler and equivalent.  */
      if (integer_zerop (arg1)
          && !TYPE_UNSIGNED (TREE_TYPE (arg0))
          && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
              || TREE_CODE (arg0) == CEIL_MOD_EXPR
              || TREE_CODE (arg0) == FLOOR_MOD_EXPR
              || TREE_CODE (arg0) == ROUND_MOD_EXPR)
          && integer_pow2p (TREE_OPERAND (arg0, 1)))
        {
          tree newtype = unsigned_type_for (TREE_TYPE (arg0));
          tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
                                         fold_convert_loc (loc, newtype,
                                                           TREE_OPERAND (arg0,
                                                                         0)),
                                         fold_convert_loc (loc, newtype,
                                                           TREE_OPERAND (arg0,
                                                                         1)));

          return fold_build2_loc (loc, code, type, newmod,
                                  fold_convert_loc (loc, newtype, arg1));
        }
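      /* Illustration: for signed x, "x % 4 == 0" is rewritten with an
         unsigned modulus, which can later fold to "(x & 3) == 0"; the
         two forms agree for every x when testing against zero with a
         power-of-two modulus.  */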
      /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
         C1 is a valid shift constant, and C2 is a power of two, i.e.
         a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
             == INTEGER_CST
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && integer_zerop (arg1))
        {
          tree itype = TREE_TYPE (arg0);
          tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
          prec = TYPE_PRECISION (itype);

          /* Check for a valid shift count.  */
          if (wi::ltu_p (arg001, prec))
            {
              tree arg01 = TREE_OPERAND (arg0, 1);
              tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
              unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
              /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
                 can be rewritten as (X & (C2 << C1)) != 0.  */
              if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
                {
                  tem = fold_build2_loc (loc, LSHIFT_EXPR, itype,
                                         arg01, arg001);
                  tem = fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                         arg000, tem);
                  return fold_build2_loc (loc, code, type, tem,
                                          fold_convert_loc (loc, itype, arg1));
                }
              /* Otherwise, for signed (arithmetic) shifts,
                 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
                 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
              else if (!TYPE_UNSIGNED (itype))
                return fold_build2_loc (loc,
                                        code == EQ_EXPR ? GE_EXPR : LT_EXPR,
                                        type,
                                        arg000, build_int_cst (itype, 0));
              /* Otherwise, for unsigned (logical) shifts,
                 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
                 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
              else
                return omit_one_operand_loc (loc, type,
                                             code == EQ_EXPR
                                             ? integer_one_node
                                             : integer_zero_node,
                                             arg000);
            }
        }
      /* If we have (A & C) == C where C is a power of 2, convert this into
         (A & C) != 0.  Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
                                arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
                                                        integer_zero_node));

      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
         bit, then fold the expression into A < 0 or A >= 0.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
      if (tem)
        return tem;
      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
         Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
                                       TREE_TYPE (TREE_OPERAND (arg0, 1)),
                                       TREE_OPERAND (arg0, 1));
          tree dandnotc
            = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
                               fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
                               notc);
          tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
          if (integer_nonzerop (dandnotc))
            return omit_one_operand_loc (loc, type, rslt, arg0);
        }

      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
         Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
                                       arg1);
          tree candnotd
            = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
                               TREE_OPERAND (arg0, 1),
                               fold_convert_loc (loc, TREE_TYPE (arg0), notd));
          tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
          if (integer_nonzerop (candnotd))
            return omit_one_operand_loc (loc, type, rslt, arg0);
        }
      /* If this is a comparison of a field, we may be able to simplify it.  */
      if ((TREE_CODE (arg0) == COMPONENT_REF
           || TREE_CODE (arg0) == BIT_FIELD_REF)
          /* Handle the constant case even without -O
             to make sure the warnings are given.  */
          && (optimize || TREE_CODE (arg1) == INTEGER_CST))
        {
          t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
          if (t1)
            return t1;
        }

      /* Optimize comparisons of strlen vs zero to a compare of the
         first character of the string vs zero.  To wit,
                strlen(ptr) == 0   =>  *ptr == 0
                strlen(ptr) != 0   =>  *ptr != 0
         Other cases should reduce to one of these two (or a constant)
         due to the return value of strlen being unsigned.  */
      if (TREE_CODE (arg0) == CALL_EXPR
          && integer_zerop (arg1))
        {
          tree fndecl = get_callee_fndecl (arg0);

          if (fndecl
              && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
              && call_expr_nargs (arg0) == 1
              && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
                 == POINTER_TYPE)
            {
              tree iref = build_fold_indirect_ref_loc (loc,
                                                       CALL_EXPR_ARG (arg0,
                                                                      0));
              return fold_build2_loc (loc, code, type, iref,
                                      build_int_cst (TREE_TYPE (iref), 0));
            }
        }
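      /* Illustration: "strlen (p) == 0" folds to "*p == 0", avoiding
         the library call entirely; strlen returns zero exactly when
         the first character is the terminating NUL.  */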
      /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
         of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
      if (TREE_CODE (arg0) == RSHIFT_EXPR
          && integer_zerop (arg1)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree itype = TREE_TYPE (arg00);
          if (wi::eq_p (arg01, element_precision (itype) - 1))
            {
              if (TYPE_UNSIGNED (itype))
                {
                  itype = signed_type_for (itype);
                  arg00 = fold_convert_loc (loc, itype, arg00);
                }
              return fold_build2_loc (loc,
                                      code == EQ_EXPR ? GE_EXPR : LT_EXPR,
                                      type, arg00, build_zero_cst (itype));
            }
        }
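      /* Illustration: for 32-bit signed x, "(x >> 31) != 0" folds to
         "x < 0" and "(x >> 31) == 0" folds to "x >= 0", since the
         shift leaves only the sign bit.  */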
      /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
      if (integer_zerop (arg1)
          && TREE_CODE (arg0) == BIT_XOR_EXPR)
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg0, 1));

      /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                                build_zero_cst (TREE_TYPE (arg0)));
      /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
                                build_zero_cst (TREE_TYPE (arg0)));

      /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                                fold_build2_loc (loc, BIT_XOR_EXPR,
                                                 TREE_TYPE (arg1),
                                                 TREE_OPERAND (arg0, 1),
                                                 arg1));
      /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
         (X & C) == 0 when C is a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
          && integer_zerop (arg1)
          && integer_pow2p (TREE_OPERAND (arg0, 1)))
        {
          tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
                                 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
                                 TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
                                  type, tem,
                                  fold_convert_loc (loc, TREE_TYPE (arg0),
                                                    arg1));
        }

      /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
         constant C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
          && integer_zerop (arg1)
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
                                  type,
                                  arg00, build_int_cst (TREE_TYPE (arg00), 0));
        }

      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
         when C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
          && integer_zerop (arg1)
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
        {
          tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
          tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
                                 arg000, TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
                                  type,
                                  tem, build_int_cst (TREE_TYPE (tem), 0));
        }
      if (integer_zerop (arg1)
          && tree_expr_nonzero_p (arg0))
        {
          tree res = constant_boolean_node (code == NE_EXPR, type);
          return omit_one_operand_loc (loc, type, res, arg0);
        }

      /* Fold -X op -Y as X op Y, where op is eq/ne.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2_loc (loc, code, type,
                                TREE_OPERAND (arg0, 0),
                                fold_convert_loc (loc, TREE_TYPE (arg0),
                                                  TREE_OPERAND (arg1, 0)));
      /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == BIT_AND_EXPR)
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg10 = TREE_OPERAND (arg1, 0);
          tree arg11 = TREE_OPERAND (arg1, 1);
          tree itype = TREE_TYPE (arg0);

          if (operand_equal_p (arg01, arg11, 0))
            return fold_build2_loc (loc, code, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                                     fold_build2_loc (loc,
                                                                      BIT_XOR_EXPR,
                                                                      itype,
                                                                      arg00,
                                                                      arg10),
                                                     arg01),
                                    build_zero_cst (itype));

          if (operand_equal_p (arg01, arg10, 0))
            return fold_build2_loc (loc, code, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                                     fold_build2_loc (loc,
                                                                      BIT_XOR_EXPR,
                                                                      itype,
                                                                      arg00,
                                                                      arg11),
                                                     arg01),
                                    build_zero_cst (itype));

          if (operand_equal_p (arg00, arg11, 0))
            return fold_build2_loc (loc, code, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                                     fold_build2_loc (loc,
                                                                      BIT_XOR_EXPR,
                                                                      itype,
                                                                      arg01,
                                                                      arg10),
                                                     arg00),
                                    build_zero_cst (itype));

          if (operand_equal_p (arg00, arg10, 0))
            return fold_build2_loc (loc, code, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                                     fold_build2_loc (loc,
                                                                      BIT_XOR_EXPR,
                                                                      itype,
                                                                      arg01,
                                                                      arg11),
                                                     arg00),
                                    build_zero_cst (itype));
        }
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (arg1) == BIT_XOR_EXPR)
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg10 = TREE_OPERAND (arg1, 0);
          tree arg11 = TREE_OPERAND (arg1, 1);
          tree itype = TREE_TYPE (arg0);

          /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
             operand_equal_p guarantees no side-effects so we don't need
             to use omit_one_operand on Z.  */
          if (operand_equal_p (arg01, arg11, 0))
            return fold_build2_loc (loc, code, type, arg00,
                                    fold_convert_loc (loc, TREE_TYPE (arg00),
                                                      arg10));
          if (operand_equal_p (arg01, arg10, 0))
            return fold_build2_loc (loc, code, type, arg00,
                                    fold_convert_loc (loc, TREE_TYPE (arg00),
                                                      arg11));
          if (operand_equal_p (arg00, arg11, 0))
            return fold_build2_loc (loc, code, type, arg01,
                                    fold_convert_loc (loc, TREE_TYPE (arg01),
                                                      arg10));
          if (operand_equal_p (arg00, arg10, 0))
            return fold_build2_loc (loc, code, type, arg01,
                                    fold_convert_loc (loc, TREE_TYPE (arg01),
                                                      arg11));

          /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
          if (TREE_CODE (arg01) == INTEGER_CST
              && TREE_CODE (arg11) == INTEGER_CST)
            {
              tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
                                     fold_convert_loc (loc, itype, arg11));
              tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
              return fold_build2_loc (loc, code, type, tem,
                                      fold_convert_loc (loc, itype, arg10));
            }
        }
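      /* Illustration: "(x ^ z) == (y ^ z)" folds to "x == y", and
         "(x ^ 5) == (y ^ 3)" folds to "(x ^ 6) == y", with 5 ^ 3
         evaluated at compile time.  */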
      /* Attempt to simplify equality/inequality comparisons of complex
         values.  Only lower the comparison if the result is known or
         can be simplified to a single scalar comparison.  */
      if ((TREE_CODE (arg0) == COMPLEX_EXPR
           || TREE_CODE (arg0) == COMPLEX_CST)
          && (TREE_CODE (arg1) == COMPLEX_EXPR
              || TREE_CODE (arg1) == COMPLEX_CST))
        {
          tree real0, imag0, real1, imag1;
          tree rcond, icond;

          if (TREE_CODE (arg0) == COMPLEX_EXPR)
            {
              real0 = TREE_OPERAND (arg0, 0);
              imag0 = TREE_OPERAND (arg0, 1);
            }
          else
            {
              real0 = TREE_REALPART (arg0);
              imag0 = TREE_IMAGPART (arg0);
            }

          if (TREE_CODE (arg1) == COMPLEX_EXPR)
            {
              real1 = TREE_OPERAND (arg1, 0);
              imag1 = TREE_OPERAND (arg1, 1);
            }
          else
            {
              real1 = TREE_REALPART (arg1);
              imag1 = TREE_IMAGPART (arg1);
            }

          rcond = fold_binary_loc (loc, code, type, real0, real1);
          if (rcond && TREE_CODE (rcond) == INTEGER_CST)
            {
              if (integer_zerop (rcond))
                {
                  if (code == EQ_EXPR)
                    return omit_two_operands_loc (loc, type,
                                                  boolean_false_node,
                                                  imag0, imag1);
                  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
                }
              else
                {
                  if (code == NE_EXPR)
                    return omit_two_operands_loc (loc, type,
                                                  boolean_true_node,
                                                  imag0, imag1);
                  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
                }
            }

          icond = fold_binary_loc (loc, code, type, imag0, imag1);
          if (icond && TREE_CODE (icond) == INTEGER_CST)
            {
              if (integer_zerop (icond))
                {
                  if (code == EQ_EXPR)
                    return omit_two_operands_loc (loc, type,
                                                  boolean_false_node,
                                                  real0, real1);
                  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
                }
              else
                {
                  if (code == NE_EXPR)
                    return omit_two_operands_loc (loc, type,
                                                  boolean_true_node,
                                                  real0, real1);
                  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
                }
            }
        }

      return NULL_TREE;
    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
        return tem;

      /* Transform comparisons of the form X +- C CMP X.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
               && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
              || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
        {
          tree arg01 = TREE_OPERAND (arg0, 1);
          enum tree_code code0 = TREE_CODE (arg0);
          int is_positive;

          if (TREE_CODE (arg01) == REAL_CST)
            is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
          else
            is_positive = tree_int_cst_sgn (arg01);

          /* (X - c) > X becomes false.  */
          if (code == GT_EXPR
              && ((code0 == MINUS_EXPR && is_positive >= 0)
                  || (code0 == PLUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that (X - c) > X "
                                        "is always false"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (0, type);
            }

          /* Likewise (X + c) < X becomes false.  */
          if (code == LT_EXPR
              && ((code0 == PLUS_EXPR && is_positive >= 0)
                  || (code0 == MINUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that "
                                        "(X + c) < X is always false"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (0, type);
            }

          /* Convert (X - c) <= X to true.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
              && code == LE_EXPR
              && ((code0 == MINUS_EXPR && is_positive >= 0)
                  || (code0 == PLUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that "
                                        "(X - c) <= X is always true"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (1, type);
            }

          /* Convert (X + c) >= X to true.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
              && code == GE_EXPR
              && ((code0 == PLUS_EXPR && is_positive >= 0)
                  || (code0 == MINUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that "
                                        "(X + c) >= X is always true"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (1, type);
            }
          if (TREE_CODE (arg01) == INTEGER_CST)
            {
              /* Convert X + c > X and X - c < X to true for integers.  */
              if (code == GT_EXPR
                  && ((code0 == PLUS_EXPR && is_positive > 0)
                      || (code0 == MINUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X + c) > X is always true"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (1, type);
                }

              if (code == LT_EXPR
                  && ((code0 == MINUS_EXPR && is_positive > 0)
                      || (code0 == PLUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X - c) < X is always true"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (1, type);
                }

              /* Convert X + c <= X and X - c >= X to false for integers.  */
              if (code == LE_EXPR
                  && ((code0 == PLUS_EXPR && is_positive > 0)
                      || (code0 == MINUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X + c) <= X is always false"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (0, type);
                }

              if (code == GE_EXPR
                  && ((code0 == MINUS_EXPR && is_positive > 0)
                      || (code0 == PLUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X - c) >= X is always false"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (0, type);
                }
            }
        }
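      /* Illustration: when signed overflow is undefined, "x + 1 > x"
         folds to true and "x + 1 <= x" folds to false; each fold first
         calls fold_overflow_warning so -Wstrict-overflow can flag code
         that relied on wrapping.  */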
      /* Comparisons with the highest or lowest possible integer of
         the specified precision will have known values.  */
      {
        tree arg1_type = TREE_TYPE (arg1);
        unsigned int prec = TYPE_PRECISION (arg1_type);

        if (TREE_CODE (arg1) == INTEGER_CST
            && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
          {
            wide_int max = wi::max_value (arg1_type);
            wide_int signed_max = wi::max_value (prec, SIGNED);
            wide_int min = wi::min_value (arg1_type);

            if (wi::eq_p (arg1, max))
              switch (code)
                {
                case GT_EXPR:
                  return omit_one_operand_loc (loc, type, integer_zero_node,
                                               arg0);

                case GE_EXPR:
                  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

                case LE_EXPR:
                  return omit_one_operand_loc (loc, type, integer_one_node,
                                               arg0);

                case LT_EXPR:
                  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

                /* The GE_EXPR and LT_EXPR cases above are not normally
                   reached because of previous transformations.  */

                default:
                  break;
                }

            else if (wi::eq_p (arg1, max - 1))
              switch (code)
                {
                case GT_EXPR:
                  arg1 = const_binop (PLUS_EXPR, arg1,
                                      build_int_cst (TREE_TYPE (arg1), 1));
                  return fold_build2_loc (loc, EQ_EXPR, type,
                                          fold_convert_loc (loc,
                                                            TREE_TYPE (arg1),
                                                            arg0),
                                          arg1);

                case LE_EXPR:
                  arg1 = const_binop (PLUS_EXPR, arg1,
                                      build_int_cst (TREE_TYPE (arg1), 1));
                  return fold_build2_loc (loc, NE_EXPR, type,
                                          fold_convert_loc (loc,
                                                            TREE_TYPE (arg1),
                                                            arg0),
                                          arg1);

                default:
                  break;
                }

            else if (wi::eq_p (arg1, min))
              switch (code)
                {
                case LT_EXPR:
                  return omit_one_operand_loc (loc, type, integer_zero_node,
                                               arg0);

                case LE_EXPR:
                  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

                case GE_EXPR:
                  return omit_one_operand_loc (loc, type, integer_one_node,
                                               arg0);

                case GT_EXPR:
                  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

                default:
                  break;
                }

            else if (wi::eq_p (arg1, min + 1))
              switch (code)
                {
                case GE_EXPR:
                  arg1 = const_binop (MINUS_EXPR, arg1,
                                      build_int_cst (TREE_TYPE (arg1), 1));
                  return fold_build2_loc (loc, NE_EXPR, type,
                                          fold_convert_loc (loc,
                                                            TREE_TYPE (arg1),
                                                            arg0),
                                          arg1);

                case LT_EXPR:
                  arg1 = const_binop (MINUS_EXPR, arg1,
                                      build_int_cst (TREE_TYPE (arg1), 1));
                  return fold_build2_loc (loc, EQ_EXPR, type,
                                          fold_convert_loc (loc,
                                                            TREE_TYPE (arg1),
                                                            arg0),
                                          arg1);

                default:
                  break;
                }

            else if (wi::eq_p (arg1, signed_max)
                     && TYPE_UNSIGNED (arg1_type)
                     /* We will flip the signedness of the comparison operator
                        associated with the mode of arg1, so the sign bit is
                        specified by this mode.  Check that arg1 is the signed
                        max associated with this sign bit.  */
                     && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
                     /* signed_type does not work on pointer types.  */
                     && INTEGRAL_TYPE_P (arg1_type))
              {
                /* The following case also applies to X < signed_max+1
                   and X >= signed_max+1 because of previous
                   transformations.  */
                if (code == LE_EXPR || code == GT_EXPR)
                  {
                    tree st = signed_type_for (arg1_type);
                    return fold_build2_loc (loc,
                                            code == LE_EXPR
                                            ? GE_EXPR : LT_EXPR,
                                            type,
                                            fold_convert_loc (loc, st, arg0),
                                            build_int_cst (st, 0));
                  }
              }
          }
      }
      /* If we are comparing an ABS_EXPR with a constant, we can
         convert all the cases into explicit comparisons, but they may
         well not be faster than doing the ABS and one comparison.
         But ABS (X) <= C is a range comparison, which becomes a subtraction
         and a comparison, and is probably faster.  */
      if (code == LE_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) == ABS_EXPR
          && ! TREE_SIDE_EFFECTS (arg0)
          && (0 != (tem = negate_expr (arg1)))
          && TREE_CODE (tem) == INTEGER_CST
          && !TREE_OVERFLOW (tem))
        return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
                                build2 (GE_EXPR, type,
                                        TREE_OPERAND (arg0, 0), tem),
                                build2 (LE_EXPR, type,
                                        TREE_OPERAND (arg0, 0), arg1));

      /* Convert ABS_EXPR<x> >= 0 to true.  */
      strict_overflow_p = false;
      if (code == GE_EXPR
          && (integer_zerop (arg1)
              || (! HONOR_NANS (element_mode (arg0))
                  && real_zerop (arg1)))
          && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying comparison of "
                                    "absolute value and zero"),
                                   WARN_STRICT_OVERFLOW_CONDITIONAL);
          return omit_one_operand_loc (loc, type,
                                       constant_boolean_node (true, type),
                                       arg0);
        }

      /* Convert ABS_EXPR<x> < 0 to false.  */
      strict_overflow_p = false;
      if (code == LT_EXPR
          && (integer_zerop (arg1) || real_zerop (arg1))
          && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying comparison of "
                                    "absolute value and zero"),
                                   WARN_STRICT_OVERFLOW_CONDITIONAL);
          return omit_one_operand_loc (loc, type,
                                       constant_boolean_node (false, type),
                                       arg0);
        }
      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
         and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
          && TYPE_UNSIGNED (TREE_TYPE (arg0))
          && TREE_CODE (arg1) == LSHIFT_EXPR
          && integer_onep (TREE_OPERAND (arg1, 0)))
        return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
                           build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
                                   TREE_OPERAND (arg1, 1)),
                           build_zero_cst (TREE_TYPE (arg0)));

      /* Similarly for X < (cast) (1 << Y).  But cast can't be narrowing,
         otherwise Y might be >= # of bits in X's type and thus e.g.
         (unsigned char) (1 << Y) for Y 15 might be 0.
         If the cast is widening, then 1 << Y should have unsigned type,
         otherwise if Y is number of bits in the signed shift type minus 1,
         we can't optimize this.  E.g. (unsigned long long) (1 << Y) for Y
         31 might be 0xffffffff80000000.  */
      if ((code == LT_EXPR || code == GE_EXPR)
          && TYPE_UNSIGNED (TREE_TYPE (arg0))
          && CONVERT_EXPR_P (arg1)
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
          && (element_precision (TREE_TYPE (arg1))
              >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
          && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
              || (element_precision (TREE_TYPE (arg1))
                  == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
          && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
        {
          tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
                        TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
          return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
                             fold_convert_loc (loc, TREE_TYPE (arg0), tem),
                             build_zero_cst (TREE_TYPE (arg0)));
        }

      return NULL_TREE;
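      /* Illustration: for unsigned x, "x < (1U << y)" folds to
         "(x >> y) == 0" and "x >= (1U << y)" to "(x >> y) != 0",
         replacing a comparison against a variable power of two with a
         shift and a test against zero.  */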
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
        {
          t1 = fold_relational_const (code, type, arg0, arg1);
          if (t1 != NULL_TREE)
            return t1;
        }

      /* If the first operand is NaN, the result is constant.  */
      if (TREE_CODE (arg0) == REAL_CST
          && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
          && (code != LTGT_EXPR || ! flag_trapping_math))
        {
          t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
               ? integer_zero_node
               : integer_one_node;
          return omit_one_operand_loc (loc, type, t1, arg1);
        }

      /* If the second operand is NaN, the result is constant.  */
      if (TREE_CODE (arg1) == REAL_CST
          && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
          && (code != LTGT_EXPR || ! flag_trapping_math))
        {
          t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
               ? integer_zero_node
               : integer_one_node;
          return omit_one_operand_loc (loc, type, t1, arg0);
        }

      /* Simplify unordered comparison of something with itself.  */
      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
          && operand_equal_p (arg0, arg1, 0))
        return constant_boolean_node (1, type);

      if (code == LTGT_EXPR
          && !flag_trapping_math
          && operand_equal_p (arg0, arg1, 0))
        return constant_boolean_node (0, type);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
        tree targ0 = strip_float_extensions (arg0);
        tree targ1 = strip_float_extensions (arg1);
        tree newtype = TREE_TYPE (targ0);

        if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
          newtype = TREE_TYPE (targ1);

        if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
          return fold_build2_loc (loc, code, type,
                                  fold_convert_loc (loc, newtype, targ0),
                                  fold_convert_loc (loc, newtype, targ1));
      }

      return NULL_TREE;
    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
         nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
        return NULL_TREE;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
                                 : fold_convert_loc (loc, type, arg1);
      return pedantic_non_lvalue_loc (loc, tem);

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg0) == REAL_CST
           && TREE_CODE (arg1) == REAL_CST)
          || (TREE_CODE (arg0) == INTEGER_CST
              && TREE_CODE (arg1) == INTEGER_CST))
        return build_complex (type, arg0, arg1);
      return NULL_TREE;

    case ASSERT_EXPR:
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
      gcc_unreachable ();
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
                    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
        if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts);
        if (!vec_cst_ctor_to_array (arg0, elts)
            || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
          return NULL_TREE;

        for (i = 0; i < nelts; i++)
          {
            elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
                                          ? NOP_EXPR : FIX_TRUNC_EXPR,
                                          TREE_TYPE (type), elts[i]);
            if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
        unsigned int out, ofs, scale;
        tree *elts;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
                    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
        if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts * 4);
        if (!vec_cst_ctor_to_array (arg0, elts)
            || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
          return NULL_TREE;

        if (code == VEC_WIDEN_MULT_LO_EXPR)
          scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
        else if (code == VEC_WIDEN_MULT_HI_EXPR)
          scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
        else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
          scale = 1, ofs = 0;
        else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
          scale = 1, ofs = 1;

        for (out = 0; out < nelts; out++)
          {
            unsigned int in1 = (out << scale) + ofs;
            unsigned int in2 = in1 + nelts * 2;
            tree t1, t2;

            t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
            t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

            if (t1 == NULL_TREE || t2 == NULL_TREE)
              return NULL_TREE;
            elts[out] = const_binop (MULT_EXPR, t1, t2);
            if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
   a LABEL_EXPR; otherwise return NULL_TREE.  Do not check the subtrees
   of GOTO_EXPR.  */

static tree
contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      return *tp;

    case GOTO_EXPR:
      *walk_subtrees = 0;

      /* ... fall through ...  */

    default:
      return NULL_TREE;
    }
}

/* Return whether the sub-tree ST contains a label which is accessible from
   outside the sub-tree.  */

static bool
contains_label_p (tree st)
{
  return
   (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
}
/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_ternary_loc (location_t loc, enum tree_code code, tree type,
                  tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 3);

  /* If this is a commutative operation, and OP0 is a constant, move it
     to OP1 to reduce the number of tests below.  */
  if (commutative_ternary_tree_code (code)
      && tree_swap_operands_p (op0, op1, true))
    return fold_build3_loc (loc, code, type, op1, op0, op2);

  tem = generic_simplify (loc, code, type, op0, op1, op2);
  if (tem)
    return tem;

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any cases,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */
  if (op0)
    {
      arg0 = op0;
      STRIP_NOPS (arg0);
    }

  if (op1)
    {
      arg1 = op1;
      STRIP_NOPS (arg1);
    }

  if (op2)
    {
      arg2 = op2;
      STRIP_NOPS (arg2);
    }

  switch (code)
    {
:
13263 if (TREE_CODE (arg0
) == CONSTRUCTOR
13264 && ! type_contains_placeholder_p (TREE_TYPE (arg0
)))
13266 unsigned HOST_WIDE_INT idx
;
13268 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0
), idx
, field
, value
)
13275 case VEC_COND_EXPR
:
13276 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13277 so all simple results must be passed through pedantic_non_lvalue. */
13278 if (TREE_CODE (arg0
) == INTEGER_CST
)
13280 tree unused_op
= integer_zerop (arg0
) ? op1
: op2
;
13281 tem
= integer_zerop (arg0
) ? op2
: op1
;
13282 /* Only optimize constant conditions when the selected branch
13283 has the same type as the COND_EXPR. This avoids optimizing
13284 away "c ? x : throw", where the throw has a void type.
13285 Avoid throwing away that operand which contains label. */
13286 if ((!TREE_SIDE_EFFECTS (unused_op
)
13287 || !contains_label_p (unused_op
))
13288 && (! VOID_TYPE_P (TREE_TYPE (tem
))
13289 || VOID_TYPE_P (type
)))
13290 return pedantic_non_lvalue_loc (loc
, tem
);
13293 else if (TREE_CODE (arg0
) == VECTOR_CST
)
13295 if ((TREE_CODE (arg1
) == VECTOR_CST
13296 || TREE_CODE (arg1
) == CONSTRUCTOR
)
13297 && (TREE_CODE (arg2
) == VECTOR_CST
13298 || TREE_CODE (arg2
) == CONSTRUCTOR
))
13300 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
13301 unsigned char *sel
= XALLOCAVEC (unsigned char, nelts
);
13302 gcc_assert (nelts
== VECTOR_CST_NELTS (arg0
));
13303 for (i
= 0; i
< nelts
; i
++)
13305 tree val
= VECTOR_CST_ELT (arg0
, i
);
13306 if (integer_all_onesp (val
))
13308 else if (integer_zerop (val
))
13309 sel
[i
] = nelts
+ i
;
13310 else /* Currently unreachable. */
13313 tree t
= fold_vec_perm (type
, arg1
, arg2
, sel
);
13314 if (t
!= NULL_TREE
)
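      /* Example: for a four-element mask { -1, 0, -1, 0 } the loop above
	 produces the selector { 0, 5, 2, 7 }: elements 0 and 2 are taken
	 from ARG1 and elements 1 and 3 from ARG2 (offset by NELTS), which
	 fold_vec_perm then resolves element-wise.  */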
      /* If we have A op B ? A : C, we may be able to convert this to a
	 simpler expression, depending on the operation and the values
	 of B and C.  Signed zeros prevent all of these transformations,
	 for reasons given above each one.

	 Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     arg1, TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
	{
	  tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
	  if (tem)
	    return tem;
	}

      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     op2,
					     TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (element_mode (op2)))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  tem = fold_invert_truthvalue (loc0, arg0);
	  if (tem && COMPARISON_CLASS_P (tem))
	    {
	      tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
	      if (tem)
		return tem;
	    }
	}

      /* If the second operand is simpler than the third, swap them
	 since that produces better jump optimization results.  */
      if (truth_value_p (TREE_CODE (arg0))
	  && tree_swap_operands_p (op1, op2, false))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* See if this can be inverted.  If it can't, possibly because
	     it was a floating-point inequality comparison, don't do
	     anything.  */
	  tem = fold_invert_truthvalue (loc0, arg0);
	  if (tem)
	    return fold_build3_loc (loc, code, type, tem, op2, op1);
	}

      /* Convert A ? 1 : 0 to simply A.  */
      if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
				 : (integer_onep (op1)
				    && !VECTOR_TYPE_P (type)))
	  && integer_zerop (op2)
	  /* If we try to convert OP0 to our type, the
	     call to fold will try to move the conversion inside
	     a COND, which will recurse.  In that case, the COND_EXPR
	     is probably the best choice, so leave it alone.  */
	  && type == TREE_TYPE (arg0))
	return pedantic_non_lvalue_loc (loc, arg0);
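      /* Example: a _Bool-valued "flag ? 1 : 0" folds directly to FLAG
	 here, provided the COND_EXPR already has the condition's type so
	 that no conversion has to be pushed inside.  */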
      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
	 over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
	  && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
				    : (integer_onep (op2)
				       && !VECTOR_TYPE_P (type)))
	  && truth_value_p (TREE_CODE (arg0)))
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
					  invert_truthvalue_loc (loc,
								 arg0)));

      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
      if (TREE_CODE (arg0) == LT_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
	{
	  /* sign_bit_p looks through both zero and sign extensions,
	     but for this optimization only sign extensions are
	     usable.  */
	  tree tem2 = TREE_OPERAND (arg0, 0);
	  while (tem != tem2)
	    {
	      if (TREE_CODE (tem2) != NOP_EXPR
		  || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
		{
		  tem = NULL_TREE;
		  break;
		}
	      tem2 = TREE_OPERAND (tem2, 0);
	    }
	  /* sign_bit_p only checks ARG1 bits within A's precision.
	     If <sign bit of A> has wider type than A, bits outside
	     of A's precision in <sign bit of A> need to be checked.
	     If they are all 0, this optimization needs to be done
	     in unsigned A's type, if they are all 1 in signed A's type,
	     otherwise this can't be done.  */
	  if (tem
	      && TYPE_PRECISION (TREE_TYPE (tem))
		 < TYPE_PRECISION (TREE_TYPE (arg1))
	      && TYPE_PRECISION (TREE_TYPE (tem))
		 < TYPE_PRECISION (type))
	    {
	      int inner_width, outer_width;
	      tree tem_type;

	      inner_width = TYPE_PRECISION (TREE_TYPE (tem));
	      outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
	      if (outer_width > TYPE_PRECISION (type))
		outer_width = TYPE_PRECISION (type);

	      wide_int mask = wi::shifted_mask
		(inner_width, outer_width - inner_width, false,
		 TYPE_PRECISION (TREE_TYPE (arg1)));

	      wide_int common = mask & arg1;
	      if (common == mask)
		{
		  tem_type = signed_type_for (TREE_TYPE (tem));
		  tem = fold_convert_loc (loc, tem_type, tem);
		}
	      else if (common == 0)
		{
		  tem_type = unsigned_type_for (TREE_TYPE (tem));
		  tem = fold_convert_loc (loc, tem_type, tem);
		}
	      else
		tem = NULL_TREE;
	    }

	  if (tem)
	    return
	      fold_convert_loc (loc, type,
				fold_build2_loc (loc, BIT_AND_EXPR,
						 TREE_TYPE (tem), tem,
						 fold_convert_loc (loc,
								   TREE_TYPE (tem),
								   arg1)));
	}

      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
	 already handled above.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && integer_pow2p (arg1))
	{
	  tree tem = TREE_OPERAND (arg0, 0);
	  STRIP_NOPS (tem);
	  if (TREE_CODE (tem) == RSHIFT_EXPR
	      && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
	      && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
		 tree_to_uhwi (TREE_OPERAND (tem, 1)))
	    return fold_build2_loc (loc, BIT_AND_EXPR, type,
				    TREE_OPERAND (tem, 0), arg1);
	}
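      /* Example: "(a >> 3) & 1 ? 8 : 0" satisfies the checks above with
	 N == 3 and ARG1 == 1 << 3, so it folds to "a & 8", removing both
	 the shift and the selection.  */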
      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
	 is probably obsolete because the first operand should be a
	 truth value (that's why we have the two cases above), but let's
	 leave it in until we can confirm this for all front-ends.  */
      if (integer_zerop (op2)
	  && TREE_CODE (arg0) == NE_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_pow2p (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      arg1, OEP_ONLY_CONST))
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
							  TREE_OPERAND (arg0, 0)));

      /* Disable the transformations below for vectors, since
	 fold_binary_op_with_conditional_arg may undo them immediately,
	 yielding an infinite loop.  */
      if (code == VEC_COND_EXPR)
	return NULL_TREE;

      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1))
	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
	return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
							   : TRUTH_ANDIF_EXPR,
				type, fold_convert_loc (loc, type, arg0), arg1);

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1))
	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_invert_truthvalue (loc0, arg0);
	  if (tem)
	    return fold_build2_loc (loc, code == VEC_COND_EXPR
					 ? BIT_IOR_EXPR
					 : TRUTH_ORIF_EXPR,
				    type, fold_convert_loc (loc, type, tem),
				    arg1);
	}

      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
      if (integer_zerop (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2))
	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_invert_truthvalue (loc0, arg0);
	  if (tem)
	    return fold_build2_loc (loc, code == VEC_COND_EXPR
					 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
				    type, fold_convert_loc (loc, type, tem),
				    op2);
	}

      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2))
	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
	return fold_build2_loc (loc, code == VEC_COND_EXPR
				     ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
				type, fold_convert_loc (loc, type, arg0), op2);

      return NULL_TREE;
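      /* Summarizing the four rewrites above for truth values:

	   a ? b : 0  ->  a && b
	   a ? b : 1  ->  !a || b
	   a ? 0 : b  ->  !a && b
	   a ? 1 : b  ->  a || b

	 with BIT_AND_EXPR/BIT_IOR_EXPR substituted for the short-circuit
	 codes when the expression is a VEC_COND_EXPR.  */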
    case CALL_EXPR:
      /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
	 of fold_ternary on them.  */
      gcc_unreachable ();
    case BIT_FIELD_REF:
      if ((TREE_CODE (arg0) == VECTOR_CST
	   || (TREE_CODE (arg0) == CONSTRUCTOR
	       && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
	  && (type == TREE_TYPE (TREE_TYPE (arg0))
	      || (TREE_CODE (type) == VECTOR_TYPE
		  && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
	{
	  tree eltype = TREE_TYPE (TREE_TYPE (arg0));
	  unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
	  unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
	  unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);

	  if (n != 0
	      && (idx % width) == 0
	      && (n % width) == 0
	      && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
	    {
	      idx = idx / width;
	      n = n / width;

	      if (TREE_CODE (arg0) == VECTOR_CST)
		{
		  if (n == 1)
		    return VECTOR_CST_ELT (arg0, idx);

		  tree *vals = XALLOCAVEC (tree, n);
		  for (unsigned i = 0; i < n; ++i)
		    vals[i] = VECTOR_CST_ELT (arg0, idx + i);
		  return build_vector (type, vals);
		}
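	      /* Example: extracting 64 bits at bit offset 64 from the
		 V4SI constant { 1, 2, 3, 4 } gives WIDTH == 32, IDX == 2
		 and N == 2 after the divisions, so the code above folds
		 the reference to the V2SI constant { 3, 4 }.  */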
	      /* Constructor elements can be subvectors.  */
	      unsigned HOST_WIDE_INT k = 1;
	      if (CONSTRUCTOR_NELTS (arg0) != 0)
		{
		  tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
		  if (TREE_CODE (cons_elem) == VECTOR_TYPE)
		    k = TYPE_VECTOR_SUBPARTS (cons_elem);
		}

	      /* We keep an exact subset of the constructor elements.  */
	      if ((idx % k) == 0 && (n % k) == 0)
		{
		  if (CONSTRUCTOR_NELTS (arg0) == 0)
		    return build_constructor (type, NULL);
		  idx /= k;
		  n /= k;
		  if (n == 1)
		    {
		      if (idx < CONSTRUCTOR_NELTS (arg0))
			return CONSTRUCTOR_ELT (arg0, idx)->value;
		      return build_zero_cst (type);
		    }
		  vec<constructor_elt, va_gc> *vals;
		  vec_alloc (vals, n);
		  for (unsigned i = 0;
		       i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
		       ++i)
		    CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
					    CONSTRUCTOR_ELT
					      (arg0, idx + i)->value);
		  return build_constructor (type, vals);
		}
	      /* The bitfield references a single constructor element.  */
	      else if (idx + n <= (idx / k + 1) * k)
		{
		  if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
		    return build_zero_cst (type);
		  else if (n == k)
		    return CONSTRUCTOR_ELT (arg0, idx / k)->value;
		  else
		    return fold_build3_loc (loc, code, type,
		      CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
		      build_int_cst (TREE_TYPE (op2), (idx % k) * width));
		}
	    }
	}
      /* A bit-field-ref that referenced the full argument can be stripped.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	  && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
	  && integer_zerop (op2))
	return fold_convert_loc (loc, type, arg0);

      /* On constants we can use native encode/interpret to constant
	 fold (nearly) all BIT_FIELD_REFs.  */
      if (CONSTANT_CLASS_P (arg0)
	  && can_native_interpret_type_p (type)
	  && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
	  /* This limitation should not be necessary, we just need to
	     round this up to mode size.  */
	  && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
	  /* Need bit-shifting of the buffer to relax the following.  */
	  && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
	{
	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
	  unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
	  unsigned HOST_WIDE_INT clen;
	  clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
	  /* ??? We cannot tell native_encode_expr to start at
	     some random byte only.  So limit us to a reasonable amount
	     of work.  */
	  if (clen <= 4096)
	    {
	      unsigned char *b = XALLOCAVEC (unsigned char, clen);
	      unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
	      if (len > 0
		  && len * BITS_PER_UNIT >= bitpos + bitsize)
		{
		  tree v = native_interpret_expr (type,
						  b + bitpos / BITS_PER_UNIT,
						  bitsize / BITS_PER_UNIT);
		  if (v)
		    return v;
		}
	    }
	}

      return NULL_TREE;
    case FMA_EXPR:
      /* For integers we can decompose the FMA if possible.  */
      if (TREE_CODE (arg0) == INTEGER_CST
	  && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2_loc (loc, PLUS_EXPR, type,
				const_binop (MULT_EXPR, arg0, arg1), arg2);
      if (integer_zerop (arg2))
	return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);

      return fold_fma (loc, type, arg0, arg1, arg2);
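      /* Example: FMA_EXPR <3, 4, c> becomes PLUS_EXPR <12, c> via
	 const_binop, and FMA_EXPR <a, b, 0> degenerates to
	 MULT_EXPR <a, b>; everything else is left to fold_fma.  */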
    case VEC_PERM_EXPR:
      if (TREE_CODE (arg2) == VECTOR_CST)
	{
	  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
	  unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
	  unsigned char *sel2 = sel + nelts;
	  bool need_mask_canon = false;
	  bool need_mask_canon2 = false;
	  bool all_in_vec0 = true;
	  bool all_in_vec1 = true;
	  bool maybe_identity = true;
	  bool single_arg = (op0 == op1);
	  bool changed = false;

	  mask2 = 2 * nelts - 1;
	  mask = single_arg ? (nelts - 1) : mask2;
	  gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
	  for (i = 0; i < nelts; i++)
	    {
	      tree val = VECTOR_CST_ELT (arg2, i);
	      if (TREE_CODE (val) != INTEGER_CST)
		return NULL_TREE;

	      /* Make sure that the perm value is in an acceptable
		 range.  */
	      wide_int t = val;
	      need_mask_canon |= wi::gtu_p (t, mask);
	      need_mask_canon2 |= wi::gtu_p (t, mask2);
	      sel[i] = t.to_uhwi () & mask;
	      sel2[i] = t.to_uhwi () & mask2;

	      if (sel[i] < nelts)
		all_in_vec1 = false;
	      else
		all_in_vec0 = false;

	      if ((sel[i] & (nelts-1)) != i)
		maybe_identity = false;
	    }

	  if (maybe_identity)
	    {
	      if (all_in_vec0)
		return op0;
	      if (all_in_vec1)
		return op1;
	    }

	  if (all_in_vec0)
	    op1 = op0;
	  else if (all_in_vec1)
	    {
	      op0 = op1;
	      for (i = 0; i < nelts; i++)
		sel[i] -= nelts;
	      need_mask_canon = true;
	    }

	  if ((TREE_CODE (op0) == VECTOR_CST
	       || TREE_CODE (op0) == CONSTRUCTOR)
	      && (TREE_CODE (op1) == VECTOR_CST
		  || TREE_CODE (op1) == CONSTRUCTOR))
	    {
	      tree t = fold_vec_perm (type, op0, op1, sel);
	      if (t != NULL_TREE)
		return t;
	    }

	  if (op0 == op1 && !single_arg)
	    changed = true;

	  /* Some targets are deficient and fail to expand a single
	     argument permutation while still allowing an equivalent
	     2-argument version.  */
	  if (need_mask_canon && arg2 == op2
	      && !can_vec_perm_p (TYPE_MODE (type), false, sel)
	      && can_vec_perm_p (TYPE_MODE (type), false, sel2))
	    {
	      need_mask_canon = need_mask_canon2;
	      sel = sel2;
	    }

	  if (need_mask_canon && arg2 == op2)
	    {
	      tree *tsel = XALLOCAVEC (tree, nelts);
	      tree eltype = TREE_TYPE (TREE_TYPE (arg2));
	      for (i = 0; i < nelts; i++)
		tsel[i] = build_int_cst (eltype, sel[i]);
	      op2 = build_vector (TREE_TYPE (arg2), tsel);
	      changed = true;
	    }

	  if (changed)
	    return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
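/* Example: VEC_PERM_EXPR <v, v, { 3, 2 }> on two-element vectors uses a
   single input, so the selector values are reduced modulo NELTS to
   { 1, 0 } and the mask vector is rebuilt; this also helps targets that
   only expand the two-argument form of the permutation.  */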
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;
  location_t loc = EXPR_LOCATION (expr);

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  /* CALL_EXPR-like objects with variable numbers of operands are
     treated specially.  */
  if (kind == tcc_vl_exp)
    {
      if (code == CALL_EXPR)
	{
	  tem = fold_call_expr (loc, expr, false);
	  return tem ? tem : expr;
	}
      return expr;
    }

  if (IS_EXPR_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);
      tree op0, op1, op2;

      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  op0 = TREE_OPERAND (t, 0);
	  tem = fold_unary_loc (loc, code, type, op0);
	  return tem ? tem : expr;
	case 2:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  tem = fold_binary_loc (loc, code, type, op0, op1);
	  return tem ? tem : expr;
	case 3:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  op2 = TREE_OPERAND (t, 2);
	  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
	  return tem ? tem : expr;
	default:
	  break;
	}
    }

  switch (code)
    {
    case ARRAY_REF:
      {
	tree op0 = TREE_OPERAND (t, 0);
	tree op1 = TREE_OPERAND (t, 1);

	if (TREE_CODE (op1) == INTEGER_CST
	    && TREE_CODE (op0) == CONSTRUCTOR
	    && ! type_contains_placeholder_p (TREE_TYPE (op0)))
	  {
	    vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
	    unsigned HOST_WIDE_INT end = vec_safe_length (elts);
	    unsigned HOST_WIDE_INT begin = 0;

	    /* Find a matching index by means of a binary search.  */
	    while (begin != end)
	      {
		unsigned HOST_WIDE_INT middle = (begin + end) / 2;
		tree index = (*elts)[middle].index;

		if (TREE_CODE (index) == INTEGER_CST
		    && tree_int_cst_lt (index, op1))
		  begin = middle + 1;
		else if (TREE_CODE (index) == INTEGER_CST
			 && tree_int_cst_lt (op1, index))
		  end = middle;
		else if (TREE_CODE (index) == RANGE_EXPR
			 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
		  begin = middle + 1;
		else if (TREE_CODE (index) == RANGE_EXPR
			 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
		  end = middle;
		else
		  return (*elts)[middle].value;
	      }
	  }

	return t;
      }

      /* Return a VECTOR_CST if possible.  */
    case CONSTRUCTOR:
      {
	tree type = TREE_TYPE (t);
	if (TREE_CODE (type) != VECTOR_TYPE)
	  return t;

	tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
	unsigned HOST_WIDE_INT idx, pos = 0;
	tree value;

	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
	  {
	    if (!CONSTANT_CLASS_P (value))
	      return t;
	    if (TREE_CODE (value) == VECTOR_CST)
	      {
		for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
		  vec[pos++] = VECTOR_CST_ELT (value, i);
	      }
	    else
	      vec[pos++] = value;
	  }
	for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
	  vec[pos] = build_zero_cst (TREE_TYPE (type));

	return build_vector (type, vec);
      }

    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    default:
      return t;
    } /* switch (code) */
}
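/* Example of the dispatch above: folding the freshly built tree

     build2 (PLUS_EXPR, integer_type_node,
	     build_int_cst (integer_type_node, 1),
	     build_int_cst (integer_type_node, 2))

   routes through fold_binary_loc and yields the INTEGER_CST 3; if no
   simplification applies, the original EXPR is returned unchanged.  */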
#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (const_tree, struct md5_ctx *,
				hash_table<pointer_hash<const tree_node> > *);
static void fold_check_failed (const_tree, const_tree);
void print_fold_checksum (const_tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */

tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  hash_table<pointer_hash<const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before);
  ht.empty ();

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}
void
print_fold_checksum (const_tree expr)
{
  struct md5_ctx ctx;
  unsigned char checksum[16], cnt;
  hash_table<pointer_hash<const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum);
  for (cnt = 0; cnt < 16; ++cnt)
    fprintf (stderr, "%02x", checksum[cnt]);
  putc ('\n', stderr);
}
static void
fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}
static void
fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
		    hash_table<pointer_hash<const tree_node> > *ht)
{
  const tree_node **slot;
  enum tree_code code;
  union tree_node buf;
  int i, len;

 recursive_label:
  if (expr == NULL)
    return;
  slot = ht->find_slot (expr, INSERT);
  if (*slot)
    return;
  *slot = expr;
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
      expr = (tree) &buf;
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
	   && (TYPE_POINTER_TO (expr)
	       || TYPE_REFERENCE_TO (expr)
	       || TYPE_CACHED_VALUES_P (expr)
	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
	       || TYPE_NEXT_VARIANT (expr)))
    {
      /* Allow these fields to be modified.  */
      tree tmp;
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = tmp = (tree) &buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
      TYPE_POINTER_TO (tmp) = NULL;
      TYPE_REFERENCE_TO (tmp) = NULL;
      TYPE_NEXT_VARIANT (tmp) = NULL;
      if (TYPE_CACHED_VALUES_P (tmp))
	{
	  TYPE_CACHED_VALUES_P (tmp) = 0;
	  TYPE_CACHED_VALUES (tmp) = NULL;
	}
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST
      && code != SSA_NAME
      && CODE_CONTAINS_STRUCT (code, TS_COMMON))
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
	{
	case STRING_CST:
	  md5_process_bytes (TREE_STRING_POINTER (expr),
			     TREE_STRING_LENGTH (expr), ctx);
	  break;
	case COMPLEX_CST:
	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
	  break;
	case VECTOR_CST:
	  for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
	    fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_exceptional:
      switch (code)
	{
	case TREE_LIST:
	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
	  expr = TREE_CHAIN (expr);
	  goto recursive_label;
	  break;
	case TREE_VEC:
	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
    case tcc_vl_exp:
      len = TREE_OPERAND_LENGTH (expr);
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
	{
	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
	}
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
	{
	  if (TREE_CODE (expr) == FUNCTION_DECL)
	    {
	      fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
	      fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
	    }
	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
	}
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
	  || SCALAR_FLOAT_TYPE_P (expr))
	{
	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
	}
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
	  || TREE_CODE (expr) == UNION_TYPE
	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}
/* Helper function for outputting the checksum of a tree T.  When
   debugging with gdb, you can "define mynext" to be "next" followed
   by "call debug_fold_checksum (op0)", then just trace down till the
   outputs differ.  */

DEBUG_FUNCTION void
debug_fold_checksum (const_tree t)
{
  int i;
  unsigned char checksum[16];
  struct md5_ctx ctx;
  hash_table<pointer_hash<const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (t, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum);

  for (i = 0; i < 16; i++)
    fprintf (stderr, "%d ", checksum[i]);

  fprintf (stderr, "\n");
}

#endif
/* Fold a unary tree expression with code CODE of type TYPE with an
   operand OP0.  LOC is the location of the resulting expression.
   Return a folded expression if successful.  Otherwise, return a tree
   expression with code CODE of type TYPE with an operand OP0.  */

tree
fold_build1_stat_loc (location_t loc,
		      enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before[16], checksum_after[16];
  struct md5_ctx ctx;
  hash_table<pointer_hash<const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before);
  ht.empty ();
#endif

  tem = fold_unary_loc (loc, code, type, op0);
  if (!tem)
    tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (op0, tem);
#endif
  return tem;
}
/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting
   expression.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with operands
   OP0 and OP1.  */

tree
fold_build2_stat_loc (location_t loc,
		      enum tree_code code, tree type, tree op0, tree op1
		      MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
		checksum_before_op1[16],
		checksum_after_op0[16],
		checksum_after_op1[16];
  struct md5_ctx ctx;
  hash_table<pointer_hash<const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  ht.empty ();
#endif

  tem = fold_binary_loc (loc, code, type, op0, op1);
  if (!tem)
    tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  ht.empty ();

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op1);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
#endif
  return tem;
}
/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */

tree
fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
		      tree op0, tree op1, tree op2 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
		checksum_before_op1[16],
		checksum_before_op2[16],
		checksum_after_op0[16],
		checksum_after_op1[16],
		checksum_after_op2[16];
  struct md5_ctx ctx;
  hash_table<pointer_hash<const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  ht.empty ();
#endif

  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
  if (!tem)
    tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  ht.empty ();

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  ht.empty ();

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op2);

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}
/* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
   arguments in ARGARRAY, and a null static chain.
   Return a folded expression if successful.  Otherwise, return a CALL_EXPR
   of type TYPE from the given operands as constructed by build_call_array.  */

tree
fold_build_call_array_loc (location_t loc, tree type, tree fn,
			   int nargs, tree *argarray)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_fn[16],
		checksum_before_arglist[16],
		checksum_after_fn[16],
		checksum_after_arglist[16];
  struct md5_ctx ctx;
  hash_table<pointer_hash<const tree_node> > ht (32);
  int i;

  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_fn);
  ht.empty ();

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_arglist);
  ht.empty ();
#endif

  tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_fn);
  ht.empty ();

  if (memcmp (checksum_before_fn, checksum_after_fn, 16))
    fold_check_failed (fn, tem);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_arglist);

  if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
    fold_check_failed (NULL_TREE, tem);
#endif
  return tem;
}
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;

tree
fold_build1_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build1_loc (loc, code, type, op);

  END_FOLD_INIT;
  return result;
}

tree
fold_build2_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op0, tree op1)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build2_loc (loc, code, type, op0, op1);

  END_FOLD_INIT;
  return result;
}

tree
fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
				       int nargs, tree *argarray)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);

  END_FOLD_INIT;
  return result;
}

#undef START_FOLD_INIT
#undef END_FOLD_INIT
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */

int
multiple_of_p (tree type, const_tree top, const_tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
	return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
	{
	  tree op1, t1;

	  op1 = TREE_OPERAND (top, 1);
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
	      && 0 != (t1 = fold_convert (type,
					  const_binop (LSHIFT_EXPR,
						       size_one_node,
						       op1)))
	      && !TREE_OVERFLOW (t1))
	    return multiple_of_p (type, t1, bottom);
	}
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
	  || (TYPE_PRECISION (type)
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
	return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case COND_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
	  || integer_zerop (bottom)
	  || (TYPE_UNSIGNED (type)
	      && (tree_int_cst_sgn (top) < 0
		  || tree_int_cst_sgn (bottom) < 0)))
	return 0;
      return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
				SIGNED);

    default:
      return 0;
    }
}
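/* Example: multiple_of_p (sizetype, <i * 8 + 16>, <8>) returns 1
   because both PLUS_EXPR operands are themselves multiples of 8,
   whereas anything the recursion cannot prove simply yields 0.  */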
/* Return true if CODE or TYPE is known to be non-negative.  */

static bool
tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
{
  if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
      && truth_value_p (code))
    /* Truth values evaluate to 0 or 1, which is nonnegative unless we
       have a signed:1 type (where the value is -1 and 0).  */
    return true;

  return false;
}

/* Return true if (CODE OP0) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
				bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
	 ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!INTEGRAL_TYPE_P (type))
	return true;
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  *strict_overflow_p = true;
	  return true;
	}
      break;

    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
					    strict_overflow_p);

    CASE_CONVERT:
      {
	tree inner_type = TREE_TYPE (op0);
	tree outer_type = type;

	if (TREE_CODE (outer_type) == REAL_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_warnv_p (op0,
						    strict_overflow_p);
	    if (INTEGRAL_TYPE_P (inner_type))
	      {
		if (TYPE_UNSIGNED (inner_type))
		  return true;
		return tree_expr_nonnegative_warnv_p (op0,
						      strict_overflow_p);
	      }
	  }
	else if (INTEGRAL_TYPE_P (outer_type))
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_warnv_p (op0,
						    strict_overflow_p);
	    if (INTEGRAL_TYPE_P (inner_type))
	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
		     && TYPE_UNSIGNED (inner_type);
	  }
      }
      break;

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}

/* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
				 tree op1, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type))
	return (tree_expr_nonnegative_warnv_p (op0,
					       strict_overflow_p)
		&& tree_expr_nonnegative_warnv_p (op1,
						  strict_overflow_p));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
	 both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TREE_CODE (op0) == NOP_EXPR
	  && TREE_CODE (op1) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    {
	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
				       TYPE_PRECISION (inner2)) + 1;
	      return prec < TYPE_PRECISION (type);
	    }
	}
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* x * x is always non-negative for floating point x
	     or without overflow.  */
	  if (operand_equal_p (op0, op1, 0)
	      || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
		  && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (type))
		*strict_overflow_p = true;
	      return true;
	    }
	}

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
	  && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
	{
	  tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
	    ? TREE_TYPE (TREE_OPERAND (op0, 0))
	    : TREE_TYPE (op0);
	  tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
	    ? TREE_TYPE (TREE_OPERAND (op1, 0))
	    : TREE_TYPE (op1);

	  bool unsigned0 = TYPE_UNSIGNED (inner0);
	  bool unsigned1 = TYPE_UNSIGNED (inner1);

	  if (TREE_CODE (op0) == INTEGER_CST)
	    unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;

	  if (TREE_CODE (op1) == INTEGER_CST)
	    unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;

	  if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
	      && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
	    {
	      unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
		? tree_int_cst_min_precision (op0, UNSIGNED)
		: TYPE_PRECISION (inner0);

	      unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
		? tree_int_cst_min_precision (op1, UNSIGNED)
		: TYPE_PRECISION (inner1);

	      return precision0 + precision1 < TYPE_PRECISION (type);
	    }
	}
      return false;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
					     strict_overflow_p)
	      || tree_expr_nonnegative_warnv_p (op1,
						strict_overflow_p));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
					     strict_overflow_p)
	      && tree_expr_nonnegative_warnv_p (op1,
						strict_overflow_p));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
					    strict_overflow_p);
    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
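/* Example: for 8-bit unsigned operands widened to a 32-bit sum,
   "(int) (unsigned char) a + (int) (unsigned char) b" needs at most
   MAX (8, 8) + 1 == 9 bits, so the PLUS_EXPR rule above proves the
   result non-negative without any overflow assumption.  */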
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case FIXED_CST:
      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));

    case COND_EXPR:
      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
					     strict_overflow_p)
	      && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
						strict_overflow_p));

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t));
    }
  /* We don't know sign of `t', so be conservative and return false.  */
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_call_nonnegative_warnv_p (tree type, tree fndecl,
			       tree arg0, tree arg1, bool *strict_overflow_p)
{
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
	CASE_FLT_FN (BUILT_IN_ACOS):
	CASE_FLT_FN (BUILT_IN_ACOSH):
	CASE_FLT_FN (BUILT_IN_CABS):
	CASE_FLT_FN (BUILT_IN_COSH):
	CASE_FLT_FN (BUILT_IN_ERFC):
	CASE_FLT_FN (BUILT_IN_EXP):
	CASE_FLT_FN (BUILT_IN_EXP10):
	CASE_FLT_FN (BUILT_IN_EXP2):
	CASE_FLT_FN (BUILT_IN_FABS):
	CASE_FLT_FN (BUILT_IN_FDIM):
	CASE_FLT_FN (BUILT_IN_HYPOT):
	CASE_FLT_FN (BUILT_IN_POW10):
	CASE_INT_FN (BUILT_IN_FFS):
	CASE_INT_FN (BUILT_IN_PARITY):
	CASE_INT_FN (BUILT_IN_POPCOUNT):
	CASE_INT_FN (BUILT_IN_CLZ):
	CASE_INT_FN (BUILT_IN_CLRSB):
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
	/* Always true.  */
	return true;

	CASE_FLT_FN (BUILT_IN_SQRT):
	/* sqrt(-0.0) is -0.0.  */
	if (!HONOR_SIGNED_ZEROS (element_mode (type)))
	  return true;
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_ASINH):
	CASE_FLT_FN (BUILT_IN_ATAN):
	CASE_FLT_FN (BUILT_IN_ATANH):
	CASE_FLT_FN (BUILT_IN_CBRT):
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_ERF):
	CASE_FLT_FN (BUILT_IN_EXPM1):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_FMOD):
	CASE_FLT_FN (BUILT_IN_FREXP):
	CASE_FLT_FN (BUILT_IN_ICEIL):
	CASE_FLT_FN (BUILT_IN_IFLOOR):
	CASE_FLT_FN (BUILT_IN_IRINT):
	CASE_FLT_FN (BUILT_IN_IROUND):
	CASE_FLT_FN (BUILT_IN_LCEIL):
	CASE_FLT_FN (BUILT_IN_LDEXP):
	CASE_FLT_FN (BUILT_IN_LFLOOR):
	CASE_FLT_FN (BUILT_IN_LLCEIL):
	CASE_FLT_FN (BUILT_IN_LLFLOOR):
	CASE_FLT_FN (BUILT_IN_LLRINT):
	CASE_FLT_FN (BUILT_IN_LLROUND):
	CASE_FLT_FN (BUILT_IN_LRINT):
	CASE_FLT_FN (BUILT_IN_LROUND):
	CASE_FLT_FN (BUILT_IN_MODF):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_SCALB):
	CASE_FLT_FN (BUILT_IN_SCALBLN):
	CASE_FLT_FN (BUILT_IN_SCALBN):
	CASE_FLT_FN (BUILT_IN_SIGNBIT):
	CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
	CASE_FLT_FN (BUILT_IN_SINH):
	CASE_FLT_FN (BUILT_IN_TANH):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	/* True if the 1st argument is nonnegative.  */
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_FMAX):
	/* True if the 1st OR 2nd arguments are nonnegative.  */
	return (tree_expr_nonnegative_warnv_p (arg0,
					       strict_overflow_p)
		|| (tree_expr_nonnegative_warnv_p (arg1,
						   strict_overflow_p)));

	CASE_FLT_FN (BUILT_IN_FMIN):
	/* True if the 1st AND 2nd arguments are nonnegative.  */
	return (tree_expr_nonnegative_warnv_p (arg0,
					       strict_overflow_p)
		&& (tree_expr_nonnegative_warnv_p (arg1,
						   strict_overflow_p)));

	CASE_FLT_FN (BUILT_IN_COPYSIGN):
	/* True if the 2nd argument is nonnegative.  */
	return tree_expr_nonnegative_warnv_p (arg1,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_POWI):
	/* True if the 1st argument is nonnegative or the second
	   argument is an even integer.  */
	if (TREE_CODE (arg1) == INTEGER_CST
	    && (TREE_INT_CST_LOW (arg1) & 1) == 0)
	  return true;
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_POW):
	/* True if the 1st argument is nonnegative or the second
	   argument is an even integer valued real.  */
	if (TREE_CODE (arg1) == REAL_CST)
	  {
	    REAL_VALUE_TYPE c;
	    HOST_WIDE_INT n;

	    c = TREE_REAL_CST (arg1);
	    n = real_to_integer (&c);
	    if ((n & 1) == 0)
	      {
		REAL_VALUE_TYPE cint;
		real_from_integer (&cint, VOIDmode, n, SIGNED);
		if (real_identical (&c, &cint))
		  return true;
	      }
	  }
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

      default:
	break;
      }
  return tree_simple_nonnegative_warnv_p (CALL_EXPR,
					  type);
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

static bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code = TREE_CODE (t);
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (code)
    {
    case TARGET_EXPR:
      {
	tree temp = TARGET_EXPR_SLOT (t);
	t = TARGET_EXPR_INITIAL (t);

	/* If the initializer is non-void, then it's a normal expression
	   that will be assigned to the slot.  */
	if (!VOID_TYPE_P (t))
	  return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);

	/* Otherwise, the initializer sets the slot in some way.  One common
	   way is an assignment statement at the end of the initializer.  */
	while (1)
	  {
	    if (TREE_CODE (t) == BIND_EXPR)
	      t = expr_last (BIND_EXPR_BODY (t));
	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
		     || TREE_CODE (t) == TRY_CATCH_EXPR)
	      t = expr_last (TREE_OPERAND (t, 0));
	    else if (TREE_CODE (t) == STATEMENT_LIST)
	      t = expr_last (t);
	    else
	      break;
	  }
	if (TREE_CODE (t) == MODIFY_EXPR
	    && TREE_OPERAND (t, 0) == temp)
	  return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
						strict_overflow_p);

	return false;
      }

    case CALL_EXPR:
      {
	tree arg0 = call_expr_nargs (t) > 0 ?  CALL_EXPR_ARG (t, 0) : NULL_TREE;
	tree arg1 = call_expr_nargs (t) > 1 ?  CALL_EXPR_ARG (t, 1) : NULL_TREE;

	return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
					      get_callee_fndecl (t),
					      arg0,
					      arg1,
					      strict_overflow_p);
      }
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
					    strict_overflow_p);
    case BIND_EXPR:
      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
					    strict_overflow_p);
    case SAVE_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
					    strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code;
  if (t == error_mark_node)
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p);

    case tcc_unary:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p);
    case TRUTH_NOT_EXPR:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
    }
}
/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */

bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-negative"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
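/* Example: a caller wanting to delete a redundant "x >= 0" test can
   simply ask

     if (tree_expr_nonnegative_p (x))
       ...

   and the wrapper above emits any -Wstrict-overflow diagnostic that
   the answer depends on.  */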
/* Return true when (CODE OP0) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
			    bool *strict_overflow_p)
{
  switch (code)
    {
    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
					strict_overflow_p);

    CASE_CONVERT:
      {
	tree inner_type = TREE_TYPE (op0);
	tree outer_type = type;

	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
		&& tree_expr_nonzero_warnv_p (op0,
					      strict_overflow_p));
      }
      break;

    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
					strict_overflow_p);

    default:
      break;
    }

  return false;
}
/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonzero_warnv_p (enum tree_code code,
			     tree type,
			     tree op0,
			     tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* With the presence of negative values it is hard
	     to say something.  */
	  sub_strict_overflow_p = false;
	  if (!tree_expr_nonnegative_warnv_p (op0,
					      &sub_strict_overflow_p)
	      || !tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	    return false;
	  /* One of operands must be positive and the other non-negative.  */
	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
	     overflows, on a twos-complement machine the sum of two
	     nonnegative numbers can never be zero.  */
	  return (tree_expr_nonzero_warnv_p (op0,
					     strict_overflow_p)
		  || tree_expr_nonzero_warnv_p (op1,
						strict_overflow_p));
	}
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  if (tree_expr_nonzero_warnv_p (op0,
					 strict_overflow_p)
	      && tree_expr_nonzero_warnv_p (op1,
					    strict_overflow_p))
	    {
	      *strict_overflow_p = true;
	      return true;
	    }
	}
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (op1,
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;

	  /* When both operands are nonzero, then MAX must be too.  */
	  if (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p))
	    return true;

	  /* MAX where operand 0 is positive is positive.  */
	  return tree_expr_nonnegative_warnv_p (op0,
						strict_overflow_p);
	}
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1,
					  &sub_strict_overflow_p)
	       && tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p)
	      || tree_expr_nonzero_warnv_p (op0,
					    strict_overflow_p));

    default:
      break;
    }

  return false;
}
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
	tree base = TREE_OPERAND (t, 0);

	if (!DECL_P (base))
	  base = get_base_address (base);

	if (!base)
	  return false;

	/* For objects in symbol table check if we know they are non-zero.
	   Don't do anything for variables and functions before symtab is built;
	   it is quite possible that they will be declared weak later.  */
	if (DECL_P (base) && decl_in_symtab_p (base))
	  {
	    struct symtab_node *symbol;

	    symbol = symtab_node::get_create (base);
	    if (symbol)
	      return symbol->nonzero_address ();
	    else
	      return false;
	  }

	/* Function local objects are never NULL.  */
	if (DECL_P (base)
	    && (DECL_CONTEXT (base)
		&& TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
		&& auto_var_in_fn_p (base, DECL_CONTEXT (base))))
	  return true;

	/* Constants are never weak.  */
	if (CONSTANT_CLASS_P (base))
	  return true;

	return false;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    default:
      break;
    }

  return false;
}
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;
      location_t loc = EXPR_LOCATION (exp);

      if (TREE_CODE (exp) == INDIRECT_REF)
	string = string_constant (exp1, &index);
      else
	{
	  tree low_bound = array_ref_low_bound (exp);
	  index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));

	  /* Optimize the special-case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion, (ARRAY
	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
	  if (! integer_zerop (low_bound))
	    index = size_diffop_loc (loc, index,
				     fold_convert_loc (loc, sizetype, low_bound));

	  string = exp1;
	}

      if (string
	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
	  && TREE_CODE (string) == STRING_CST
	  && TREE_CODE (index) == INTEGER_CST
	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
	      == MODE_INT)
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
	return build_int_cst_type (TREE_TYPE (exp),
				   (TREE_STRING_POINTER (string)
				    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}
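
/* For intuition, the folding above turns a read such as "abc"[1] into
   the character constant 'b' at compile time.  A plain-C sketch
   (hypothetical names) of the bounds discipline it applies:  */

static inline int
string_cst_read_sketch (const char *str, long length, long index)
{
  /* Mirror the compare_tree_int check: fold only in-bounds reads of a
     single-byte element type; otherwise decline.  */
  if (index < 0 || index >= length)
    return -1;
  return str[index];
}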
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	bool overflow;
	wide_int val = wi::neg (arg0, &overflow);
	t = force_fit_type (type, val, 1,
			    (overflow | TREE_OVERFLOW (arg0))
			    && !TYPE_UNSIGNED (type));
	break;
      }

    case REAL_CST:
      t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
	FIXED_VALUE_TYPE f;
	bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
					    &(TREE_FIXED_CST (arg0)), NULL,
					    TYPE_SATURATING (type));
	t = build_fixed (type, f);
	/* Propagate overflow flags.  */
	if (overflow_p | TREE_OVERFLOW (arg0))
	  TREE_OVERFLOW (t) = 1;
	break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}
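
/* A two's-complement sketch of the overflow tracked above, assuming
   LONG_MIN from <limits.h>: negating the most negative value of a
   signed type wraps back to itself, which is why wi::neg reports
   overflow and force_fit_type is told about it.  */

static inline int
negate_overflows_sketch (long v)
{
  /* LONG_MIN has no positive counterpart of the same width.  */
  return v == LONG_MIN;
}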
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

static tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	/* If the value is unsigned or non-negative, then the absolute value
	   is the same as the ordinary value.  */
	if (!wi::neg_p (arg0, TYPE_SIGN (type)))
	  t = arg0;

	/* If the value is negative, then the absolute value is
	   its negation.  */
	else
	  {
	    bool overflow;
	    wide_int val = wi::neg (arg0, &overflow);
	    t = force_fit_type (type, val, -1,
				overflow | TREE_OVERFLOW (arg0));
	  }
      }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
	t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      else
	t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (const_tree arg0, tree type)
{
  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
}
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case ORDERED_EXPR:
	      result = 0;
	      break;

	    case NE_EXPR:
	    case UNORDERED_EXPR:
	    case UNLT_EXPR:
	    case UNLE_EXPR:
	    case UNGT_EXPR:
	    case UNGE_EXPR:
	    case UNEQ_EXPR:
	      result = 1;
	      break;

	    case LT_EXPR:
	    case LE_EXPR:
	    case GT_EXPR:
	    case GE_EXPR:
	    case LTGT_EXPR:
	      if (flag_trapping_math)
		return NULL_TREE;
	      result = 0;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  return constant_boolean_node (result, type);
	}

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
					  TREE_REALPART (op0),
					  TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
					  TREE_IMAGPART (op0),
					  TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
	return NULL_TREE;
    }

  if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
    {
      unsigned count = VECTOR_CST_NELTS (op0);
      tree *elts = XALLOCAVEC (tree, count);
      gcc_assert (VECTOR_CST_NELTS (op1) == count
		  && TYPE_VECTOR_SUBPARTS (type) == count);

      for (unsigned i = 0; i < count; i++)
	{
	  tree elem_type = TREE_TYPE (type);
	  tree elem0 = VECTOR_CST_ELT (op0, i);
	  tree elem1 = VECTOR_CST_ELT (op1, i);

	  tree tem = fold_relational_const (code, elem_type,
					    elem0, elem1);

	  if (tem == NULL_TREE)
	    return NULL_TREE;

	  elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
	}

      return build_vector (type, elts);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	result = tree_int_cst_equal (op0, op1);
      else
	result = tree_int_cst_lt (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
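
/* A scalar sketch of the reduction described above, assuming integer
   operands: every ordering test is expressed through LT (and EQ) only.
   GT swaps its operands and becomes LT; GE and NE instead invert the
   LT and EQ results.  */

static inline int
relational_reduce_sketch (int code_is_gt, long op0, long op1)
{
  /* op0 > op1 is computed as the swapped test op1 < op0.  */
  return code_is_gt ? (op1 < op0) : (op0 < op1);
}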
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check whether the expression inside the
     return, or the right-hand side of the MODIFY_EXPR inside the return, has
     side effects.  If either has none, we don't need to wrap the expression
     in a cleanup point expression.  Note we don't check the left-hand side
     of the modify because it should always be a return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
	return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
	return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
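
/* A hypothetical use of the helper above: wrapping happens only when
   the expression carries side effects, so a pure EXPR comes back
   unchanged.  */

static inline tree
cleanup_point_sketch (tree expr)
{
  /* A side-effect-free EXPR is returned as-is; otherwise it is wrapped
     in a CLEANUP_POINT_EXPR of its own type.  */
  return fold_build_cleanup_point_expr (TREE_TYPE (expr), expr);
}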
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

static tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
	return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
	{
	  tree fop = fold_read_from_constant_string (op);
	  if (fop)
	    return fop;
	  else
	    return op;
	}
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && type == TREE_TYPE (optype)
	       && (!in_gimple_form
		   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (in_gimple_form
	      && TREE_CODE (min_val) != INTEGER_CST)
	    return NULL_TREE;
	  return build4_loc (loc, ARRAY_REF, type, op, min_val,
			     NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && type == TREE_TYPE (optype))
	return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
	{
	  tree op00type;
	  op00 = TREE_OPERAND (op00, 0);
	  op00type = TREE_TYPE (op00);

	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
	  if (TREE_CODE (op00type) == VECTOR_TYPE
	      && type == TREE_TYPE (op00type))
	    {
	      HOST_WIDE_INT offset = tree_to_shwi (op01);
	      tree part_width = TYPE_SIZE (type);
	      unsigned HOST_WIDE_INT part_widthi
		= tree_to_shwi (part_width) / BITS_PER_UNIT;
	      unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	      tree index = bitsize_int (indexi);

	      if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
		return fold_build3_loc (loc,
					BIT_FIELD_REF, type, op00,
					part_width, index);
	    }
	  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree size = TYPE_SIZE_UNIT (type);
	      if (tree_int_cst_equal (size, op01))
		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
	    }
	  /* ((foo *)&fooarray)[1] => fooarray[1] */
	  else if (TREE_CODE (op00type) == ARRAY_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree type_domain = TYPE_DOMAIN (op00type);
	      tree min_val = size_zero_node;
	      if (type_domain && TYPE_MIN_VALUE (type_domain))
		min_val = TYPE_MIN_VALUE (type_domain);
	      op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
				     TYPE_SIZE_UNIT (type));
	      op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
	      return build4_loc (loc, ARRAY_REF, type, op00, op01,
				 NULL_TREE, NULL_TREE);
	    }
	}
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
	  || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
	  && TREE_CODE (min_val) != INTEGER_CST)
	return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
			 NULL_TREE);
    }

  return NULL_TREE;
}
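
/* A layout fact behind the COMPLEX_TYPE cases above, sketched in plain
   GNU C: a complex value is stored as { real, imag }, so the element
   one TYPE_SIZE_UNIT past its address is the imaginary part.  */

static inline double
imagpart_via_offset_sketch (_Complex double z)
{
  /* The array-of-two layout guarantee makes this the __imag__ part.  */
  return ((double *) &z)[1];
}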
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}
/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.  */

tree
round_up_loc (location_t loc, tree value, unsigned int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
	{
	  wide_int val = value;
	  bool overflow_p;

	  if ((val & (divisor - 1)) == 0)
	    return value;

	  overflow_p = TREE_OVERFLOW (value);
	  val += divisor - 1;
	  val &= ~(divisor - 1);
	  if (val == 0)
	    overflow_p = true;

	  return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
	}
      else
	{
	  tree t;

	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
	  t = build_int_cst (TREE_TYPE (value), -divisor);
	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
	}
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
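
/* The power-of-two identity used above, as a standalone sketch: adding
   DIVISOR - 1 carries into the next multiple unless VALUE is already
   aligned, and masking with -DIVISOR clears the low bits.  */

static inline unsigned long
round_up_pow2_sketch (unsigned long value, unsigned long divisor)
{
  /* Assumes DIVISOR is a nonzero power of two.  */
  return (value + divisor - 1) & -divisor;
}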
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
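
/* Likewise for rounding down, where masking alone suffices: clearing
   the low bits moves VALUE back to the previous multiple of DIVISOR.  */

static inline unsigned long
round_down_pow2_sketch (unsigned long value, unsigned long divisor)
{
  /* Assumes DIVISOR is a nonzero power of two.  */
  return value & -divisor;
}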
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep,
				  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
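
/* Intuition for the function above, sketched with a hypothetical base:
   two addresses that share the same core differ by a compile-time
   constant number of bytes.  */

static inline long
ptr_difference_sketch (int *base)
{
  /* &base[3] and &base[1] share the core BASE; the difference is
     2 * sizeof (int) bytes.  */
  return (char *) &base[3] - (char *) &base[1];
}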
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (exp)))
	return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
				arg0 ? arg0 : TREE_OPERAND (exp, 0),
				arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
	return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
	return fold_build3_loc (loc,
				COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
				arg0 ? arg0 : TREE_OPERAND (exp, 1),
				arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
	const enum built_in_function fcode = builtin_mathfn_code (exp);
	switch (fcode)
	  {
	  CASE_FLT_FN (BUILT_IN_COPYSIGN):
	    /* Strip copysign function call, return the 1st argument.  */
	    arg0 = CALL_EXPR_ARG (exp, 0);
	    arg1 = CALL_EXPR_ARG (exp, 1);
	    return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

	  default:
	    /* Strip sign ops from the argument of "odd" math functions.  */
	    if (negate_mathfn_p (fcode))
	      {
		arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
		if (arg0)
		  return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);