/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

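/* Illustrative usage sketch (an addition, not in the original file):
   a caller building a size expression with these entry points might
   write

     tree bytes = size_binop (MULT_EXPR, size_int (4),
                              fold_convert (sizetype, nelts));

   which folds to an INTEGER_CST whenever NELTS is a constant; here
   NELTS is a hypothetical tree supplied by the caller.  */
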
#include "coretypes.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "diagnostic-core.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "hash-table.h"  /* Required for ENABLE_FOLD_CHECKING.  */

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

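/* Illustrative note (an addition, not in the original file): the
   encoding uses bit 0 for "less", bit 1 for "equal", bit 2 for
   "greater" and bit 3 for "unordered", so ANDing (or ORing) the
   codes of two predicates yields the code of their conjunction (or
   disjunction).  For example COMPCODE_LE & COMPCODE_GE == 3 & 6 ==
   2 == COMPCODE_EQ, i.e. (a <= b) && (a >= b) is a == b.  */
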
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);

/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}

/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

static tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
                         SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}

/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   actually used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning ((enum warn_strict_overflow_code) code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

static bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}

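/* Illustrative note (an addition, not in the original file): for a
   32-bit signed int the only constant the test above rejects is
   INT_MIN (-2147483648), the value with just the sign bit set,
   because -INT_MIN is not representable in the same type.  */
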
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
        if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
          return true;

        int count = TYPE_VECTOR_SUBPARTS (type), i;

        for (i = 0; i < count; i++)
          if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
            return false;

        return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
        {
          if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
            break;
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
              || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
                  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
            return true;
        }
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
        return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
        int count = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts = XALLOCAVEC (tree, count);

        for (i = 0; i < count; i++)
          {
            elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          tem = TREE_OPERAND (t, 0);
          if ((INTEGRAL_TYPE_P (type)
               && (TREE_CODE (tem) == NEGATE_EXPR
                   || (TREE_CODE (tem) == INTEGER_CST
                       && may_negate_without_overflow_p (tem))))
              || !INTEGRAL_TYPE_P (type))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}

/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
                   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
                                  TYPE_SIGN (TREE_TYPE (parg2)));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RSHIFT_EXPR)
            code = LSHIFT_EXPR;
          else
            code = RSHIFT_EXPR;
        }

      if (code == RSHIFT_EXPR)
        /* It's unclear from the C standard whether shifts can overflow.
           The following code ignores overflow; perhaps a C standard
           interpretation ruling is needed.  */
        res = wi::rshift (arg1, arg2, sign);
      else
        res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RROTATE_EXPR)
            code = LROTATE_EXPR;
          else
            code = RROTATE_EXPR;
        }

      if (code == RROTATE_EXPR)
        res = wi::rrotate (arg1, arg2);
      else
        res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
                      (((sign == SIGNED || overflowable == -1)
                        && overflow)
                       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          {
            wide_int w2 = arg2;
            f2.data.high = w2.elt (1);
            f2.data.low = w2.elt (0);
            f2.mode = SImode;
          }
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ... */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to minimize
                 overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      if (code == VEC_LSHIFT_EXPR
          || code == VEC_RSHIFT_EXPR)
        {
          if (!tree_fits_uhwi_p (arg2))
            return NULL_TREE;

          unsigned HOST_WIDE_INT shiftc = tree_to_uhwi (arg2);
          unsigned HOST_WIDE_INT outerc = tree_to_uhwi (TYPE_SIZE (type));
          unsigned HOST_WIDE_INT innerc
            = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
          if (shiftc >= outerc || (shiftc % innerc) != 0)
            return NULL_TREE;
          int offset = shiftc / innerc;

          /* The direction of VEC_[LR]SHIFT_EXPR is endian dependent.
             For reductions, compiler emits VEC_RSHIFT_EXPR always,
             for !BYTES_BIG_ENDIAN picks first vector element, but
             for BYTES_BIG_ENDIAN last element from the vector.  */
          if ((code == VEC_RSHIFT_EXPR) ^ (!BYTES_BIG_ENDIAN))
            offset = -offset;
          tree zero = build_zero_cst (TREE_TYPE (type));
          for (i = 0; i < count; i++)
            {
              if (i + offset < 0 || i + offset >= count)
                elts[i] = zero;
              else
                elts[i] = VECTOR_CST_ELT (arg1, i + offset);
            }
        }
      else
        for (i = 0; i < count; i++)
          {
            tree elem1 = VECTOR_CST_ELT (arg1, i);

            elts[i] = const_binop (code, elem1, arg2);

            /* It is possible that const_binop cannot handle the given
               code and returns NULL_TREE.  */
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}

/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}

/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
                         !POINTER_TYPE_P (TREE_TYPE (arg1)),
                         TREE_OVERFLOW (arg1));
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = true;
          val = lt;
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = true;
              val = ut;
            }
        }
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  enum machine_mode mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
                          HOST_BITS_PER_DOUBLE_INT,
                          SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     We do this by adding 1 to temp when the fractional bits are not
     zero.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type (type, temp, -1,
                      (temp.is_negative ()
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;
  double_int di;

  gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);

  di.low = TREE_INT_CST_ELT (arg1, 0);
  if (TREE_INT_CST_NUNITS (arg1) == 1)
    di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
  else
    di.high = TREE_INT_CST_ELT (arg1, 1);

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}

/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}

/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}

/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
          && (TYPE_ADDR_SPACE (TREE_TYPE (type))
              != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
        return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, REALPART_EXPR,
                                                  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == FIXED_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1_loc (loc, NOP_EXPR, type, arg);

        case FIXED_POINT_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
          || TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            goto fold_convert_exit;
        }

      switch (TREE_CODE (orig))
        {
        case FIXED_POINT_TYPE:
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case BOOLEAN_TYPE:
        case REAL_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
        case FIXED_POINT_TYPE:
          return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                  fold_convert_loc (loc, TREE_TYPE (type), arg),
                                  fold_convert_loc (loc, TREE_TYPE (type),
                                                    integer_zero_node));
        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 0));
                ipart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 1));
                return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
              }

            arg = save_expr (arg);
            rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
            ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
            rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
            ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
            return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
          }

        default:
          gcc_unreachable ();
        }

    case VECTOR_TYPE:
      if (integer_zerop (arg))
        return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
                  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      gcc_unreachable ();
    }
 fold_convert_exit:
  return protected_set_expr_location_unshare (tem, loc);
}

/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (const_tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case LABEL_DECL:
    case FUNCTION_DECL:
    case SSA_NAME:

    case COMPONENT_REF:
    case MEM_REF:
    case INDIRECT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
    case OBJ_TYPE_REF:

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case SAVE_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case TARGET_EXPR:
    case COND_EXPR:
    case BIND_EXPR:
      break;

    default:
      /* Assume the worst for front-end tree codes.  */
      if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
        break;
      return false;
    }

  return true;
}

/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue_loc (location_t loc, tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;
  return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
}

/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue_loc (location_t loc, tree x)
{
  if (pedantic_lvalues)
    return non_lvalue_loc (loc, x);

  return protected_set_expr_location_unshare (x, loc);
}

/* Given a tree comparison code, return the code that is the logical inverse.
   It is generally not safe to do this for floating-point comparisons, except
   for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
   ERROR_MARK in this case.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
      && code != ORDERED_EXPR && code != UNORDERED_EXPR)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}

/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case LTGT_EXPR:
    case UNEQ_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    case UNGT_EXPR:
      return UNLT_EXPR;
    case UNGE_EXPR:
      return UNLE_EXPR;
    case UNLT_EXPR:
      return UNGT_EXPR;
    case UNLE_EXPR:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}

/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}

/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}

/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

tree
combine_comparisons (location_t loc,
                     enum tree_code code, enum tree_code lcode,
                     enum tree_code rcode, tree truth_type,
                     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  int compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
         which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
        compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
        compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
         under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
                   && (lcompcode != COMPCODE_EQ)
                   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
                   && (rcompcode != COMPCODE_EQ)
                   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
                  && (compcode != COMPCODE_EQ)
                  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
         such that the RHS, if evaluated, will never trap.  For
         example, in ORD (x, y) && (x < y), we evaluate the RHS only
         if neither x nor y is NaN.  (This is a mixed blessing: for
         example, the expression above will never trap, hence
         optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
          || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
        rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
         trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
          && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
        return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
        return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    {
      enum tree_code tcode;

      tcode = compcode_to_comparison ((enum comparison_code) compcode);
      return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
    }
}
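/* A worked example (illustrative, not part of the original file), using
   the assumed bit encoding LT=1, EQ=2, GT=4, UNORD=8:

     (a < b) || (a == b)  ->  1 | 2 == 3 == COMPCODE_LE    ->  a <= b
     (a < b) && (a == b)  ->  1 & 2 == 0 == COMPCODE_FALSE ->  false
     (a < b) || (a >= b)  ->  1 | 6 == 7 == COMPCODE_ORD, which for a
                              mode without NaNs becomes COMPCODE_TRUE

   The flag_trapping_math block above then vetoes any result whose set
   of trapping inputs differs from that of the original pair.  */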
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */
int
operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
      || TREE_TYPE (arg0) == error_mark_node
      || TREE_TYPE (arg1) == error_mark_node)
    return 0;

  /* Similar, if either does not have a type (like a released SSA name),
     they aren't equal.  */
  if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
    return 0;

  /* Check equality of integer constants before bailing out due to
     precision differences.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    return tree_int_cst_equal (arg0, arg1);

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  As pointers
     strictly don't have a signedness, require either two pointers or
     two non-pointers as well.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
      || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* We cannot consider pointers to different address spaces equal.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
      && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
          != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
    return 0;

  /* If both types don't have the same precision, then it is not safe
     to strip NOPs.  */
  if (element_precision (TREE_TYPE (arg0))
      != element_precision (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* In case both args are comparisons but with different comparison
     code, try to swap the comparison operands of one arg to produce
     a match and compare that variant.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      && COMPARISON_CLASS_P (arg0)
      && COMPARISON_CLASS_P (arg1))
    {
      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));

      if (TREE_CODE (arg0) == swap_code)
        return operand_equal_p (TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg1, 1), flags)
               && operand_equal_p (TREE_OPERAND (arg0, 1),
                                   TREE_OPERAND (arg1, 0), flags);
    }

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* NOP_EXPR and CONVERT_EXPR are considered equal.  */
      && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
    return 0;

  /* This is needed for conversions and for COMPONENT_REF.
     Might as well play it safe and always test this.  */
  if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.
     If we are taking an invariant address of two identical objects
     they are necessarily equal as well.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
          || (flags & OEP_CONSTANT_ADDRESS_OF)
          || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
        return tree_int_cst_equal (arg0, arg1);

      case FIXED_CST:
        return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
                                       TREE_FIXED_CST (arg1));

      case REAL_CST:
        if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
                                   TREE_REAL_CST (arg1)))
          return 1;

        if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
          {
            /* If we do not distinguish between signed and unsigned zero,
               consider them equal.  */
            if (real_zerop (arg0) && real_zerop (arg1))
              return 1;
          }
        return 0;

      case VECTOR_CST:
        {
          unsigned i;

          if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
            return 0;

          for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
            {
              if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
                                    VECTOR_CST_ELT (arg1, i), flags))
                return 0;
            }
          return 1;
        }

      case COMPLEX_CST:
        return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
                                 flags)
                && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
                                    flags));

      case STRING_CST:
        return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
                && ! memcmp (TREE_STRING_POINTER (arg0),
                             TREE_STRING_POINTER (arg1),
                             TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
        return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
                                TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
                                ? OEP_CONSTANT_ADDRESS_OF : 0);
      default:
        break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;

/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, then both
   must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
                                    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N)                            \
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
        {
        CASE_CONVERT:
        case FIX_TRUNC_EXPR:
          if (TYPE_UNSIGNED (TREE_TYPE (arg0))
              != TYPE_UNSIGNED (TREE_TYPE (arg1)))
            return 0;
          break;
        default:
          break;
        }

      return OP_SAME (0);

    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
        return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 1), flags)
              && operand_equal_p (TREE_OPERAND (arg0, 1),
                                  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
         dereferencing contain a side effect, these cannot be equal,
         but their addresses can be.  */
      if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
          && (TREE_SIDE_EFFECTS (arg0)
              || TREE_SIDE_EFFECTS (arg1)))
        return 0;

      switch (TREE_CODE (arg0))
        {
        case INDIRECT_REF:
          flags &= ~OEP_CONSTANT_ADDRESS_OF;
          return OP_SAME (0);

        case REALPART_EXPR:
        case IMAGPART_EXPR:
          return OP_SAME (0);

        case TARGET_MEM_REF:
          flags &= ~OEP_CONSTANT_ADDRESS_OF;
          /* Require equal extra operands and then fall through to MEM_REF
             handling of the two common operands.  */
          if (!OP_SAME_WITH_NULL (2)
              || !OP_SAME_WITH_NULL (3)
              || !OP_SAME_WITH_NULL (4))
            return 0;
          /* Fallthru.  */
        case MEM_REF:
          flags &= ~OEP_CONSTANT_ADDRESS_OF;
          /* Require equal access sizes, and similar pointer types.
             We can have incomplete types for array references of
             variable-sized arrays from the Fortran frontend
             though.  Also verify the types are compatible.  */
          return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
                   || (TYPE_SIZE (TREE_TYPE (arg0))
                       && TYPE_SIZE (TREE_TYPE (arg1))
                       && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
                                           TYPE_SIZE (TREE_TYPE (arg1)),
                                           flags)))
                  && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
                  && alias_ptr_types_compatible_p
                       (TREE_TYPE (TREE_OPERAND (arg0, 1)),
                        TREE_TYPE (TREE_OPERAND (arg1, 1)))
                  && OP_SAME (0) && OP_SAME (1));

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          /* Operands 2 and 3 may be null.
             Compare the array index by value if it is constant first as we
             may have different types but same value here.  */
          if (!OP_SAME (0))
            return 0;
          flags &= ~OEP_CONSTANT_ADDRESS_OF;
          return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
                                       TREE_OPERAND (arg1, 1))
                   || OP_SAME (1))
                  && OP_SAME_WITH_NULL (2)
                  && OP_SAME_WITH_NULL (3));

        case COMPONENT_REF:
          /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
             may be NULL when we're called to compare MEM_EXPRs.  */
          if (!OP_SAME_WITH_NULL (0)
              || !OP_SAME (1))
            return 0;
          flags &= ~OEP_CONSTANT_ADDRESS_OF;
          return OP_SAME_WITH_NULL (2);

        case BIT_FIELD_REF:
          if (!OP_SAME (0))
            return 0;
          flags &= ~OEP_CONSTANT_ADDRESS_OF;
          return OP_SAME (1) && OP_SAME (2);

        default:
          return 0;
        }

    case tcc_expression:
      switch (TREE_CODE (arg0))
        {
        case ADDR_EXPR:
        case TRUTH_NOT_EXPR:
          return OP_SAME (0);

        case TRUTH_ANDIF_EXPR:
        case TRUTH_ORIF_EXPR:
          return OP_SAME (0) && OP_SAME (1);

        case FMA_EXPR:
        case WIDEN_MULT_PLUS_EXPR:
        case WIDEN_MULT_MINUS_EXPR:
          if (!OP_SAME (2))
            return 0;
          /* The multiplication operands are commutative.  */
          /* FALLTHRU */

        case TRUTH_AND_EXPR:
        case TRUTH_OR_EXPR:
        case TRUTH_XOR_EXPR:
          if (OP_SAME (0) && OP_SAME (1))
            return 1;

          /* Otherwise take into account this is a commutative operation.  */
          return (operand_equal_p (TREE_OPERAND (arg0, 0),
                                   TREE_OPERAND (arg1, 1), flags)
                  && operand_equal_p (TREE_OPERAND (arg0, 1),
                                      TREE_OPERAND (arg1, 0), flags));

        case COND_EXPR:
        case VEC_COND_EXPR:
        case DOT_PROD_EXPR:
          return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

        default:
          return 0;
        }

    case tcc_vl_exp:
      switch (TREE_CODE (arg0))
        {
        case CALL_EXPR:
          /* If the CALL_EXPRs call different functions, then they
             clearly cannot be equal.  */
          if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
                                 flags))
            return 0;

          {
            unsigned int cef = call_expr_flags (arg0);
            if (flags & OEP_PURE_SAME)
              cef &= ECF_CONST | ECF_PURE;
            else
              cef &= ECF_CONST;
            if (!cef)
              return 0;
          }

          /* Now see if all the arguments are the same.  */
          {
            const_call_expr_arg_iterator iter0, iter1;
            const_tree a0, a1;
            for (a0 = first_const_call_expr_arg (arg0, &iter0),
                   a1 = first_const_call_expr_arg (arg1, &iter1);
                 a0 && a1;
                 a0 = next_const_call_expr_arg (&iter0),
                   a1 = next_const_call_expr_arg (&iter1))
              if (! operand_equal_p (a0, a1, flags))
                return 0;

            /* If we get here and both argument lists are exhausted
               then the CALL_EXPRs are equal.  */
            return ! (a0 || a1);
          }

        default:
          return 0;
        }

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
              && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
              && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
              && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}
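/* Illustrative note (not in the original source): the commutative retry
   in the tcc_binary/tcc_comparison case above means the trees for
   "x + y" and "y + x" compare equal, while the REAL_CST case keeps
   -0.0 and 0.0 distinct whenever the mode honors signed zeros, even
   though -0.0 == 0.0 as a C expression.  */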
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
         to match the longer operand.  */
      primarg1 = fold_convert (signed_or_unsigned_type_for
                               (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
        return 1;
    }

  return 0;
}
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
               || code == COMPOUND_EXPR))
    tclass = tcc_binary;

  else if (tclass == tcc_expression && code == SAVE_EXPR
           && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
         too complex to handle.  */
      if (*cval1 || *cval2)
        return 0;

      tclass = tcc_unary;
      *save_p = 1;
    }

  switch (tclass)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
              && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
        return (twoval_comparison_p (TREE_OPERAND (arg, 0),
                                     cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                        cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 2),
                                        cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
         the second operand, we know *CVAL1 can't be zero.  It must be that
         one side of the comparison is each of the values; test for the
         case where this isn't true by failing if the two operands
         are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
                           TREE_OPERAND (arg, 1), 0))
        return 0;

      if (*cval1 == 0)
        *cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
        ;
      else
        return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
        ;
      else
        return 0;

      return 1;

    default:
      return 0;
    }
}
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (location_t loc, tree arg, tree old0, tree new0,
            tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    tclass = tcc_binary;

  switch (tclass)
    {
    case tcc_unary:
      return fold_build1_loc (loc, code, type,
                              eval_subst (loc, TREE_OPERAND (arg, 0),
                                          old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2_loc (loc, code, type,
                              eval_subst (loc, TREE_OPERAND (arg, 0),
                                          old0, new0, old1, new1),
                              eval_subst (loc, TREE_OPERAND (arg, 1),
                                          old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
        {
        case SAVE_EXPR:
          return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
                             old1, new1);

        case COMPOUND_EXPR:
          return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
                             old1, new1);

        case COND_EXPR:
          return fold_build3_loc (loc, code, type,
                                  eval_subst (loc, TREE_OPERAND (arg, 0),
                                              old0, new0, old1, new1),
                                  eval_subst (loc, TREE_OPERAND (arg, 1),
                                              old0, new0, old1, new1),
                                  eval_subst (loc, TREE_OPERAND (arg, 2),
                                              old0, new0, old1, new1));
        default:
          break;
        }
      /* Fall through - ???  */

    case tcc_comparison:
      {
        tree arg0 = TREE_OPERAND (arg, 0);
        tree arg1 = TREE_OPERAND (arg, 1);

        /* We need to check both for exact equality and tree equality.  The
           former will be true if the operand has a side-effect.  In that
           case, we know the operand occurred exactly once.  */

        if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
          arg0 = new0;
        else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
          arg0 = new1;

        if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
          arg1 = new0;
        else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
          arg1 = new1;

        return fold_build2_loc (loc, code, type, arg0, arg1);
      }

    default:
      return arg;
    }
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement casted to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1_loc (loc, NOP_EXPR, void_type_node,
                       fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2_loc (loc, COMPOUND_EXPR, type,
                       fold_ignored_result (omitted), t);

  return non_lvalue_loc (loc, t);
}
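/* A worked example (illustrative only): folding "f () * 0" cannot simply
   produce the constant 0, because the call must still happen.  With
   RESULT = 0 and OMITTED = f (), the function above yields the
   COMPOUND_EXPR (f (), 0), which evaluates the call and then the
   constant.  */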
/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
                               tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement casted to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1_loc (loc, NOP_EXPR, void_type_node,
                       fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2_loc (loc, COMPOUND_EXPR, type,
                       fold_ignored_result (omitted), t);

  return pedantic_non_lvalue_loc (loc, t);
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands_loc (location_t loc, tree type, tree result,
                       tree omitted1, tree omitted2)
{
  tree t = fold_convert_loc (loc, type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
  if (TREE_SIDE_EFFECTS (omitted1))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
}
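/* Illustrative note (not in the original): because the COMPOUND_EXPRs are
   wrapped inside-out, OMITTED2 is attached first and OMITTED1 last, so the
   final tree is (omitted1, (omitted2, result)) and OMITTED1 is indeed
   evaluated before OMITTED2, as the comment above promises.  */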
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

static tree
fold_truth_not_expr (location_t loc, tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  location_t loc1, loc2;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
          && flag_trapping_math
          && code != ORDERED_EXPR && code != UNORDERED_EXPR
          && code != NE_EXPR && code != EQ_EXPR)
        return NULL_TREE;

      code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
      if (code == ERROR_MARK)
        return NULL_TREE;

      return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
                         TREE_OPERAND (arg, 1));
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_OR_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_AND_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
         unless the second operand is a TRUTH_NOT_EXPR in which case our
         result is the XOR of the first operand with the inside of the
         negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
        return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
                           TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
        return build2_loc (loc, TRUTH_XOR_EXPR, type,
                           invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
                           TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ORIF_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
        tree arg1 = TREE_OPERAND (arg, 1);
        tree arg2 = TREE_OPERAND (arg, 2);

        loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
        loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);

        /* A COND_EXPR may have a throw as one operand, which
           then has void type.  Just leave void operands
           as they are.  */
        return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
                           VOID_TYPE_P (TREE_TYPE (arg1))
                           ? arg1 : invert_truthvalue_loc (loc1, arg1),
                           VOID_TYPE_P (TREE_TYPE (arg2))
                           ? arg2 : invert_truthvalue_loc (loc2, arg2));
      }

    case COMPOUND_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, COMPOUND_EXPR, type,
                         TREE_OPERAND (arg, 0),
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));

    CASE_CONVERT:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
        return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

      /* ... fall through ...  */

    case FLOAT_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, TREE_CODE (arg), type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
        return NULL_TREE;
      return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));

    case SAVE_EXPR:
      return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, CLEANUP_POINT_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    default:
      return NULL_TREE;
    }
}
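/* A worked example (illustrative only): the TRUTH_AND_EXPR case above is
   just De Morgan's law,

     !(a && b)  ->  !a || !b

   applied through invert_truthvalue_loc on each operand, with each
   inverted operand keeping its own source location.  */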
/* Fold the truth-negation of ARG.  This never alters ARG itself.  We
   assume that ARG is an operation that returns a truth value (0 or 1
   for scalars, 0 or -1 for vectors).  Return the folded expression if
   folding is successful.  Otherwise, return NULL_TREE.  */

static tree
fold_invert_truthvalue (location_t loc, tree arg)
{
  tree type = TREE_TYPE (arg);
  return fold_unary_loc (loc, VECTOR_TYPE_P (type)
                              ? BIT_NOT_EXPR
                              : TRUTH_NOT_EXPR,
                         type, arg);
}
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1 for scalars, 0 or -1 for vectors).  */

tree
invert_truthvalue_loc (location_t loc, tree arg)
{
  if (TREE_CODE (arg) == ERROR_MARK)
    return arg;

  tree type = TREE_TYPE (arg);
  return fold_build1_loc (loc, VECTOR_TYPE_P (type)
                               ? BIT_NOT_EXPR
                               : TRUTH_NOT_EXPR,
                          type, arg);
}
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
        (A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (location_t loc, enum tree_code code, tree type,
                     tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
          && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  common = fold_convert_loc (loc, type, common);
  left = fold_convert_loc (loc, type, left);
  right = fold_convert_loc (loc, type, right);
  return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
                          fold_build2_loc (loc, code, type, left, right));
}
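/* A concrete instance (illustrative only): with B and C constant,

     (x | 3) & (x | 5)  ->  x | (3 & 5)  ->  x | 1

   so one bit operation is saved and the constant operation folds away
   entirely.  */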
/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
   with code CODE.  This optimization is unsafe.  */

static tree
distribute_real_division (location_t loc, enum tree_code code, tree type,
                          tree arg0, tree arg1)
{
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
                          TREE_OPERAND (arg1, 1), 0))
    return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
                            fold_build2_loc (loc, code, type,
                                             TREE_OPERAND (arg0, 0),
                                             TREE_OPERAND (arg1, 0)),
                            TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
                       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      if (!mul0)
        real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
        real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2_loc (loc, MULT_EXPR, type,
                              TREE_OPERAND (arg0, 0),
                              build_real (type, r0));
    }

  return NULL_TREE;
}
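/* Illustrative note (not in the original): the second pattern rewrites

     a / 10.0 + a / 40.0  ->  a * (1/10.0 + 1/40.0)  ->  a * 0.125

   which is why the header calls it unsafe: the reciprocals are computed
   at compile time and need not round the same way the two original
   divisions would.  */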
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

static tree
make_bit_field_ref (location_t loc, tree inner, tree type,
                    HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
{
  tree result, bftype;

  if (bitpos == 0)
    {
      tree size = TYPE_SIZE (TREE_TYPE (inner));
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
           || POINTER_TYPE_P (TREE_TYPE (inner)))
          && tree_fits_shwi_p (size)
          && tree_to_shwi (size) == bitsize)
        return fold_convert_loc (loc, type, inner);
    }

  bftype = type;
  if (TYPE_PRECISION (bftype) != bitsize
      || TYPE_UNSIGNED (bftype) == !unsignedp)
    bftype = build_nonstandard_integer_type (bitsize, 0);

  result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
                       size_int (bitsize), bitsize_int (bitpos));

  if (bftype != type)
    result = fold_convert_loc (loc, type, result);

  return result;
}
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (location_t loc, enum tree_code code,
                            tree compare_type, tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
                                &lunsignedp, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
         sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
                                    &runsignedp, &rvolatilep, false);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
          || lunsignedp != runsignedp || offset != 0
          || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
        return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
                         const_p ? TYPE_ALIGN (TREE_TYPE (linner))
                         : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
                                TYPE_ALIGN (TREE_TYPE (rinner))),
                         word_mode, false);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_cst_type (unsigned_type, -1);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
  mask = const_binop (RSHIFT_EXPR, mask,
                      size_int (nbitsize - lbitsize - lbitpos));

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return fold_build2_loc (loc, code, compare_type,
                            fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
                                             make_bit_field_ref (loc, linner,
                                                                 unsigned_type,
                                                                 nbitsize,
                                                                 nbitpos, 1),
                                             mask),
                            fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
                                             make_bit_field_ref (loc, rinner,
                                                                 unsigned_type,
                                                                 nbitsize,
                                                                 nbitpos, 1),
                                             mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (wi::lrshift (rhs, lbitsize) != 0)
        {
          warning (0, "comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }
  else
    {
      wide_int tem = wi::arshift (rhs, lbitsize - 1);
      if (tem != 0 && tem != -1)
        {
          warning (0, "comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = build_int_cst (type, 0);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);

  rhs = const_binop (BIT_AND_EXPR,
                     const_binop (LSHIFT_EXPR,
                                  fold_convert_loc (loc, unsigned_type, rhs),
                                  size_int (lbitpos)),
                     mask);

  lhs = build2_loc (loc, code, compare_type,
                    build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
  return lhs;
}
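/* A worked example (illustrative only): for

     struct S { unsigned f : 3; } s;   ...   s.f == 5

   the field is loaded as part of a mode-sized chunk (say a byte), so
   instead of an extract-and-shift the comparison becomes

     (chunk & (7 << pos)) == (5 << pos)

   where pos is the field's bit position within the chunk (after the
   BYTES_BIG_ENDIAN adjustment above).  */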
/* Subroutine for fold_truth_andor_1: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
                        HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
                        int *punsignedp, int *pvolatilep,
                        tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (CONVERT_EXPR_P (exp)
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
        return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
                               punsignedp, pvolatilep, false);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst_type (unsigned_type, -1);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
                            fold_convert_loc (loc, unsigned_type, and_mask),
                            mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bit positions and MASK is SIGNED.  */

static bool
all_ones_mask_p (const_tree mask, unsigned int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);

  /* If this function returns true when the type of the mask is
     UNSIGNED, then there will be errors.  In particular see
     gcc.c-torture/execute/990326-1.c.  There does not appear to be
     any documentation paper trail as to why this is so.  But the pre
     wide-int worked with that restriction and it has been preserved
     here.  */
  if (size > precision || TYPE_SIGN (type) == UNSIGNED)
    return false;

  return wi::mask (size, false, precision) == mask;
}
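/* Illustrative example (not in the original): for a signed 8-bit MASK,
   wi::mask (4, false, 8) is 0b00001111, so a MASK tree holding the value
   15 makes all_ones_mask_p return true for SIZE == 4.  */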
/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, const_tree val)
{
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (wi::only_sign_bit_p (val, width))
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
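/* Illustrative example (not in the original): for a 32-bit signed EXP,
   VAL == 0x80000000 has only the sign bit set, so sign_bit_p returns EXP;
   and if EXP is (int) c for an 8-bit c, VAL == 0x80 is accepted through
   the narrower-type recursion, which returns the unextended operand c.  */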
/* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static bool
simple_operand_p (const_tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  STRIP_NOPS (exp);

  return (CONSTANT_CLASS_P (exp)
          || TREE_CODE (exp) == SSA_NAME
          || (DECL_P (exp)
              && ! TREE_ADDRESSABLE (exp)
              && ! TREE_THIS_VOLATILE (exp)
              && ! DECL_NONLOCAL (exp)
              /* Don't regard global variables as simple.  They may be
                 allocated in ways unknown to the compiler (shared memory,
                 #pragma weak, etc).  */
              && ! TREE_PUBLIC (exp)
              && ! DECL_EXTERNAL (exp)
              /* Weakrefs are not safe to be read, since they can be NULL.
                 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
                 have DECL_WEAK flag set.  */
              && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
              /* Loading a static variable is unduly expensive, but global
                 registers aren't expensive.  */
              && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}
/* Subroutine for fold_truth_andor: determine if an operand is simple enough
   to be evaluated unconditionally.
   In addition to simple_operand_p, we assume that comparisons, conversions,
   and logic-not operations are simple, if their operands are simple, too.  */

static bool
simple_operand_p_2 (tree exp)
{
  enum tree_code code;

  if (TREE_SIDE_EFFECTS (exp)
      || tree_could_trap_p (exp))
    return false;

  while (CONVERT_EXPR_P (exp))
    exp = TREE_OPERAND (exp, 0);

  code = TREE_CODE (exp);

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return (simple_operand_p (TREE_OPERAND (exp, 0))
            && simple_operand_p (TREE_OPERAND (exp, 1)));

  if (code == TRUTH_NOT_EXPR)
    return simple_operand_p_2 (TREE_OPERAND (exp, 0));

  return simple_operand_p (exp);
}
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
        X == 2 || X == 3 || X == 4 || X == 5
   and
        X >= 2 && X <= 5
   are converted to
        (unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
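/* A quick arithmetic check of the example above (illustrative only):
   X in {2,3,4,5} is exactly 2 <= X <= 5, and subtracting the low bound
   gives 0 <= X-2 <= 3; doing the subtraction in an unsigned type makes
   values below 2 wrap to huge numbers, so the single unsigned compare
   (unsigned) (X - 2) <= 3 tests both bounds at once.  */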
/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
             tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
                         arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);

  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}
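/* Illustrative note (not in the original): a missing lower bound behaves
   like -Z and a missing upper bound like +Z for some Z beyond every
   representable value, so e.g. comparing a missing upper bound (sgn 1)
   against any finite value (sgn 0) with GT_EXPR yields true, which is why
   unbounded ranges never need special-casing in the callers.  */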
/* Helper routine for make_range.  Perform one step for it, return
   new expression if the loop should continue or NULL_TREE if it should
   stop.  */

tree
make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
                 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
                 bool *strict_overflow_p)
{
  tree arg0_type = TREE_TYPE (arg0);
  tree n_low, n_high, low = *p_low, high = *p_high;
  int in_p = *p_in_p, n_in_p;

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      /* We can only do something if the range is testing for zero.  */
      if (low == NULL_TREE || high == NULL_TREE
          || ! integer_zerop (low) || ! integer_zerop (high))
        return NULL_TREE;
      *p_in_p = ! in_p;
      return arg0;

    case EQ_EXPR: case NE_EXPR:
    case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
      /* We can only do something if the range is testing for zero
         and if the second operand is an integer constant.  Note that
         saying something is "in" the range we make is done by
         complementing IN_P since it will set in the initial case of
         being not equal to zero; "out" is leaving it alone.  */
      if (low == NULL_TREE || high == NULL_TREE
          || ! integer_zerop (low) || ! integer_zerop (high)
          || TREE_CODE (arg1) != INTEGER_CST)
        return NULL_TREE;

      switch (code)
        {
        case NE_EXPR:  /* - [c, c]  */
          low = high = arg1;
          break;
        case EQ_EXPR:  /* + [c, c]  */
          in_p = ! in_p, low = high = arg1;
          break;
        case GT_EXPR:  /* - [-, c] */
          low = 0, high = arg1;
          break;
        case GE_EXPR:  /* + [c, -] */
          in_p = ! in_p, low = arg1, high = 0;
          break;
        case LT_EXPR:  /* - [c, -] */
          low = arg1, high = 0;
          break;
        case LE_EXPR:  /* + [-, c] */
          in_p = ! in_p, low = 0, high = arg1;
          break;
        default:
          gcc_unreachable ();
        }

      /* If this is an unsigned comparison, we also know that EXP is
         greater than or equal to zero.  We base the range tests we make
         on that fact, so we record it here so we can parse existing
         range tests.  We test arg0_type since often the return type
         of, e.g. EQ_EXPR, is boolean.  */
      if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
        {
          if (! merge_ranges (&n_in_p, &n_low, &n_high,
                              in_p, low, high, 1,
                              build_int_cst (arg0_type, 0),
                              NULL_TREE))
            return NULL_TREE;

          in_p = n_in_p, low = n_low, high = n_high;

          /* If the high bound is missing, but we have a nonzero low
             bound, reverse the range so it goes from zero to the low bound
             minus 1.  */
          if (high == 0 && low && ! integer_zerop (low))
            {
              in_p = ! in_p;
              high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
                                  build_int_cst (TREE_TYPE (low), 1), 0);
              low = build_int_cst (arg0_type, 0);
            }
        }

      *p_low = low;
      *p_high = high;
      *p_in_p = in_p;
      return arg0;

    case NEGATE_EXPR:
      /* If flag_wrapv and ARG0_TYPE is signed, make sure
         low and high are non-NULL, then normalize will DTRT.  */
      if (!TYPE_UNSIGNED (arg0_type)
          && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
        {
          if (low == NULL_TREE)
            low = TYPE_MIN_VALUE (arg0_type);
          if (high == NULL_TREE)
            high = TYPE_MAX_VALUE (arg0_type);
        }

      /* (-x) IN [a,b] -> x in [-b, -a]  */
      n_low = range_binop (MINUS_EXPR, exp_type,
                           build_int_cst (exp_type, 0),
                           0, high, 1);
      n_high = range_binop (MINUS_EXPR, exp_type,
                            build_int_cst (exp_type, 0),
                            0, low, 0);
      if (n_high != 0 && TREE_OVERFLOW (n_high))
        return NULL_TREE;
      goto normalize;

    case BIT_NOT_EXPR:
      /* ~ X -> -X - 1  */
      return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
                         build_int_cst (exp_type, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
      if (TREE_CODE (arg1) != INTEGER_CST)
        return NULL_TREE;

      /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
         move a constant to the other side.  */
      if (!TYPE_UNSIGNED (arg0_type)
          && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
        return NULL_TREE;

      /* If EXP is signed, any overflow in the computation is undefined,
         so we don't worry about it so long as our computations on
         the bounds don't overflow.  For unsigned, overflow is defined
         and this is exactly the right thing.  */
      n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                           arg0_type, low, 0, arg1, 0);
      n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                            arg0_type, high, 1, arg1, 0);
      if ((n_low != 0 && TREE_OVERFLOW (n_low))
          || (n_high != 0 && TREE_OVERFLOW (n_high)))
        return NULL_TREE;

      if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
        *strict_overflow_p = true;

    normalize:
      /* Check for an unsigned range which has wrapped around the maximum
         value thus making n_high < n_low, and normalize it.  */
      if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
        {
          low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
                             build_int_cst (TREE_TYPE (n_high), 1), 0);
          high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
                              build_int_cst (TREE_TYPE (n_low), 1), 0);

          /* If the range is of the form +/- [ x+1, x ], we won't
             be able to normalize it.  But then, it represents the
             whole range or the empty set, so make it
             +/- [ -, - ].  */
          if (tree_int_cst_equal (n_low, low)
              && tree_int_cst_equal (n_high, high))
            low = high = 0;
          else
            in_p = ! in_p;
        }
      else
        low = n_low, high = n_high;

      *p_low = low;
      *p_high = high;
      *p_in_p = in_p;
      return arg0;

    CASE_CONVERT:
    case NON_LVALUE_EXPR:
      if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
        return NULL_TREE;

      if (! INTEGRAL_TYPE_P (arg0_type)
          || (low != 0 && ! int_fits_type_p (low, arg0_type))
          || (high != 0 && ! int_fits_type_p (high, arg0_type)))
        return NULL_TREE;

      n_low = low, n_high = high;

      if (n_low != 0)
        n_low = fold_convert_loc (loc, arg0_type, n_low);

      if (n_high != 0)
        n_high = fold_convert_loc (loc, arg0_type, n_high);

      /* If we're converting arg0 from an unsigned type, to exp,
         a signed type, we will be doing the comparison as unsigned.
         The tests above have already verified that LOW and HIGH
         are both positive.

         So we have to ensure that we will handle large unsigned
         values the same way that the current signed bounds treat
         negative values.  */

      if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
        {
          tree high_positive;
          tree equiv_type;

          /* For fixed-point modes, we need to pass the saturating flag
             as the 2nd parameter.  */
          if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
            equiv_type
              = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
                                                TYPE_SATURATING (arg0_type));
          else
            equiv_type
              = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);

          /* A range without an upper bound is, naturally, unbounded.
             Since convert would have cropped a very large value, use
             the max value for the destination type.  */
          high_positive
            = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
              : TYPE_MAX_VALUE (arg0_type);

          if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
            high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
                                             fold_convert_loc (loc, arg0_type,
                                                               high_positive),
                                             build_int_cst (arg0_type, 1));

          /* If the low bound is specified, "and" the range with the
             range for which the original unsigned value will be
             positive.  */
          if (low != 0)
            {
              if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
                                  1, fold_convert_loc (loc, arg0_type,
                                                       integer_zero_node),
                                  high_positive))
                return NULL_TREE;

              in_p = (n_in_p == in_p);
            }
          else
            {
              /* Otherwise, "or" the range with the range of the input
                 that will be interpreted as negative.  */
              if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
                                  1, fold_convert_loc (loc, arg0_type,
                                                       integer_zero_node),
                                  high_positive))
                return NULL_TREE;

              in_p = (in_p != n_in_p);
            }
        }

      *p_low = n_low;
      *p_high = n_high;
      *p_in_p = in_p;
      return arg0;

    default:
      return NULL_TREE;
    }
}
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same
   type as the returned expression.  If EXP is not a comparison, we
   will most likely not be returning a useful value and range.  Set
   *STRICT_OVERFLOW_P to true if the return value is only valid
   because signed overflow is undefined; otherwise, do not change
   *STRICT_OVERFLOW_P.  */

tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
            bool *strict_overflow_p)
{
  enum tree_code code;
  tree arg0, arg1 = NULL_TREE;
  tree exp_type, nexp;
  int in_p;
  tree low, high;
  location_t loc = EXPR_LOCATION (exp);

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = build_int_cst (TREE_TYPE (exp), 0);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);
      arg0 = NULL_TREE;

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
        {
          if (TREE_OPERAND_LENGTH (exp) > 0)
            arg0 = TREE_OPERAND (exp, 0);
          if (TREE_CODE_CLASS (code) == tcc_binary
              || TREE_CODE_CLASS (code) == tcc_comparison
              || (TREE_CODE_CLASS (code) == tcc_expression
                  && TREE_OPERAND_LENGTH (exp) > 1))
            arg1 = TREE_OPERAND (exp, 1);
        }
      if (arg0 == NULL_TREE)
        break;

      nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
                              &high, &in_p, strict_overflow_p);
      if (nexp == NULL_TREE)
        break;
      exp = nexp;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
                                                 exp, 0, low, 0))
                      && integer_onep (range_binop (LE_EXPR, integer_type_node,
                                                    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
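/* A small walk-through (illustrative only): starting from the initial
   "EXP != 0" range, for EXP = (x > 10) the comparison step rewrites the
   test as x lying outside - [-, 10], i.e. IN_P = 0, *PLOW missing and
   *PHIGH = 10, and x is returned as the expression actually tested.  */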
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.  */

tree
build_range_check (location_t loc, tree type, tree exp, int in_p,
		   tree low, tree high)
{
  tree etype = TREE_TYPE (exp), value;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (etype) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (! in_p)
    {
      value = build_range_check (loc, type, exp, 1, low, high);
      if (value != 0)
	return invert_truthvalue_loc (loc, value);

      return 0;
    }

  if (low == 0 && high == 0)
    return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);

  if (low == 0)
    return fold_build2_loc (loc, LE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, high));

  if (high == 0)
    return fold_build2_loc (loc, GE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  if (operand_equal_p (low, high, 0))
    return fold_build2_loc (loc, EQ_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
	{
	  etype = unsigned_type_for (etype);
	  high = fold_convert_loc (loc, etype, high);
	  exp = fold_convert_loc (loc, etype, exp);
	}
      return build_range_check (loc, type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      int prec = TYPE_PRECISION (etype);

      if (wi::mask (prec - 1, false, prec) == high)
	{
	  if (TYPE_UNSIGNED (etype))
	    {
	      tree signed_etype = signed_type_for (etype);
	      if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
		etype
		  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
	      else
		etype = signed_etype;
	      exp = fold_convert_loc (loc, etype, exp);
	    }
	  return fold_build2_loc (loc, GT_EXPR, type, exp,
				  build_int_cst (etype, 0));
	}
    }

  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetics for the type of the expression.
     First make sure that arithmetics in this type is valid, then make sure
     that it wraps around.  */
  if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
    etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
					    TYPE_UNSIGNED (etype));

  if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
	 for the type in question, as we rely on this here.  */
      utype = unsigned_type_for (etype);
      maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
			  build_int_cst (TREE_TYPE (maxv), 1), 1);
      minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
				      minv, 1, maxv, 1)))
	etype = utype;
      else
	return 0;
    }

  high = fold_convert_loc (loc, etype, high);
  low = fold_convert_loc (loc, etype, low);
  exp = fold_convert_loc (loc, etype, exp);

  value = const_binop (MINUS_EXPR, high, low);

  if (POINTER_TYPE_P (etype))
    {
      if (value != 0 && !TREE_OVERFLOW (value))
	{
	  low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
	  return build_range_check (loc, type,
				    fold_build_pointer_plus_loc (loc, exp, low),
				    1, build_int_cst (etype, 0), value);
	}
      return 0;
    }

  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (loc, type,
			      fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
			      1, build_int_cst (etype, 0), value);

  return 0;
}
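
/* A minimal standalone sketch (not part of the folder): the unsigned
   subtraction trick build_range_check emits for a bounded range.
   Assuming LOW <= HIGH and wrap-around unsigned arithmetic, the test
   "x >= low && x <= high" collapses to one comparison: when x < low,
   x - low wraps to a huge value and the <= test fails.  The _demo name
   is illustrative only.  */

static int
range_check_demo (unsigned int x, unsigned int low, unsigned int high)
{
  return x - low <= high - low;	/* One comparison instead of two.  */
}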
/* Return the predecessor of VAL in its type, handling the infinite case.  */

static tree
range_predecessor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
    return 0;
  else
    return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
			build_int_cst (TREE_TYPE (val), 1), 0);
}

/* Return the successor of VAL in its type, handling the infinite case.  */

static tree
range_successor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
    return 0;
  else
    return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
			build_int_cst (TREE_TYPE (val), 1), 0);
}
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

bool
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
	      tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (lowequal
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the second range to the end of the first.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)
	{
	  low = range_successor (high1);
	  high = high0;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* We are in the weird situation where high0 > high1 but
		 high1 has no successor.  Punt.  */
	      return 0;
	    }
	}
      else if (! subset || highequal)
	{
	  low = low0;
	  high = range_predecessor (low1);
	  in_p = 1;
	  if (high == 0)
	    {
	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
	      return 0;
	    }
	}
      else
	return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
	in_p = 0, low = high = 0;
      else
	{
	  low = range_successor (high0);
	  high = high1;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* high1 > high0 but high0 has no successor.  Punt.  */
	      return 0;
	    }
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of the
	 second.  */
      if (no_overlap)
	{
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_successor (high0),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    {
	      /* Canonicalize - [min, x] into - [-, x].  */
	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (low0)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (low0))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (low0,
					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
		      low0 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
			&& integer_zerop (low0))
		      low0 = 0;
		    break;
		  default:
		    break;
		  }

	      /* Canonicalize - [x, max] into - [x, -].  */
	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (high1)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (high1))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (high1,
					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
		      high1 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
						       high1, 1,
						       build_int_cst (TREE_TYPE (high1), 1),
						       1)))
		      high1 = 0;
		    break;
		  default:
		    break;
		  }

	      /* The ranges might be also adjacent between the maximum and
		 minimum values of the given type.  For
		 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
		 return + [x + 1, y - 1].  */
	      if (low0 == 0 && high1 == 0)
		{
		  low = range_successor (high0);
		  high = range_predecessor (low1);
		  if (low == 0 || high == 0)
		    return 0;

		  in_p = 1;
		}
	      else
		return 0;
	    }
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
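
/* A minimal standalone sketch (not part of the folder): the in0_p && in1_p
   case of merge_ranges over plain ints.  Intersecting two "inside" ranges
   yields nothing (disjoint), the second range (subset), or [low1, high0]
   (partial overlap), mirroring the three branches above.  This sketch
   returns 0 for an empty intersection, where the real routine instead
   reports an always-false range.  The _demo name is illustrative only.  */

static int
merge_in_in_demo (int low0, int high0, int low1, int high1,
		  int *plow, int *phigh)
{
  /* Mirror the canonicalization above: make range 0 start first,
     or end last when both start together.  */
  if (low0 > low1 || (low0 == low1 && high1 > high0))
    {
      int t = low0;  low0 = low1;  low1 = t;
      t = high0;  high0 = high1;  high1 = t;
    }
  if (high0 < low1)
    return 0;				/* Disjoint: always false.  */
  *plow = low1;
  *phigh = high1 <= high0 ? high1 : high0;
  return 1;
}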
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is being used also to optimize
   A op B ? C : A, by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (location_t loc, tree type,
				tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && (FLOAT_TYPE_P (TREE_TYPE (arg01))
	  ? real_zerop (arg01)
	  : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
	     /* In the case that A is of the form X-Y, '-A' (arg2) may
		have already been folded to Y-X, check for that.  */
	  || (TREE_CODE (arg1) == MINUS_EXPR
	      && TREE_CODE (arg2) == MINUS_EXPR
	      && operand_equal_p (TREE_OPERAND (arg1, 0),
				  TREE_OPERAND (arg2, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg1, 1),
				  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	tem = fold_convert_loc (loc, arg1_type, arg1);
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
							  negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert_loc (loc, signed_type_for
				   (TREE_TYPE (arg1)), arg1);
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
	if (flag_trapping_math)
	  break;
      case LE_EXPR:
      case LT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert_loc (loc, signed_type_for
				   (TREE_TYPE (arg1)), arg1);
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return negate_expr (fold_convert_loc (loc, type, tem));
      default:
	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      else if (comp_code == EQ_EXPR)
	return build_zero_cst (type);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
	 as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
	  || VECTOR_TYPE_P (type)
	  || (strcmp (lang_hooks.name, "GNU C++") != 0
	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
	  || ! maybe_lvalue_p (arg1)
	  || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
	{
	  comp_type = type;
	  comp_op0 = arg1;
	  comp_op1 = arg2;
	}

      switch (comp_code)
	{
	case EQ_EXPR:
	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
	case NE_EXPR:
	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
	case LE_EXPR:
	case LT_EXPR:
	case UNLE_EXPR:
	case UNLT_EXPR:
	  /* In C++ a ?: expression can be an lvalue, so put the
	     operand which will be used if they are equal first
	     so that we can convert this back to the
	     corresponding COND_EXPR.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
		    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2_loc (loc, MIN_EXPR, comp_type,
				       comp_op1, comp_op0);
	      return pedantic_non_lvalue_loc (loc,
					      fold_convert_loc (loc, type, tem));
	    }
	  break;
	case GE_EXPR:
	case GT_EXPR:
	case UNGE_EXPR:
	case UNGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
		    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2_loc (loc, MAX_EXPR, comp_type,
				       comp_op1, comp_op0);
	      return pedantic_non_lvalue_loc (loc,
					      fold_convert_loc (loc, type, tem));
	    }
	  break;
	case UNEQ_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, arg2));
	  break;
	case LTGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, arg1));
	  break;
	default:
	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	  break;
	}
    }

  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
	if (TREE_CODE (arg1) == INTEGER_CST)
	  break;
	/* We can replace A with C1 in this case.  */
	arg1 = fold_convert_loc (loc, type, arg01);
	return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);

      case LT_EXPR:
	/* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
	   MIN_EXPR, to preserve the signedness of the comparison.  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, tem));
	  }
	break;

      case LE_EXPR:
	/* If C1 is C2 - 1, this is min(A, C2), with the same care
	   as above.  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, tem));
	  }
	break;

      case GT_EXPR:
	/* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
	   MAX_EXPR, to preserve the signedness of the comparison.  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	  }
	break;

      case GE_EXPR:
	/* If C1 is C2 + 1, this is max(A, C2), with the same care as above.  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	  }
	break;

      case NE_EXPR:
	break;

      default:
	gcc_unreachable ();
      }

  return NULL_TREE;
}
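
/* A minimal standalone sketch (not part of the folder): two rows of the
   tables above, written out for ints, where signed zeros are not a
   concern.  "A > 0 ? A : -A" is abs (A), and "A < B ? A : B" is
   min (B, A); these identities are what the COND_EXPR gets rewritten
   into.  The _demo names are illustrative only.  */

static int
cond_abs_demo (int a)
{
  return a > 0 ? a : -a;	/* Same value as abs (a) for a > INT_MIN.  */
}

static int
cond_min_demo (int a, int b)
{
  return a < b ? a : b;		/* Same value as min (b, a).  */
}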
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
		false) >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
	       || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  bool strict_overflow_p = false;
  tree lhs, rhs, tem;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when simplifying range test");

  if (!INTEGRAL_TYPE_P (type))
    return 0;

  lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
  rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (loc, type,
					 lhs != 0 ? lhs
					 : rhs != 0 ? rhs : integer_zero_node,
					 in_p, low, high))))
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
      return or_op ? invert_truthvalue_loc (loc, tem) : tem;
    }

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
	   && lhs != 0 && rhs != 0
	   && (code == TRUTH_ANDIF_EXPR
	       || code == TRUTH_ORIF_EXPR)
	   && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build2_loc (loc, code == TRUTH_ANDIF_EXPR
			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			   type, op0, op1);

      else if (!lang_hooks.decls.global_bindings_p ()
	       && !CONTAINS_PLACEHOLDER_P (lhs))
	{
	  tree common = save_expr (lhs);

	  if (0 != (lhs = build_range_check (loc, type, common,
					     or_op ? ! in0_p : in0_p,
					     low0, high0))
	      && (0 != (rhs = build_range_check (loc, type, common,
						 or_op ? ! in1_p : in1_p,
						 low1, high1))))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (warnmsg,
				       WARN_STRICT_OVERFLOW_COMPARISON);
	      return build2_loc (loc, code == TRUTH_ANDIF_EXPR
				 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
				 type, lhs, rhs);
	    }
	}
    }

  return 0;
}
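
/* A minimal standalone sketch (not part of the folder): the
   non-short-circuit rewrite.  When both operands are simple and free of
   side effects, TRUTH_ANDIF can be evaluated as a bitwise AND of two
   0/1 values, trading a conditional branch for straight-line code.
   The _demo name is illustrative only.  */

static int
non_short_circuit_demo (int x)
{
  int branchy = x >= '0' && x <= '9';		/* Short-circuit form.  */
  int branchless = (x >= '0') & (x <= '9');	/* Branch-free form.  */
  return branchy == branchless;			/* Always 1.  */
}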
/* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is signed-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (signed_type_for (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
			fold_convert (TREE_TYPE (c), mask));
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
}
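
/* A minimal standalone sketch (not part of the folder): the sign-bit
   trick unextend is built around, applied to a P-bit value held in an
   unsigned int.  XORing with the sign bit and then subtracting it
   propagates bit P-1 through the upper bits using only unsigned
   arithmetic; the final cast assumes the usual two's-complement target.
   Valid for 0 < p < the bit-width of int.  The _demo name is
   illustrative only.  */

static int
sign_extend_demo (unsigned int v, int p)
{
  unsigned int sign = 1u << (p - 1);
  return (int) ((v ^ sign) - sign);
}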
/* For an expression that has the form
     (A && B) || ~A
   or
     (A || B) && ~A,
   we can drop one of the inner expressions and simplify to
     A || B
   or
     A && B

   LOC is the location of the resulting expression.  OP is the inner
   logical operation; the left-hand side in the examples above, while CMPOP
   is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
   removing a condition that guards another, as in
     (A != NULL && A->...) || A == NULL
   which we must not transform.  If RHS_ONLY is true, only eliminate the
   right-most operand of the inner logical operation.  */

static tree
merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
				 bool rhs_only)
{
  tree type = TREE_TYPE (cmpop);
  enum tree_code code = TREE_CODE (cmpop);
  enum tree_code truthop_code = TREE_CODE (op);
  tree lhs = TREE_OPERAND (op, 0);
  tree rhs = TREE_OPERAND (op, 1);
  tree orig_lhs = lhs, orig_rhs = rhs;
  enum tree_code rhs_code = TREE_CODE (rhs);
  enum tree_code lhs_code = TREE_CODE (lhs);
  enum tree_code inv_code;

  if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
    return NULL_TREE;

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return NULL_TREE;

  if (rhs_code == truthop_code)
    {
      tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
      if (newrhs != NULL_TREE)
	{
	  rhs = newrhs;
	  rhs_code = TREE_CODE (rhs);
	}
    }
  if (lhs_code == truthop_code && !rhs_only)
    {
      tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
      if (newlhs != NULL_TREE)
	{
	  lhs = newlhs;
	  lhs_code = TREE_CODE (lhs);
	}
    }

  inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
  if (inv_code == rhs_code
      && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return lhs;
  if (!rhs_only && inv_code == lhs_code
      && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return rhs;
  if (rhs != orig_rhs || lhs != orig_lhs)
    return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
			    lhs, rhs);
  return NULL_TREE;
}
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with the a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
		    tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  HOST_WIDE_INT first_bit, end_bit;
  int volatilep;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
		    build_int_cst (TREE_TYPE (lhs), 0));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
		    build_int_cst (TREE_TYPE (rhs), 0));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      if (operand_equal_p (ll_arg, rl_arg, 0)
	  && operand_equal_p (lr_arg, rr_arg, 0))
	{
	  result = combine_comparisons (loc, code, lcode, rcode,
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
      else if (operand_equal_p (ll_arg, rr_arg, 0)
	       && operand_equal_p (lr_arg, rl_arg, 0))
	{
	  result = combine_comparisons (loc, code, lcode,
					swap_tree_comparison (rcode),
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  */

  if (BRANCH_COST (optimize_function_for_speed_p (cfun),
		   false) >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
	  && lcode == NE_EXPR && integer_zerop (lr_arg)
	  && rcode == NE_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2_loc (loc, NE_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2_loc (loc, EQ_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (loc, ll_arg,
				     &ll_bitsize, &ll_bitpos, &ll_mode,
				     &ll_unsignedp, &volatilep, &ll_mask,
				     &ll_and_mask);
  lr_inner = decode_field_reference (loc, lr_arg,
				     &lr_bitsize, &lr_bitpos, &lr_mode,
				     &lr_unsignedp, &volatilep, &lr_mask,
				     &lr_and_mask);
  rl_inner = decode_field_reference (loc, rl_arg,
				     &rl_bitsize, &rl_bitpos, &rl_mode,
				     &rl_unsignedp, &volatilep, &rl_mask,
				     &rl_and_mask);
  rr_inner = decode_field_reference (loc, rr_arg,
				     &rr_bitsize, &rr_bitpos, &rr_mode,
				     &rr_unsignedp, &volatilep, &rr_mask,
				     &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
	   || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
	{
	  /* Make the left operand unsigned, since we are only interested
	     in the value of one bit.  Otherwise we are doing the wrong
	     thing below.  */
	  ll_unsignedp = 1;
	  l_const = ll_mask;
	}
      else
	return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
	{
	  rl_unsignedp = 1;
	  r_const = rl_mask;
	}
      else
	return 0;
    }

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
			  volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
			 size_int (xll_bitpos));
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
			 size_int (xrl_bitpos));

  if (l_const)
    {
      l_const = fold_convert_loc (loc, lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,
							 lntype, ll_mask))))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }
  if (r_const)
    {
      r_const = fold_convert_loc (loc, lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,
							 lntype, rl_mask))))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
	  /* Make sure the two fields on the right
	     correspond to the left without being swapped.  */
	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
	return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
			      volatilep);
      if (rnmode == VOIDmode)
	return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
	{
	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
	}

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
							    rntype, lr_mask),
			     size_int (xlr_bitpos));
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
							    rntype, rr_mask),
			     size_int (xrr_bitpos));

      /* Make a mask that corresponds to both fields being compared.
	 Do this for both items being compared.  If the operands are the
	 same size and the bits being compared are in the same position
	 then we can do this by masking both and comparing the masked
	 results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
	{
	  lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
				    ll_unsignedp || rl_unsignedp);
	  if (! all_ones_mask_p (ll_mask, lnbitsize))
	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

	  rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
				    lr_unsignedp || rr_unsignedp);
	  if (! all_ones_mask_p (lr_mask, rnbitsize))
	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
	}

      /* There is still another way we can do something:  If both pairs of
	 fields being compared are adjacent, we may be able to make a wider
	 field containing them both.

	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
	 the mask must be shifted to account for the shift done by
	 make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
	   && lr_bitsize + lr_bitpos == rr_bitpos)
	  || (ll_bitpos == rl_bitpos + rl_bitsize
	      && lr_bitpos == rr_bitpos + rr_bitsize))
	{
	  tree type;

	  lhs = make_bit_field_ref (loc, ll_inner, lntype,
				    ll_bitsize + rl_bitsize,
				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
	  rhs = make_bit_field_ref (loc, lr_inner, rntype,
				    lr_bitsize + rr_bitsize,
				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
				 size_int (MIN (xll_bitpos, xrl_bitpos)));
	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
				 size_int (MIN (xlr_bitpos, xrr_bitpos)));

	  /* Convert to the smaller type before masking out unwanted bits.  */
	  type = lntype;
	  if (lntype != rntype)
	    {
	      if (lnbitsize > rnbitsize)
		{
		  lhs = fold_convert_loc (loc, rntype, lhs);
		  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
		  type = rntype;
		}
	      else if (lnbitsize < rnbitsize)
		{
		  rhs = fold_convert_loc (loc, lntype, rhs);
		  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
		  type = lntype;
		}
	    }

	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
	}

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
			   const_binop (BIT_AND_EXPR, result, r_const)) != 1)
    {
      if (wanted_code == NE_EXPR)
	{
	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
	  return constant_boolean_node (true, truth_type);
	}
      else
	{
	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
	  return constant_boolean_node (false, truth_type);
	}
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
			       ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);

  return build2_loc (loc, wanted_code, truth_type, result,
		     const_binop (BIT_IOR_EXPR, l_const, r_const));
}
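
/* A minimal standalone sketch (not part of the folder): the
   "(a | b) == 0" rewrite performed above for integer operands free of
   side effects.  Two equality tests against zero become one bitwise OR
   and a single comparison.  The _demo name is illustrative only.  */

static int
or_compare_demo (int a, int b)
{
  int two_tests = a == 0 && b == 0;
  int one_test = (a | b) == 0;
  return two_tests == one_test;		/* Always 1.  */
}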
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
			    tree op0, tree op1)
{
  tree arg0 = op0;
  enum tree_code op_code;
  tree comp_const;
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_OVERFLOW (minmax_const))
    return NULL_TREE;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (code)
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      {
	tree tem
	  = optimize_minmax_comparison (loc,
					invert_tree_comparison (code, false),
					type, op0, op1);
	if (tem)
	  return invert_truthvalue_loc (loc, tem);
	return NULL_TREE;
      }

    case GE_EXPR:
      return
	fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
			 optimize_minmax_comparison
			 (loc, EQ_EXPR, type, arg0, comp_const),
			 optimize_minmax_comparison
			 (loc, GT_EXPR, type, arg0, comp_const));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
	/* MAX (X, 0) == 0  ->  X <= 0  */
	return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR && consts_lt)
	/* MAX (X, 0) == 5  ->  X == 5   */
	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) == -1  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else if (consts_equal)
	/* MIN (X, 0) == 0  ->  X >= 0  */
	return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);

      else if (consts_lt)
	/* MIN (X, 0) == 5  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
	/* MIN (X, 0) == -1  ->  X == -1  */
	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
	/* MAX (X, 0) > 0  ->  X > 0
	   MAX (X, 0) > 5  ->  X > 5  */
	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) > -1  ->  true  */
	return omit_one_operand_loc (loc, type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
	/* MIN (X, 0) > 0  ->  false
	   MIN (X, 0) > 5  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
	/* MIN (X, 0) > -1  ->  X > -1  */
	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

    default:
      return NULL_TREE;
    }
}
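
/* A minimal standalone sketch (not part of the folder): two rows of the
   MIN/MAX comparison table above, over plain ints.  MAX (x, 0) == 5 can
   only hold when x == 5, and MAX (x, 0) > -1 is always true.  The _demo
   name is illustrative only.  */

static int
minmax_compare_demo (int x)
{
  int max_x_0 = x > 0 ? x : 0;
  return (max_x_0 == 5) == (x == 5)	/* MAX (X, 0) == 5  ->  X == 5.  */
	 && (max_x_0 > -1) == 1;	/* MAX (X, 0) > -1  ->  true.  */
}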
/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.

   We set *STRICT_OVERFLOW_P to true if the return value depends on
   signed overflow being undefined.  Otherwise we do not change
   *STRICT_OVERFLOW_P.  */

static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
		bool *strict_overflow_p)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  static int depth;
  tree ret;

  if (depth > 3)
    return NULL;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
  depth--;

  return ret;
}

static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
		  bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
				   > GET_MODE_SIZE (TYPE_MODE (type)))
		? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;
  bool sub_strict_overflow_p;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
	 or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
	  || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
	return const_binop (code, fold_convert (ctype, t),
			    fold_convert (ctype, c));
      break;

    CASE_CONVERT: case NON_LVALUE_EXPR:
      /* If op0 is an expression ...  */
      if ((COMPARISON_CLASS_P (op0)
	   || UNARY_CLASS_P (op0)
	   || BINARY_CLASS_P (op0)
	   || VL_EXP_CLASS_P (op0)
	   || EXPRESSION_CLASS_P (op0))
	  /* ... and has wrapping overflow, and its type is smaller
	     than ctype, then we cannot pass through as widening.  */
	  && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
	       && (TYPE_PRECISION (ctype)
		   > TYPE_PRECISION (TREE_TYPE (op0))))
	      /* ... or this is a truncation (t is narrower than op0),
		 then we cannot pass through this narrowing.  */
	      || (TYPE_PRECISION (type)
		  < TYPE_PRECISION (TREE_TYPE (op0)))
	      /* ... or signedness changes for division or modulus,
		 then we cannot pass through this conversion.  */
	      || (code != MULT_EXPR
		  && (TYPE_UNSIGNED (ctype)
		      != TYPE_UNSIGNED (TREE_TYPE (op0))))
	      /* ... or has undefined overflow while the converted to
		 type has not, we cannot do the operation in the inner type
		 as that would introduce undefined overflow.  */
	      || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
		  && !TYPE_OVERFLOW_UNDEFINED (type))))
	break;

      /* Pass the constant down and see if we can make a simplification.  If
	 we can, replace this expression with the inner simplification for
	 possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
	  && TREE_CODE (t2) == INTEGER_CST
	  && !TREE_OVERFLOW (t2)
	  && (0 != (t1 = extract_muldiv (op0, t2, code,
					 code == MULT_EXPR
					 ? ctype : NULL_TREE,
					 strict_overflow_p))))
	return t1;
      break;

    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
	 must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
	{
	  tree cstype = (*signed_type_for) (ctype);
	  if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
	      != 0)
	    {
	      t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
	      return fold_convert (ctype, t1);
	    }
	  break;
	}
      /* If the constant is negative, we cannot simplify this.  */
      if (tree_int_cst_sgn (c) == -1)
	break;
      /* FALLTHROUGH */
    case NEGATE_EXPR:
      /* For division and modulus, type can't be unsigned, as e.g.
	 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
	 For signed types, even with wrapping overflow, this is fine.  */
      if (code != MULT_EXPR && TYPE_UNSIGNED (type))
	break;
      if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
	  != 0)
	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
	 this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
	break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      sub_strict_overflow_p = false;
      if ((t1 = extract_muldiv (op0, c, code, wide_type,
				&sub_strict_overflow_p)) != 0
	  && (t2 = extract_muldiv (op1, c, code, wide_type,
				   &sub_strict_overflow_p)) != 0)
	{
	  if (tree_int_cst_sgn (c) < 0)
	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
	 or floor division, by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  Signed
	 left-shift overflow is implementation-defined rather than
	 undefined in C90, so do not convert signed left shift into
	 multiplication.  */
      if (TREE_CODE (op1) == INTEGER_CST
	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
	  && 0 != (t1 = fold_convert (ctype,
				      const_binop (LSHIFT_EXPR,
						   size_one_node,
						   op1)))
	  && !TREE_OVERFLOW (t1))
	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
				       ? MULT_EXPR : FLOOR_DIV_EXPR,
				       ctype,
				       fold_convert (ctype, op0),
				       t1),
			       c, code, wide_type, strict_overflow_p);
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
      sub_strict_overflow_p = false;
      t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
      t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
      if (t1 != 0 && t2 != 0
	  && (code == MULT_EXPR
	      /* If not multiplication, we can only do this if both operands
		 are divisible by c.  */
	      || (multiple_of_p (ctype, op0, c)
		  && multiple_of_p (ctype, op1, c))))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}

      /* If this was a subtraction, negate OP1 and set it to be an addition.
	 This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
	{
	  tcode = PLUS_EXPR, op1 = negate_expr (op1);
	  /* If OP1 was not easily negatable, the constant may be OP0.  */
	  if (TREE_CODE (op0) == INTEGER_CST)
	    {
	      tree tem = op0;
	      op0 = op1;
	      op1 = tem;
	      tem = t1;
	      t1 = t2;
	      t2 = tem;
	    }
	}

      if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If either OP1 or C are negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
	    break;
	}

      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
	{
	  op1 = const_binop (code, fold_convert (ctype, op1),
			     fold_convert (ctype, c));
	  /* We allow the constant to overflow with wrapping semantics.  */
	  if (op1 == 0
	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
	    break;
	}
      else
	break;

      /* If we have an unsigned type, we cannot widen the operation since it
	 will change the result if the original computation overflowed.  */
      if (TYPE_UNSIGNED (ctype) && ctype != type)
	break;

      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow
	 and overflow is defined.  With undefined overflow
	 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.  */
      if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
	return fold_build2 (tcode, ctype,
			    fold_build2 (code, ctype,
					 fold_convert (ctype, op0),
					 fold_convert (ctype, c)),
			    op1);

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  /* If the multiplication can overflow we cannot optimize this.  */
	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
	{
	  *strict_overflow_p = true;
	  return omit_one_operand (type, integer_zero_node, op0);
	}

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && (t1 = extract_muldiv (op0, c, code, wide_type,
				   strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && (t1 = extract_muldiv (op1, c, code, wide_type,
					strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
			    fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
	return 0;

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code)
	{
	  bool overflow_p = false;
	  bool overflow_mul_p;
	  signop sign = TYPE_SIGN (ctype);
	  wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
	  overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
	  if (overflow_mul_p
	      && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
	    overflow_p = true;
	  if (!overflow_p)
	    return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				wide_int_to_tree (ctype, mul));
	}

      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with either an
	 operation of CODE or TCODE.

	 If we have an unsigned type, we cannot do this since it will change
	 the result if the original computation overflowed.  */
      if (TYPE_OVERFLOW_UNDEFINED (ctype)
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
		  && code != MULT_EXPR)))
	{
	  if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     op1, c)));
	    }
	  else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (code, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     c, op1)));
	    }
	}
      break;

    default:
      break;
    }

  return 0;
}
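
/* A minimal standalone sketch (not part of the folder): the worked
   example from the comment above extract_muldiv.  (x * 8 + y * 16) / 4
   rewrites to x * 2 + y * 4 because each addend is a multiple of 4;
   the two forms agree whenever the original sum does not wrap, which
   is exactly the no-overflow (or undefined-overflow) guarantee the
   folder insists on.  The _demo name is illustrative only.  */

static unsigned int
extract_muldiv_demo (unsigned int x, unsigned int y)
{
  unsigned int before = (x * 8 + y * 16) / 4;	/* As written.  */
  unsigned int after = x * 2 + y * 4;		/* As folded.  */
  return before == after;	/* 1 whenever x * 8 + y * 16 fits.  */
}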
/* Return a node which has the indicated constant VALUE (either 0 or
   1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
   and is of the indicated TYPE.  */

tree
constant_boolean_node (bool value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else if (TREE_CODE (type) == VECTOR_TYPE)
    return build_vector_from_val (type,
				  build_int_cst (TREE_TYPE (type),
						 value ? -1 : 0));
  else
    return fold_convert (type, value ? integer_one_node : integer_zero_node);
}
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (location_t loc,
				     enum tree_code code,
				     tree type, tree op0, tree op1,
				     tree cond, tree arg, int cond_first_p)
{
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;
  enum tree_code cond_code = COND_EXPR;

  if (TREE_CODE (cond) == COND_EXPR
      || TREE_CODE (cond) == VEC_COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an exception, then it does not make
	 sense to try to perform a logical or arithmetic operation
	 involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	rhs = false_value;
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
    cond_code = VEC_COND_EXPR;

  /* This transformation is only worthwhile if we don't have to wrap ARG
     in a SAVE_EXPR and the operation can be simplified without recursing
     on at least one of the branches once it's pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg)
      && (TREE_SIDE_EFFECTS (arg)
	  || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
	  || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
    return NULL_TREE;

  arg = fold_convert_loc (loc, arg_type, arg);
  if (lhs == 0)
    {
      true_value = fold_convert_loc (loc, cond_type, true_value);
      if (cond_first_p)
	lhs = fold_build2_loc (loc, code, type, true_value, arg);
      else
	lhs = fold_build2_loc (loc, code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert_loc (loc, cond_type, false_value);
      if (cond_first_p)
	rhs = fold_build2_loc (loc, code, type, false_value, arg);
      else
	rhs = fold_build2_loc (loc, code, type, arg, false_value);
    }

  /* Check that we have simplified at least one of the branches.  */
  if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
    return NULL_TREE;

  return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
}
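/* For instance, 5 + (b ? 3 : 2) is rewritten here as b ? 8 : 7: the
   constant 5 is pushed into both arms, each arm folds to a constant,
   and the check above confirms that at least one branch simplified.  */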
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

bool
fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (TYPE_MODE (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;

  /* In a vector or complex, we would need to check the sign of all zeros.  */
  if (TREE_CODE (addend) != REAL_CST)
    return false;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}
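/* Concretely: X - 0.0 may be folded to X even when signed zeros are
   honored (unless sign-dependent rounding matters), but X + 0.0 may
   not, because (-0.0) + 0.0 evaluates to +0.0, not -0.0.  */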
/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_mathfn_compare (location_t loc,
		     enum built_in_function fcode, enum tree_code code,
		     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      tree arg = CALL_EXPR_ARG (arg0, 0);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
	{
	  /* sqrt(x) < y is always false, if y is negative.  */
	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
	    return omit_one_operand_loc (loc, type, integer_zero_node, arg);

	  /* sqrt(x) > y is always true, if y is negative and we
	     don't care about NaNs, i.e. negative values of x.  */
	  if (code == NE_EXPR || !HONOR_NANS (mode))
	    return omit_one_operand_loc (loc, type, integer_one_node, arg);

	  /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
	  return fold_build2_loc (loc, GE_EXPR, type, arg,
				  build_real (TREE_TYPE (arg), dconst0));
	}
      else if (code == GT_EXPR || code == GE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) > y is x == +Inf, when y is very large.  */
	      if (HONOR_INFINITIES (mode))
		return fold_build2_loc (loc, EQ_EXPR, type, arg,
					build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) > y is always false, when y is very large
		 and we don't care about infinities.  */
	      return omit_one_operand_loc (loc, type, integer_zero_node, arg);
	    }

	  /* sqrt(x) > c is the same as x > c*c.  */
	  return fold_build2_loc (loc, code, type, arg,
				  build_real (TREE_TYPE (arg), c2));
	}
      else if (code == LT_EXPR || code == LE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) < y is always true, when y is a very large
		 value and we don't care about NaNs or Infinities.  */
	      if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
		return omit_one_operand_loc (loc, type, integer_one_node, arg);

	      /* sqrt(x) < y is x != +Inf when y is very large and we
		 don't care about NaNs.  */
	      if (! HONOR_NANS (mode))
		return fold_build2_loc (loc, NE_EXPR, type, arg,
					build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) < y is x >= 0 when y is very large and we
		 don't care about Infinities.  */
	      if (! HONOR_INFINITIES (mode))
		return fold_build2_loc (loc, GE_EXPR, type, arg,
					build_real (TREE_TYPE (arg), dconst0));

	      /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
	      arg = save_expr (arg);
	      return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				      fold_build2_loc (loc, GE_EXPR, type, arg,
						       build_real (TREE_TYPE (arg),
								   dconst0)),
				      fold_build2_loc (loc, NE_EXPR, type, arg,
						       build_real (TREE_TYPE (arg),
								   c2)));
	    }

	  /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
	  if (! HONOR_NANS (mode))
	    return fold_build2_loc (loc, code, type, arg,
				    build_real (TREE_TYPE (arg), c2));

	  /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
	  arg = save_expr (arg);
	  return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				  fold_build2_loc (loc, GE_EXPR, type, arg,
						   build_real (TREE_TYPE (arg),
							       dconst0)),
				  fold_build2_loc (loc, code, type, arg,
						   build_real (TREE_TYPE (arg),
							       c2)));
	}
    }

  return NULL_TREE;
}
/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_inf_compare (location_t loc, enum tree_code code, tree type,
		  tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
	return NULL_TREE;
      return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. !isnan(x).  */
      arg0 = save_expr (arg0);
      return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
	return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
				arg0, build_real (TREE_TYPE (arg0), max));

      temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));
      return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);

    default:
      break;
    }

  return NULL_TREE;
}
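/* For example, for CODE == LT_EXPR and ARG1 == +Inf in double,
   x < +Inf is rewritten as x <= DBL_MAX; and x != +Inf with NaNs
   honored becomes !(x > DBL_MAX).  */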
/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_div_compare (location_t loc,
		  enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  signop sign = TYPE_SIGN (TREE_TYPE (arg0));
  bool neg_overflow = false;
  bool overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1);  */
  wide_int val = wi::mul (arg01, arg1, sign, &overflow);
  prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
  neg_overflow = false;

  if (sign == UNSIGNED)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp).  */
      val = wi::add (prod, tmp, sign, &overflow);
      hi = force_fit_type (TREE_TYPE (arg00), val,
			   -1, overflow | TREE_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  neg_overflow = true;
	  lo = int_const_binop (MINUS_EXPR, prod, tmp);
	  hi = prod;
	  break;

	case 0:
	  lo = fold_negate_const (tmp, TREE_TYPE (arg0));
	  hi = tmp;
	  break;

	case 1:
	  hi = int_const_binop (PLUS_EXPR, prod, tmp);
	  lo = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  hi = int_const_binop (MINUS_EXPR, prod, tmp);
	  lo = prod;
	  break;

	case 0:
	  hi = fold_negate_const (tmp, TREE_TYPE (arg0));
	  lo = tmp;
	  break;

	case 1:
	  neg_overflow = true;
	  lo = int_const_binop (PLUS_EXPR, prod, tmp);
	  hi = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand_loc (loc, type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}
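/* For example, with signed x the comparison x / 4 == 2 computes
   prod = 8 and tmp = 3, giving lo = 8 and hi = 11, and is rewritten
   as the range check 8 <= x && x <= 11 -- exactly the values whose
   truncating division by 4 yields 2.  */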
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of the test
   using a sign test.  Otherwise return NULL.  TYPE is the desired
   result type.  */

static tree
fold_single_bit_test_into_sign_test (location_t loc,
				     enum tree_code code, tree arg0, tree arg1,
				     tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      /* If we have (A & C) != 0 where C is the sign bit of A, convert
	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      if (arg00 != NULL_TREE
	  /* This is only a win if casting to a signed type is cheap,
	     i.e. when arg00's type is not a partial mode.  */
	  && TYPE_PRECISION (TREE_TYPE (arg00))
	     == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
	{
	  tree stype = signed_type_for (TREE_TYPE (arg00));
	  return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
				  result_type,
				  fold_convert_loc (loc, stype, arg00),
				  build_int_cst (stype, 0));
	}
    }

  return NULL_TREE;
}
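/* For example, when x has an 8-bit type, (x & 0x80) != 0 tests
   exactly the sign bit and becomes x < 0 computed in the
   corresponding signed type; (x & 0x80) == 0 becomes x >= 0.  */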
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */

tree
fold_single_bit_test (location_t loc, enum tree_code code,
		      tree arg0, tree arg1, tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem, one;

      /* First, see if we can fold the single bit test into a sign-bit
	 test.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
						 result_type);
      if (tem)
	return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
	 convert that into ((A >> C2) & 1).  Where C2 = log2(C).
	 Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && bitnum < TYPE_PRECISION (type)
	  && wi::ltu_p (TREE_OPERAND (inner, 1),
			TYPE_PRECISION (type) - bitnum))
	{
	  bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
		      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert_loc (loc, intermediate_type, inner);

      if (bitnum != 0)
	inner = build2 (RSHIFT_EXPR, intermediate_type,
			inner, size_int (bitnum));

      one = build_int_cst (intermediate_type, 1);

      if (code == EQ_EXPR)
	inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type,
				 inner, one);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);

      /* Make sure to return the proper type.  */
      inner = fold_convert_loc (loc, result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
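/* For example, (x & 8) != 0 becomes (x >> 3) & 1, computed in an
   intermediate type of the same mode, and (x & 8) == 0 becomes
   ((x >> 3) ^ 1) & 1, since 8 == 1 << 3.  */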
/* Check whether we are allowed to reorder operands arg0 and arg1,
   such that the evaluation of arg1 occurs before arg0.  */

static bool
reorder_operands_p (const_tree arg0, const_tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
	 && ! TREE_SIDE_EFFECTS (arg1);
}
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */

bool
tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
{
  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return 0;
  if (TREE_CODE (arg0) == INTEGER_CST)
    return 1;

  if (TREE_CODE (arg1) == REAL_CST)
    return 0;
  if (TREE_CODE (arg0) == REAL_CST)
    return 1;

  if (TREE_CODE (arg1) == FIXED_CST)
    return 0;
  if (TREE_CODE (arg0) == FIXED_CST)
    return 1;

  if (TREE_CODE (arg1) == COMPLEX_CST)
    return 0;
  if (TREE_CODE (arg0) == COMPLEX_CST)
    return 1;

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (optimize_function_for_size_p (cfun))
    return 0;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  /* It is preferable to swap two SSA_NAME to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return 1;

  /* Put SSA_NAMEs last.  */
  if (TREE_CODE (arg1) == SSA_NAME)
    return 0;
  if (TREE_CODE (arg0) == SSA_NAME)
    return 1;

  /* Put variables last.  */
  if (DECL_P (arg1))
    return 0;
  if (DECL_P (arg0))
    return 1;

  return 0;
}
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
   ARG0 is extended to a wider type.  */

static tree
fold_widened_comparison (location_t loc, enum tree_code code,
			 tree type, tree arg0, tree arg1)
{
  tree arg0_unw = get_unwidened (arg0, NULL_TREE);
  tree arg1_unw;
  tree shorter_type, outer_type;
  tree min, max;
  bool above, below;

  if (arg0_unw == arg0)
    return NULL_TREE;
  shorter_type = TREE_TYPE (arg0_unw);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (shorter_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
    return NULL_TREE;

  arg1_unw = get_unwidened (arg1, NULL_TREE);

  /* If possible, express the comparison in the shorter mode.  */
  if ((code == EQ_EXPR || code == NE_EXPR
       || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
      && (TREE_TYPE (arg1_unw) == shorter_type
	  || ((TYPE_PRECISION (shorter_type)
	       >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
	      && (TYPE_UNSIGNED (shorter_type)
		  == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
	  || (TREE_CODE (arg1_unw) == INTEGER_CST
	      && (TREE_CODE (shorter_type) == INTEGER_TYPE
		  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
	      && int_fits_type_p (arg1_unw, shorter_type))))
    return fold_build2_loc (loc, code, type, arg0_unw,
			    fold_convert_loc (loc, shorter_type, arg1_unw));

  if (TREE_CODE (arg1_unw) != INTEGER_CST
      || TREE_CODE (shorter_type) != INTEGER_TYPE
      || !int_fits_type_p (arg1_unw, shorter_type))
    return NULL_TREE;

  /* If we are comparing with the integer that does not fit into the range
     of the shorter type, the result is known.  */
  outer_type = TREE_TYPE (arg1_unw);
  min = lower_bound_in_type (outer_type, shorter_type);
  max = upper_bound_in_type (outer_type, shorter_type);

  above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   max, arg1_unw));
  below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   arg1_unw, min));

  switch (code)
    {
    case EQ_EXPR:
      if (above || below)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;

    case NE_EXPR:
      if (above || below)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;

    case LT_EXPR:
    case LE_EXPR:
      if (above)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      else if (below)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;

    case GT_EXPR:
    case GE_EXPR:
      if (above)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      else if (below)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
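/* For example, if us has type unsigned short, (int) us == 70000 is
   known to be false here, because 70000 lies outside the range
   [0, 65535] of the shorter type.  */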
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
   ARG0 just the signedness is changed.  */

static tree
fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
			      tree arg0, tree arg1)
{
  tree arg0_inner;
  tree inner_type, outer_type;

  if (!CONVERT_EXPR_P (arg0))
    return NULL_TREE;

  outer_type = TREE_TYPE (arg0);
  arg0_inner = TREE_OPERAND (arg0, 0);
  inner_type = TREE_TYPE (arg0_inner);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (inner_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) != INTEGER_CST
      && !(CONVERT_EXPR_P (arg1)
	   && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
    return NULL_TREE;

  if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
      && code != NE_EXPR
      && code != EQ_EXPR)
    return NULL_TREE;

  if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) == INTEGER_CST)
    arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
			   TREE_OVERFLOW (arg1));
  else
    arg1 = fold_convert_loc (loc, inner_type, arg1);

  return fold_build2_loc (loc, code, type, arg0_inner, arg1);
}
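/* For example, if i has type int, (unsigned int) i == 5U is rewritten
   as i == 5: the cast changes only the signedness, which cannot
   affect an equality test between same-precision values.  */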
/* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
   step of the array.  Reconstructs s and delta in the case of s *
   delta being an integer constant (and thus already folded).  ADDR is
   the address.  MULT is the multiplicative expression.  If the
   function succeeds, the new address expression is returned.
   Otherwise NULL_TREE is returned.  LOC is the location of the
   resulting expression.  */

static tree
try_move_mult_to_index (location_t loc, tree addr, tree op1)
{
  tree s, delta, step;
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;
  bool mdim = false;

  /* Strip the nops that might be added when converting op1 to sizetype.  */
  STRIP_NOPS (op1);

  /* Canonicalize op1 into a possibly non-constant delta
     and an INTEGER_CST s.  */
  if (TREE_CODE (op1) == MULT_EXPR)
    {
      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  s = arg0;
	  delta = arg1;
	}
      else if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  s = arg1;
	  delta = arg0;
	}
      else
	return NULL_TREE;
    }
  else if (TREE_CODE (op1) == INTEGER_CST)
    {
      delta = op1;
      s = NULL_TREE;
    }
  else
    {
      /* Simulate we are delta * 1.  */
      delta = op1;
      s = integer_one_node;
    }

  /* Handle &x.array the same as we would handle &x.array[0].  */
  if (TREE_CODE (ref) == COMPONENT_REF
      && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
    {
      tree domain;

      /* Remember if this was a multi-dimensional array.  */
      if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
	mdim = true;

      domain = TYPE_DOMAIN (TREE_TYPE (ref));
      if (! domain)
	goto cont;
      itype = TREE_TYPE (domain);

      step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
      if (TREE_CODE (step) != INTEGER_CST)
	goto cont;

      if (s)
	{
	  if (! tree_int_cst_equal (step, s))
	    goto cont;
	}
      else
	{
	  /* Try if delta is a multiple of step.  */
	  tree tmp = div_if_zero_remainder (op1, step);
	  if (! tmp)
	    goto cont;
	  delta = tmp;
	}

      /* Only fold here if we can verify we do not overflow one
	 dimension of a multi-dimensional array.  */
      if (mdim)
	{
	  tree tmp;

	  if (!TYPE_MIN_VALUE (domain)
	      || !TYPE_MAX_VALUE (domain)
	      || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
	    goto cont;

	  tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
				 fold_convert_loc (loc, itype,
						   TYPE_MIN_VALUE (domain)),
				 fold_convert_loc (loc, itype, delta));
	  if (TREE_CODE (tmp) != INTEGER_CST
	      || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
	    goto cont;
	}

      /* We found a suitable component reference.  */

      pref = TREE_OPERAND (addr, 0);
      ret = copy_node (pref);
      SET_EXPR_LOCATION (ret, loc);

      ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
			fold_build2_loc
			  (loc, PLUS_EXPR, itype,
			   fold_convert_loc (loc, itype,
					     TYPE_MIN_VALUE
					       (TYPE_DOMAIN (TREE_TYPE (ref)))),
			   fold_convert_loc (loc, itype, delta)),
			NULL_TREE, NULL_TREE);
      return build_fold_addr_expr_loc (loc, ret);
    }

cont:

  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
	{
	  tree domain;

	  /* Remember if this was a multi-dimensional array.  */
	  if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
	    mdim = true;

	  domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
	  if (! domain)
	    continue;
	  itype = TREE_TYPE (domain);

	  step = array_ref_element_size (ref);
	  if (TREE_CODE (step) != INTEGER_CST)
	    continue;

	  if (s)
	    {
	      if (! tree_int_cst_equal (step, s))
		continue;
	    }
	  else
	    {
	      /* Try if delta is a multiple of step.  */
	      tree tmp = div_if_zero_remainder (op1, step);
	      if (! tmp)
		continue;
	      delta = tmp;
	    }

	  /* Only fold here if we can verify we do not overflow one
	     dimension of a multi-dimensional array.  */
	  if (mdim)
	    {
	      tree tmp;

	      if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
		  || !TYPE_MAX_VALUE (domain)
		  || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
		continue;

	      tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
				     fold_convert_loc (loc, itype,
						       TREE_OPERAND (ref, 1)),
				     fold_convert_loc (loc, itype, delta));
	      if (!tmp
		  || TREE_CODE (tmp) != INTEGER_CST
		  || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
		continue;
	    }

	  break;
	}
      else
	mdim = false;

      if (!handled_component_p (ref))
	return NULL_TREE;
    }

  /* We found the suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  SET_EXPR_LOCATION (ret, loc);
  pos = ret;

  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  TREE_OPERAND (pos, 1)
    = fold_build2_loc (loc, PLUS_EXPR, itype,
		       fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
		       fold_convert_loc (loc, itype, delta));
  return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
}
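/* For example, given int a[10] on a target with 4-byte int, the
   address computation &a[1] p+ 12 steps over 12 / 4 == 3 elements
   and is rewritten as &a[4].  */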
/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */

static tree
fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
{
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;

  if (TREE_CODE (bound) == LT_EXPR)
    a = TREE_OPERAND (bound, 0);
  else if (TREE_CODE (bound) == GT_EXPR)
    a = TREE_OPERAND (bound, 1);
  else
    return NULL_TREE;

  typea = TREE_TYPE (a);
  if (!INTEGRAL_TYPE_P (typea)
      && !POINTER_TYPE_P (typea))
    return NULL_TREE;

  if (TREE_CODE (ineq) == LT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 1);
      y = TREE_OPERAND (ineq, 0);
    }
  else if (TREE_CODE (ineq) == GT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 0);
      y = TREE_OPERAND (ineq, 1);
    }
  else
    return NULL_TREE;

  if (TREE_TYPE (a1) != typea)
    return NULL_TREE;

  if (POINTER_TYPE_P (typea))
    {
      /* Convert the pointer types into integer before taking the difference.  */
      tree ta = fold_convert_loc (loc, ssizetype, a);
      tree ta1 = fold_convert_loc (loc, ssizetype, a1);
      diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
    }
  else
    diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);

  if (!diff || !integer_onep (diff))
    return NULL_TREE;

  return fold_build2_loc (loc, GE_EXPR, type, a, y);
}
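/* For example, given BOUND a < x and INEQ a + 1 > y, the difference
   (a + 1) - a folds to 1, so the non-sharp form a >= y is returned;
   the a != MAX corner case is ruled out by a < x <= MAX.  */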
/* Fold a sum or difference of at least one multiplication.
   Returns the folded tree or NULL if no simplification could be made.  */

static tree
fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
			  tree arg0, tree arg1)
{
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else if (TREE_CODE (arg0) == INTEGER_CST)
    {
      arg00 = build_one_cst (type);
      arg01 = arg0;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      arg00 = arg0;
      arg01 = build_one_cst (type);
    }
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else if (TREE_CODE (arg1) == INTEGER_CST)
    {
      arg10 = build_one_cst (type);
      /* As we canonicalize A - 2 to A + -2 get rid of that sign for
	 the purpose of this canonicalization.  */
      if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
	  && negate_expr_p (arg1)
	  && code == PLUS_EXPR)
	{
	  arg11 = negate_expr (arg1);
	  code = MINUS_EXPR;
	}
      else
	arg11 = arg1;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      arg10 = arg1;
      arg11 = build_one_cst (type);
    }
  same = NULL_TREE;

  if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
  else if (tree_fits_shwi_p (arg01)
	   && tree_fits_shwi_p (arg11))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = tree_to_shwi (arg01);
      int11 = tree_to_shwi (arg11);

      /* Move min of absolute values to int11.  */
      if (absu_hwi (int01) < absu_hwi (int11))
	{
	  tmp = int01, int01 = int11, int11 = tmp;
	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
	  maybe_same = arg01;
	  swap = true;
	}
      else
	maybe_same = arg11;

      if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
	  /* The remainder should not be a constant, otherwise we
	     end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
	     increased the number of multiplications necessary.  */
	  && TREE_CODE (arg10) != INTEGER_CST)
	{
	  alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
				  build_int_cst (TREE_TYPE (arg00),
						 int01 / int11));
	  alt1 = arg10;
	  same = maybe_same;
	  if (swap)
	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
	}
    }

  if (same)
    return fold_build2_loc (loc, MULT_EXPR, type,
			    fold_build2_loc (loc, code, type,
					     fold_convert_loc (loc, type, alt0),
					     fold_convert_loc (loc, type, alt1)),
			    fold_convert_loc (loc, type, same));

  return NULL_TREE;
}
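/* For example, x*4 + x*12 shares the multiplicand x and becomes
   x * 16, while i*12 + j*4 has no common operand but does have a
   common power-of-two factor and becomes (i*3 + j) * 4.  The guard
   on constant remainders keeps i*4 + 2 from becoming (i*2 + 1) * 2.  */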
/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  if ((off == -1 && total_bytes > len)
      || off >= total_bytes)
    return 0;
  if (off == -1)
    off = 0;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
	 number of bytes.  */
      value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);

      if (total_bytes > UNITS_PER_WORD)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      if (offset >= off
	  && offset - off < len)
	ptr[offset - off] = value;
    }
  return MIN (len, total_bytes - off);
}
/* Subroutine of native_encode_expr.  Encode the FIXED_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
{
  tree type = TREE_TYPE (expr);
  enum machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  FIXED_VALUE_TYPE value;
  tree i_value, i_type;

  if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return 0;

  i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);

  if (NULL_TREE == i_type
      || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
    return 0;

  value = TREE_FIXED_CST (expr);
  i_value = double_int_to_tree (i_type, value.data);

  return native_encode_int (i_value, ptr, len, off);
}
/* Subroutine of native_encode_expr.  Encode the REAL_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words, bitpos;
  unsigned char value;

  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  long tmp[6];

  if ((off == -1 && total_bytes > len)
      || off >= total_bytes)
    return 0;
  if (off == -1)
    off = 0;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));

  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));

      if (UNITS_PER_WORD < 4)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
      if (offset >= off
	  && offset - off < len)
	ptr[offset - off] = value;
    }
  return MIN (len, total_bytes - off);
}
/* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
{
  int rsize, isize;
  tree part;

  part = TREE_REALPART (expr);
  rsize = native_encode_expr (part, ptr, len, off);
  if (off == -1
      && rsize == 0)
    return 0;
  part = TREE_IMAGPART (expr);
  if (off != -1)
    off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
  isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
  if (off == -1
      && isize != rsize)
    return 0;
  return rsize + isize;
}
/* Subroutine of native_encode_expr.  Encode the VECTOR_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
{
  unsigned i, count;
  int size, offset;
  tree itype, elem;

  offset = 0;
  count = VECTOR_CST_NELTS (expr);
  itype = TREE_TYPE (TREE_TYPE (expr));
  size = GET_MODE_SIZE (TYPE_MODE (itype));
  for (i = 0; i < count; i++)
    {
      if (off >= size)
	{
	  off -= size;
	  continue;
	}
      elem = VECTOR_CST_ELT (expr, i);
      int res = native_encode_expr (elem, ptr+offset, len-offset, off);
      if ((off == -1 && res != size)
	  || res == 0)
	return 0;
      offset += res;
      if (offset >= len)
	return offset;
      if (off != -1)
	off = 0;
    }
  return offset;
}
/* Subroutine of native_encode_expr.  Encode the STRING_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
{
  tree type = TREE_TYPE (expr);
  HOST_WIDE_INT total_bytes;

  if (TREE_CODE (type) != ARRAY_TYPE
      || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
      || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
      || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
    return 0;
  total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
  if ((off == -1 && total_bytes > len)
      || off >= total_bytes)
    return 0;
  if (off == -1)
    off = 0;
  if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
    {
      int written = 0;
      if (off < TREE_STRING_LENGTH (expr))
	{
	  written = MIN (len, TREE_STRING_LENGTH (expr) - off);
	  memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
	}
      memset (ptr + written, 0,
	      MIN (total_bytes - written, len - written));
    }
  else
    memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
  return MIN (total_bytes - off, len);
}
/* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
   REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
   buffer PTR of length LEN bytes.  If OFF is not -1 then start
   the encoding at byte offset OFF and encode at most LEN bytes.
   Return the number of bytes placed in the buffer, or zero upon failure.  */

int
native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
{
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return native_encode_int (expr, ptr, len, off);

    case REAL_CST:
      return native_encode_real (expr, ptr, len, off);

    case FIXED_CST:
      return native_encode_fixed (expr, ptr, len, off);

    case COMPLEX_CST:
      return native_encode_complex (expr, ptr, len, off);

    case VECTOR_CST:
      return native_encode_vector (expr, ptr, len, off);

    case STRING_CST:
      return native_encode_string (expr, ptr, len, off);

    default:
      return 0;
    }
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_int (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));

  if (total_bytes > len
      || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return NULL_TREE;

  wide_int result = wi::from_buffer (ptr, total_bytes);

  return wide_int_to_tree (type, result);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a FIXED_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_fixed (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  double_int result;
  FIXED_VALUE_TYPE fixed_value;

  if (total_bytes > len
      || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return NULL_TREE;

  result = double_int::from_buffer (ptr, total_bytes);
  fixed_value = fixed_from_double_int (result, TYPE_MODE (type));

  return build_fixed (type, fixed_value);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a REAL_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_real (tree type, const unsigned char *ptr, int len)
{
  enum machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  int byte, offset, word, words, bitpos;
  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  REAL_VALUE_TYPE r;
  long tmp[6];

  total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  if (total_bytes > len || total_bytes > 24)
    return NULL_TREE;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  memset (tmp, 0, sizeof (tmp));
  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      if (UNITS_PER_WORD < 4)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];

      tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
    }

  real_from_target (&r, tmp, mode);
  return build_real (type, r);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_complex (tree type, const unsigned char *ptr, int len)
{
  tree etype, rpart, ipart;
  int size;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  if (size * 2 > len)
    return NULL_TREE;
  rpart = native_interpret_expr (etype, ptr, size);
  if (!rpart)
    return NULL_TREE;
  ipart = native_interpret_expr (etype, ptr+size, size);
  if (!ipart)
    return NULL_TREE;
  return build_complex (type, rpart, ipart);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_vector (tree type, const unsigned char *ptr, int len)
{
  tree etype, elem;
  int i, size, count;
  tree *elements;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  count = TYPE_VECTOR_SUBPARTS (type);
  if (size * count > len)
    return NULL_TREE;

  elements = XALLOCAVEC (tree, count);
  for (i = count - 1; i >= 0; i--)
    {
      elem = native_interpret_expr (etype, ptr+(i*size), size);
      if (!elem)
	return NULL_TREE;
      elements[i] = elem;
    }
  return build_vector (type, elements);
}
/* Subroutine of fold_view_convert_expr.  Interpret the contents of
   the buffer PTR of length LEN as a constant of type TYPE.  For
   INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
   we return a REAL_CST, etc...  If the buffer cannot be interpreted,
   return NULL_TREE.  */

tree
native_interpret_expr (tree type, const unsigned char *ptr, int len)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      return native_interpret_int (type, ptr, len);

    case REAL_TYPE:
      return native_interpret_real (type, ptr, len);

    case FIXED_POINT_TYPE:
      return native_interpret_fixed (type, ptr, len);

    case COMPLEX_TYPE:
      return native_interpret_complex (type, ptr, len);

    case VECTOR_TYPE:
      return native_interpret_vector (type, ptr, len);

    default:
      return NULL_TREE;
    }
}
/* Returns true if we can interpret the contents of a native encoding
   as TYPE.  */

static bool
can_native_interpret_type_p (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case FIXED_POINT_TYPE:
    case REAL_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
      return true;
    default:
      return false;
    }
}
/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
   TYPE at compile-time.  If we're unable to perform the conversion
   return NULL_TREE.  */

static tree
fold_view_convert_expr (tree type, tree expr)
{
  /* We support up to 512-bit values (for V8DFmode).  */
  unsigned char buffer[64];
  int len;

  /* Check that the host and target are sane.  */
  if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    return NULL_TREE;

  len = native_encode_expr (expr, buffer, sizeof (buffer));
  if (len == 0)
    return NULL_TREE;

  return native_interpret_expr (type, buffer, len);
}
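/* For example, a VIEW_CONVERT_EXPR reinterpreting the INTEGER_CST
   0x3f800000 as float folds to 1.0f on a target using IEEE single
   precision: the constant is encoded into BUFFER byte by byte and
   then reinterpreted as a REAL_CST.  */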
/* Build an expression for the address of T.  Folds away INDIRECT_REF
   to avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  if (TREE_CODE (t) == INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);

      if (TREE_TYPE (t) != ptrtype)
	t = build1_loc (loc, NOP_EXPR, ptrtype, t);
    }
  else if (TREE_CODE (t) == MEM_REF
	   && integer_zerop (TREE_OPERAND (t, 1)))
    return TREE_OPERAND (t, 0);
  else if (TREE_CODE (t) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
    return fold_binary (POINTER_PLUS_EXPR, ptrtype,
			TREE_OPERAND (t, 0),
			convert_to_ptrofftype (TREE_OPERAND (t, 1)));
  else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
    {
      t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));

      if (TREE_TYPE (t) != ptrtype)
	t = fold_convert_loc (loc, ptrtype, t);
    }
  else
    t = build1_loc (loc, ADDR_EXPR, ptrtype, t);

  return t;
}

/* Build an expression for the address of T.  */

tree
build_fold_addr_expr_loc (location_t loc, tree t)
{
  tree ptrtype = build_pointer_type (TREE_TYPE (t));

  return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
}

static bool vec_cst_ctor_to_array (tree, tree *);
7782 /* Fold a unary expression of code CODE and type TYPE with operand
7783 OP0. Return the folded expression if folding is successful.
7784 Otherwise, return NULL_TREE. */
7787 fold_unary_loc (location_t loc
, enum tree_code code
, tree type
, tree op0
)
7791 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
7793 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
7794 && TREE_CODE_LENGTH (code
) == 1);
7799 if (CONVERT_EXPR_CODE_P (code
)
7800 || code
== FLOAT_EXPR
|| code
== ABS_EXPR
|| code
== NEGATE_EXPR
)
7802 /* Don't use STRIP_NOPS, because signedness of argument type
7804 STRIP_SIGN_NOPS (arg0
);
7808 /* Strip any conversions that don't change the mode. This
7809 is safe for every expression, except for a comparison
7810 expression because its signedness is derived from its
7813 Note that this is done as an internal manipulation within
7814 the constant folder, in order to find the simplest
7815 representation of the arguments so that their form can be
7816 studied. In any cases, the appropriate type conversions
7817 should be put back in the tree that will get out of the
7823 if (TREE_CODE_CLASS (code
) == tcc_unary
)
7825 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
7826 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7827 fold_build1_loc (loc
, code
, type
,
7828 fold_convert_loc (loc
, TREE_TYPE (op0
),
7829 TREE_OPERAND (arg0
, 1))));
7830 else if (TREE_CODE (arg0
) == COND_EXPR
)
7832 tree arg01
= TREE_OPERAND (arg0
, 1);
7833 tree arg02
= TREE_OPERAND (arg0
, 2);
7834 if (! VOID_TYPE_P (TREE_TYPE (arg01
)))
7835 arg01
= fold_build1_loc (loc
, code
, type
,
7836 fold_convert_loc (loc
,
7837 TREE_TYPE (op0
), arg01
));
7838 if (! VOID_TYPE_P (TREE_TYPE (arg02
)))
7839 arg02
= fold_build1_loc (loc
, code
, type
,
7840 fold_convert_loc (loc
,
7841 TREE_TYPE (op0
), arg02
));
7842 tem
= fold_build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7845 /* If this was a conversion, and all we did was to move into
7846 inside the COND_EXPR, bring it back out. But leave it if
7847 it is a conversion from integer to integer and the
7848 result precision is no wider than a word since such a
7849 conversion is cheap and may be optimized away by combine,
7850 while it couldn't if it were outside the COND_EXPR. Then return
7851 so we don't get into an infinite recursion loop taking the
7852 conversion out and then back in. */
7854 if ((CONVERT_EXPR_CODE_P (code
)
7855 || code
== NON_LVALUE_EXPR
)
7856 && TREE_CODE (tem
) == COND_EXPR
7857 && TREE_CODE (TREE_OPERAND (tem
, 1)) == code
7858 && TREE_CODE (TREE_OPERAND (tem
, 2)) == code
7859 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 1))
7860 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 2))
7861 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))
7862 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)))
7863 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7865 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))))
7866 && TYPE_PRECISION (TREE_TYPE (tem
)) <= BITS_PER_WORD
)
7867 || flag_syntax_only
))
7868 tem
= build1_loc (loc
, code
, type
,
7870 TREE_TYPE (TREE_OPERAND
7871 (TREE_OPERAND (tem
, 1), 0)),
7872 TREE_OPERAND (tem
, 0),
7873 TREE_OPERAND (TREE_OPERAND (tem
, 1), 0),
7874 TREE_OPERAND (TREE_OPERAND (tem
, 2),
7883 /* Re-association barriers around constants and other re-association
7884 barriers can be removed. */
7885 if (CONSTANT_CLASS_P (op0
)
7886 || TREE_CODE (op0
) == PAREN_EXPR
)
7887 return fold_convert_loc (loc
, type
, op0
);
7890 case NON_LVALUE_EXPR
:
7891 if (!maybe_lvalue_p (op0
))
7892 return fold_convert_loc (loc
, type
, op0
);
7897 case FIX_TRUNC_EXPR
:
7898 if (TREE_TYPE (op0
) == type
)
7901 if (COMPARISON_CLASS_P (op0
))
7903 /* If we have (type) (a CMP b) and type is an integral type, return
7904 new expression involving the new type. Canonicalize
7905 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7907 Do not fold the result as that would not simplify further, also
7908 folding again results in recursions. */
7909 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
7910 return build2_loc (loc
, TREE_CODE (op0
), type
,
7911 TREE_OPERAND (op0
, 0),
7912 TREE_OPERAND (op0
, 1));
7913 else if (!INTEGRAL_TYPE_P (type
) && !VOID_TYPE_P (type
)
7914 && TREE_CODE (type
) != VECTOR_TYPE
)
7915 return build3_loc (loc
, COND_EXPR
, type
, op0
,
7916 constant_boolean_node (true, type
),
7917 constant_boolean_node (false, type
));
7920 /* Handle cases of two conversions in a row. */
7921 if (CONVERT_EXPR_P (op0
))
7923 tree inside_type
= TREE_TYPE (TREE_OPERAND (op0
, 0));
7924 tree inter_type
= TREE_TYPE (op0
);
7925 int inside_int
= INTEGRAL_TYPE_P (inside_type
);
7926 int inside_ptr
= POINTER_TYPE_P (inside_type
);
7927 int inside_float
= FLOAT_TYPE_P (inside_type
);
7928 int inside_vec
= TREE_CODE (inside_type
) == VECTOR_TYPE
;
7929 unsigned int inside_prec
= TYPE_PRECISION (inside_type
);
7930 int inside_unsignedp
= TYPE_UNSIGNED (inside_type
);
7931 int inter_int
= INTEGRAL_TYPE_P (inter_type
);
7932 int inter_ptr
= POINTER_TYPE_P (inter_type
);
7933 int inter_float
= FLOAT_TYPE_P (inter_type
);
7934 int inter_vec
= TREE_CODE (inter_type
) == VECTOR_TYPE
;
7935 unsigned int inter_prec
= TYPE_PRECISION (inter_type
);
7936 int inter_unsignedp
= TYPE_UNSIGNED (inter_type
);
7937 int final_int
= INTEGRAL_TYPE_P (type
);
7938 int final_ptr
= POINTER_TYPE_P (type
);
7939 int final_float
= FLOAT_TYPE_P (type
);
7940 int final_vec
= TREE_CODE (type
) == VECTOR_TYPE
;
7941 unsigned int final_prec
= TYPE_PRECISION (type
);
7942 int final_unsignedp
= TYPE_UNSIGNED (type
);
7944 /* In addition to the cases of two conversions in a row
7945 handled below, if we are converting something to its own
7946 type via an object of identical or wider precision, neither
7947 conversion is needed. */
7948 if (TYPE_MAIN_VARIANT (inside_type
) == TYPE_MAIN_VARIANT (type
)
7949 && (((inter_int
|| inter_ptr
) && final_int
)
7950 || (inter_float
&& final_float
))
7951 && inter_prec
>= final_prec
)
7952 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7954 /* Likewise, if the intermediate and initial types are either both
7955 float or both integer, we don't need the middle conversion if the
7956 former is wider than the latter and doesn't change the signedness
7957 (for integers). Avoid this if the final type is a pointer since
7958 then we sometimes need the middle conversion. Likewise if the
7959 final type has a precision not equal to the size of its mode. */
7960 if (((inter_int
&& inside_int
)
7961 || (inter_float
&& inside_float
)
7962 || (inter_vec
&& inside_vec
))
7963 && inter_prec
>= inside_prec
7964 && (inter_float
|| inter_vec
7965 || inter_unsignedp
== inside_unsignedp
)
7966 && ! (final_prec
!= GET_MODE_PRECISION (TYPE_MODE (type
))
7967 && TYPE_MODE (type
) == TYPE_MODE (inter_type
))
7969 && (! final_vec
|| inter_prec
== inside_prec
))
7970 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7972 /* If we have a sign-extension of a zero-extended value, we can
7973 replace that by a single zero-extension. Likewise if the
7974 final conversion does not change precision we can drop the
7975 intermediate conversion. */
7976 if (inside_int
&& inter_int
&& final_int
7977 && ((inside_prec
< inter_prec
&& inter_prec
< final_prec
7978 && inside_unsignedp
&& !inter_unsignedp
)
7979 || final_prec
== inter_prec
))
7980 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7982 /* Two conversions in a row are not needed unless:
7983 - some conversion is floating-point (overstrict for now), or
7984 - some conversion is a vector (overstrict for now), or
7985 - the intermediate type is narrower than both initial and
7987 - the intermediate type and innermost type differ in signedness,
7988 and the outermost type is wider than the intermediate, or
7989 - the initial type is a pointer type and the precisions of the
7990 intermediate and final types differ, or
7991 - the final type is a pointer type and the precisions of the
7992 initial and intermediate types differ. */
7993 if (! inside_float
&& ! inter_float
&& ! final_float
7994 && ! inside_vec
&& ! inter_vec
&& ! final_vec
7995 && (inter_prec
>= inside_prec
|| inter_prec
>= final_prec
)
7996 && ! (inside_int
&& inter_int
7997 && inter_unsignedp
!= inside_unsignedp
7998 && inter_prec
< final_prec
)
7999 && ((inter_unsignedp
&& inter_prec
> inside_prec
)
8000 == (final_unsignedp
&& final_prec
> inter_prec
))
8001 && ! (inside_ptr
&& inter_prec
!= final_prec
)
8002 && ! (final_ptr
&& inside_prec
!= inter_prec
)
8003 && ! (final_prec
!= GET_MODE_PRECISION (TYPE_MODE (type
))
8004 && TYPE_MODE (type
) == TYPE_MODE (inter_type
)))
8005 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
8008 /* Handle (T *)&A.B.C for A being of type T and B and C
8009 living at offset zero. This occurs frequently in
8010 C++ upcasting and then accessing the base. */
8011 if (TREE_CODE (op0
) == ADDR_EXPR
8012 && POINTER_TYPE_P (type
)
8013 && handled_component_p (TREE_OPERAND (op0
, 0)))
8015 HOST_WIDE_INT bitsize
, bitpos
;
8017 enum machine_mode mode
;
8018 int unsignedp
, volatilep
;
8019 tree base
= TREE_OPERAND (op0
, 0);
8020 base
= get_inner_reference (base
, &bitsize
, &bitpos
, &offset
,
8021 &mode
, &unsignedp
, &volatilep
, false);
8022 /* If the reference was to a (constant) zero offset, we can use
8023 the address of the base if it has the same base type
8024 as the result type and the pointer type is unqualified. */
8025 if (! offset
&& bitpos
== 0
8026 && (TYPE_MAIN_VARIANT (TREE_TYPE (type
))
8027 == TYPE_MAIN_VARIANT (TREE_TYPE (base
)))
8028 && TYPE_QUALS (type
) == TYPE_UNQUALIFIED
)
8029 return fold_convert_loc (loc
, type
,
8030 build_fold_addr_expr_loc (loc
, base
));
8033 if (TREE_CODE (op0
) == MODIFY_EXPR
8034 && TREE_CONSTANT (TREE_OPERAND (op0
, 1))
8035 /* Detect assigning a bitfield. */
8036 && !(TREE_CODE (TREE_OPERAND (op0
, 0)) == COMPONENT_REF
8038 (TREE_OPERAND (TREE_OPERAND (op0
, 0), 1))))
8040 /* Don't leave an assignment inside a conversion
8041 unless assigning a bitfield. */
8042 tem
= fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 1));
8043 /* First do the assignment, then return converted constant. */
8044 tem
= build2_loc (loc
, COMPOUND_EXPR
, TREE_TYPE (tem
), op0
, tem
);
8045 TREE_NO_WARNING (tem
) = 1;
8046 TREE_USED (tem
) = 1;
      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
         constant (if x has signed type, the sign bit cannot be set
         in c).  This folds extension into the BIT_AND_EXPR.
         ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
         very likely don't have maximal range for their precision and this
         transformation effectively doesn't preserve non-maximal ranges.  */
      if (TREE_CODE (type) == INTEGER_TYPE
          && TREE_CODE (op0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
        {
          tree and_expr = op0;
          tree and0 = TREE_OPERAND (and_expr, 0);
          tree and1 = TREE_OPERAND (and_expr, 1);
          int change = 0;

          if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
              || (TYPE_PRECISION (type)
                  <= TYPE_PRECISION (TREE_TYPE (and_expr))))
            change = 1;
          else if (TYPE_PRECISION (TREE_TYPE (and1))
                   <= HOST_BITS_PER_WIDE_INT
                   && tree_fits_uhwi_p (and1))
            {
              unsigned HOST_WIDE_INT cst;

              cst = tree_to_uhwi (and1);
              cst &= HOST_WIDE_INT_M1U
                     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
              change = (cst == 0);
#ifdef LOAD_EXTEND_OP
              if (change
                  && !flag_syntax_only
                  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
                      == ZERO_EXTEND))
                {
                  tree uns = unsigned_type_for (TREE_TYPE (and0));
                  and0 = fold_convert_loc (loc, uns, and0);
                  and1 = fold_convert_loc (loc, uns, and1);
                }
#endif
            }
          if (change)
            {
              tem = force_fit_type (type, wi::to_widest (and1), 0,
                                    TREE_OVERFLOW (and1));
              return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                      fold_convert_loc (loc, type, and0),
                                      tem);
            }
        }
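      /* For instance, with x of type signed char, (unsigned int)(x & 0x7f)
         folds to (unsigned int) x & 0x7f: the mask leaves the sign bit
         clear, so extending before or after the AND gives the same
         result.  */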
      /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
         when one of the new casts will fold away.  Conservatively we assume
         that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST.  */
      if (POINTER_TYPE_P (type)
          && TREE_CODE (arg0) == POINTER_PLUS_EXPR
          && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
          && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
              || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
              || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);

          return fold_build_pointer_plus_loc
                   (loc, fold_convert_loc (loc, type, arg00), arg01);
        }
      /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
         of the same precision, and X is an integer type not narrower than
         types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (op0) == BIT_NOT_EXPR
          && INTEGRAL_TYPE_P (TREE_TYPE (op0))
          && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
          && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
        {
          tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
          if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
              && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
            return fold_build1_loc (loc, BIT_NOT_EXPR, type,
                                    fold_convert_loc (loc, type, tem));
        }
      /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
         type of X and Y (integer types only).  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (op0) == MULT_EXPR
          && INTEGRAL_TYPE_P (TREE_TYPE (op0))
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
        {
          /* Be careful not to introduce new overflows.  */
          tree mult_type;
          if (TYPE_OVERFLOW_WRAPS (type))
            mult_type = type;
          else
            mult_type = unsigned_type_for (type);

          if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
            {
              tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
                                     fold_convert_loc (loc, mult_type,
                                                       TREE_OPERAND (op0, 0)),
                                     fold_convert_loc (loc, mult_type,
                                                       TREE_OPERAND (op0, 1)));
              return fold_convert_loc (loc, type, tem);
            }
        }

      tem = fold_convert_const (code, type, arg0);
      return tem ? tem : NULL_TREE;
    case ADDR_SPACE_CONVERT_EXPR:
      if (integer_zerop (arg0))
        return fold_convert_const (code, type, arg0);
      return NULL_TREE;

    case FIXED_CONVERT_EXPR:
      tem = fold_convert_const (code, type, arg0);
      return tem ? tem : NULL_TREE;
    case VIEW_CONVERT_EXPR:
      if (TREE_TYPE (op0) == type)
        return op0;
      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
        return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
                                type, TREE_OPERAND (op0, 0));
      if (TREE_CODE (op0) == MEM_REF)
        return fold_build2_loc (loc, MEM_REF, type,
                                TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));

      /* For integral conversions with the same precision or pointer
         conversions use a NOP_EXPR instead.  */
      if ((INTEGRAL_TYPE_P (type)
           || POINTER_TYPE_P (type))
          && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
              || POINTER_TYPE_P (TREE_TYPE (op0)))
          && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
        return fold_convert_loc (loc, type, op0);

      /* Strip inner integral conversions that do not change the
         precision.  */
      if (CONVERT_EXPR_P (op0)
          && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
              || POINTER_TYPE_P (TREE_TYPE (op0)))
          && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
              || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
          && (TYPE_PRECISION (TREE_TYPE (op0))
              == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
        return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
                                type, TREE_OPERAND (op0, 0));

      return fold_view_convert_expr (type, op0);
    case NEGATE_EXPR:
      tem = fold_negate_expr (loc, arg0);
      if (tem)
        return fold_convert_loc (loc, type, tem);
      return NULL_TREE;
    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
        return fold_abs_const (arg0, type);
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
        return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
               && TREE_CODE (type) == REAL_TYPE)
        {
          tree targ0 = strip_float_extensions (arg0);
          if (targ0 != arg0)
            return fold_convert_loc (loc, type,
                                     fold_build1_loc (loc, ABS_EXPR,
                                                      TREE_TYPE (targ0),
                                                      targ0));
        }
      /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
      else if (TREE_CODE (arg0) == ABS_EXPR)
        return arg0;
      else if (tree_expr_nonnegative_p (arg0))
        return arg0;

      /* Strip sign ops from argument.  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = fold_strip_sign_ops (arg0);
          if (tem)
            return fold_build1_loc (loc, ABS_EXPR, type,
                                    fold_convert_loc (loc, type, tem));
        }
      return NULL_TREE;
    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
        {
          tree itype = TREE_TYPE (type);
          tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
          tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
                                  negate_expr (ipart));
        }
      if (TREE_CODE (arg0) == COMPLEX_CST)
        {
          tree itype = TREE_TYPE (type);
          tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
          tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
          return build_complex (type, rpart, negate_expr (ipart));
        }
      if (TREE_CODE (arg0) == CONJ_EXPR)
        return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      return NULL_TREE;
    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return fold_not_const (arg0, type);
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
        return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      /* Convert ~ (-A) to A - 1.  */
      else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)),
                                build_int_cst (type, 1));
      /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
      else if (INTEGRAL_TYPE_P (type)
               && ((TREE_CODE (arg0) == MINUS_EXPR
                    && integer_onep (TREE_OPERAND (arg0, 1)))
                   || (TREE_CODE (arg0) == PLUS_EXPR
                       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
        return fold_build1_loc (loc, NEGATE_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)));
      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
               && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
                                         fold_convert_loc (loc, type,
                                                           TREE_OPERAND (arg0,
                                                                         0)))))
        return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
               && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
                                         fold_convert_loc (loc, type,
                                                           TREE_OPERAND (arg0,
                                                                         1)))))
        return fold_build2_loc (loc, BIT_XOR_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)),
                                tem);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
        {
          tree *elements;
          tree elem;
          unsigned count = VECTOR_CST_NELTS (arg0), i;

          elements = XALLOCAVEC (tree, count);
          for (i = 0; i < count; i++)
            {
              elem = VECTOR_CST_ELT (arg0, i);
              elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type),
                                     elem);
              if (elem == NULL_TREE)
                break;
              elements[i] = elem;
            }
          if (i == count)
            return build_vector (type, elements);
        }
      else if (COMPARISON_CLASS_P (arg0)
               && (VECTOR_TYPE_P (type)
                   || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
        {
          tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
          enum tree_code subcode
            = invert_tree_comparison (TREE_CODE (arg0),
                                      HONOR_NANS (TYPE_MODE (op_type)));
          if (subcode != ERROR_MARK)
            return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
                               TREE_OPERAND (arg0, 1));
        }
      return NULL_TREE;
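      /* The integer identities above follow from two's complement
         arithmetic: -A == ~A + 1, hence ~(-A) == A - 1 and ~(A - 1) == -A.
         For instance, with A == 5 in 8 bits, ~(-5) == ~0xfb == 0x04
         == 5 - 1.  */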
    case TRUTH_NOT_EXPR:
      /* Note that the operand of this must be an int
         and its values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language,
         but we don't handle values other than 1 correctly yet.)  */
      tem = fold_truth_not_expr (loc, arg0);
      if (!tem)
        return NULL_TREE;
      return fold_convert_loc (loc, type, tem);
    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
                                     TREE_OPERAND (arg0, 1));
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert_loc (loc, type, TREE_REALPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
                                 fold_build1_loc (loc, REALPART_EXPR, itype,
                                                  TREE_OPERAND (arg0, 0)),
                                 fold_build1_loc (loc, REALPART_EXPR, itype,
                                                  TREE_OPERAND (arg0, 1)));
          return fold_convert_loc (loc, type, tem);
        }
      if (TREE_CODE (arg0) == CONJ_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build1_loc (loc, REALPART_EXPR, itype,
                                 TREE_OPERAND (arg0, 0));
          return fold_convert_loc (loc, type, tem);
        }
      if (TREE_CODE (arg0) == CALL_EXPR)
        {
          tree fn = get_callee_fndecl (arg0);
          if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
            switch (DECL_FUNCTION_CODE (fn))
              {
              CASE_FLT_FN (BUILT_IN_CEXPI):
                fn = mathfn_built_in (type, BUILT_IN_COS);
                if (fn)
                  return build_call_expr_loc (loc, fn, 1,
                                              CALL_EXPR_ARG (arg0, 0));
                break;

              default:
                break;
              }
        }
      return NULL_TREE;
    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return build_zero_cst (type);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
                                     TREE_OPERAND (arg0, 0));
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
                                 fold_build1_loc (loc, IMAGPART_EXPR, itype,
                                                  TREE_OPERAND (arg0, 0)),
                                 fold_build1_loc (loc, IMAGPART_EXPR, itype,
                                                  TREE_OPERAND (arg0, 1)));
          return fold_convert_loc (loc, type, tem);
        }
      if (TREE_CODE (arg0) == CONJ_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build1_loc (loc, IMAGPART_EXPR, itype,
                                 TREE_OPERAND (arg0, 0));
          return fold_convert_loc (loc, type, negate_expr (tem));
        }
      if (TREE_CODE (arg0) == CALL_EXPR)
        {
          tree fn = get_callee_fndecl (arg0);
          if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
            switch (DECL_FUNCTION_CODE (fn))
              {
              CASE_FLT_FN (BUILT_IN_CEXPI):
                fn = mathfn_built_in (type, BUILT_IN_SIN);
                if (fn)
                  return build_call_expr_loc (loc, fn, 1,
                                              CALL_EXPR_ARG (arg0, 0));
                break;

              default:
                break;
              }
        }
      return NULL_TREE;
    case INDIRECT_REF:
      /* Fold *&X to X if X is an lvalue.  */
      if (TREE_CODE (op0) == ADDR_EXPR)
        {
          tree op00 = TREE_OPERAND (op0, 0);
          if ((TREE_CODE (op00) == VAR_DECL
               || TREE_CODE (op00) == PARM_DECL
               || TREE_CODE (op00) == RESULT_DECL)
              && !TREE_READONLY (op00))
            return op00;
        }
      return NULL_TREE;
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts;
        enum tree_code subcode;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
        if (TREE_CODE (arg0) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts * 2);
        if (!vec_cst_ctor_to_array (arg0, elts))
          return NULL_TREE;

        if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
                                   || code == VEC_UNPACK_FLOAT_LO_EXPR))
          elts += nelts;

        if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
          subcode = NOP_EXPR;
        else
          subcode = FLOAT_EXPR;

        for (i = 0; i < nelts; i++)
          {
            elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
            if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }
    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts;
        enum tree_code subcode;

        if (TREE_CODE (op0) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts);
        if (!vec_cst_ctor_to_array (op0, elts))
          return NULL_TREE;

        switch (code)
          {
          case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
          case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
          case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
          default: gcc_unreachable ();
          }

        for (i = 1; i < nelts; i++)
          {
            elts[0] = const_binop (subcode, elts[0], elts[i]);
            if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
              return NULL_TREE;
            elts[i] = build_zero_cst (TREE_TYPE (type));
          }

        return build_vector (type, elts);
      }

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* If the operation was a conversion do _not_ mark a resulting constant
   with TREE_OVERFLOW if the original constant was not.  These conversions
   have implementation defined behavior and retaining the TREE_OVERFLOW
   flag here would confuse later passes such as VRP.  */

tree
fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
                                tree type, tree op0)
{
  tree res = fold_unary_loc (loc, code, type, op0);
  if (res
      && TREE_CODE (res) == INTEGER_CST
      && TREE_CODE (op0) == INTEGER_CST
      && CONVERT_EXPR_CODE_P (code))
    TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);

  return res;
}
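/* For example, folding the conversion (signed char) 300 can produce the
   truncated constant 44 with TREE_OVERFLOW set by fold_convert_const;
   since the original operand 300 carried no overflow flag, the wrapper
   above clears it again.  */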
/* Fold a binary bitwise/truth expression of code CODE and type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting expression.
   ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_truth_andor (location_t loc, enum tree_code code, tree type,
                  tree arg0, tree arg1, tree op0, tree op1)
{
  tree tem;

  /* We only do these simplifications if we are optimizing.  */
  if (!optimize)
    return NULL_TREE;

  /* Check for things like (A || B) && (A || C).  We can convert this
     to A || (B && C).  Note that either operator can be any of the four
     truth and/or operations and the transformation will still be
     valid.  Also note that we only care about order for the
     ANDIF and ORIF operators.  If B contains side effects, this
     might change the truth-value of A.  */
  if (TREE_CODE (arg0) == TREE_CODE (arg1)
      && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
          || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
          || TREE_CODE (arg0) == TRUTH_AND_EXPR
          || TREE_CODE (arg0) == TRUTH_OR_EXPR)
      && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
    {
      tree a00 = TREE_OPERAND (arg0, 0);
      tree a01 = TREE_OPERAND (arg0, 1);
      tree a10 = TREE_OPERAND (arg1, 0);
      tree a11 = TREE_OPERAND (arg1, 1);
      int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
                          || TREE_CODE (arg0) == TRUTH_AND_EXPR)
                         && (code == TRUTH_AND_EXPR
                             || code == TRUTH_OR_EXPR));

      if (operand_equal_p (a00, a10, 0))
        return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
                                fold_build2_loc (loc, code, type, a01, a11));
      else if (commutative && operand_equal_p (a00, a11, 0))
        return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
                                fold_build2_loc (loc, code, type, a01, a10));
      else if (commutative && operand_equal_p (a01, a10, 0))
        return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
                                fold_build2_loc (loc, code, type, a00, a11));

      /* This case is tricky because we must either have commutative
         operators or else A10 must not have side-effects.  */

      else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
               && operand_equal_p (a01, a11, 0))
        return fold_build2_loc (loc, TREE_CODE (arg0), type,
                                fold_build2_loc (loc, code, type, a00, a10),
                                a01);
    }

  /* See if we can build a range comparison.  */
  if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
    return tem;

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
      if (tem)
        return fold_build2_loc (loc, code, type, tem, arg1);
    }

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
      if (tem)
        return fold_build2_loc (loc, code, type, arg0, tem);
    }

  /* Check for the possibility of merging component references.  If our
     lhs is another similar operation, try to merge its rhs with our
     rhs.  Then try to merge our lhs and rhs.  */
  if (TREE_CODE (arg0) == code
      && 0 != (tem = fold_truth_andor_1 (loc, code, type,
                                         TREE_OPERAND (arg0, 1), arg1)))
    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

  if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
    return tem;

  if (LOGICAL_OP_NON_SHORT_CIRCUIT
      && (code == TRUTH_AND_EXPR
          || code == TRUTH_ANDIF_EXPR
          || code == TRUTH_OR_EXPR
          || code == TRUTH_ORIF_EXPR))
    {
      enum tree_code ncode, icode;

      ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
              ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
      icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;

      /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
         or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
         We don't want to pack more than two leafs to a non-IF AND/OR
         expression.
         If tree-code of left-hand operand isn't an AND/OR-IF code and not
         equal to IF-CODE, then we don't want to add right-hand operand.
         If the inner right-hand side of left-hand operand has
         side-effects, or isn't simple, then we can't add to it,
         as otherwise we might destroy if-sequence.  */
      if (TREE_CODE (arg0) == icode
          && simple_operand_p_2 (arg1)
          /* Needed for sequence points to handle trappings, and
             side-effects.  */
          && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
        {
          tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
                                 arg1);
          return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
                                  tem);
        }
      /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
         or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C).  */
      else if (TREE_CODE (arg1) == icode
               && simple_operand_p_2 (arg0)
               /* Needed for sequence points to handle trappings, and
                  side-effects.  */
               && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
        {
          tem = fold_build2_loc (loc, ncode, type,
                                 arg0, TREE_OPERAND (arg1, 0));
          return fold_build2_loc (loc, icode, type, tem,
                                  TREE_OPERAND (arg1, 1));
        }
      /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
         into (A OR B).
         For sequence point consistency, we need to check for trapping,
         and side-effects.  */
      else if (code == icode && simple_operand_p_2 (arg0)
               && simple_operand_p_2 (arg1))
        return fold_build2_loc (loc, ncode, type, arg0, arg1);
    }

  return NULL_TREE;
}
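/* Concrete instances of the rules above: (a || b) && (a || c) becomes
   a || (b && c) when b has no side effects, and on targets where
   LOGICAL_OP_NON_SHORT_CIRCUIT holds, (a && b) && c can be repacked as
   a && (b & c) when b and c are simple enough to evaluate
   unconditionally.  */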
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_minmax (location_t loc, enum tree_code code, tree type, tree op0,
             tree op1)
{
  enum tree_code compl_code;

  if (code == MIN_EXPR)
    compl_code = MAX_EXPR;
  else if (code == MAX_EXPR)
    compl_code = MIN_EXPR;
  else
    gcc_unreachable ();

  /* MIN (MAX (a, b), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));

  /* MIN (MAX (b, a), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
      && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));

  /* MIN (a, MAX (a, b)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));

  /* MIN (a, MAX (b, a)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));

  return NULL_TREE;
}
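/* Each identity also holds with MIN and MAX exchanged; e.g. with
   code == MAX_EXPR the first rule reads MAX (MIN (a, b), b) == b.
   A typical fold: MIN (MAX (x, y), y) simplifies to y while still
   evaluating x for its side effects via omit_one_operand_loc.  */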
/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
   valid if signed overflow is undefined.  */

static tree
maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code,
                                 tree type, tree arg0, tree arg1,
                                 bool *strict_overflow_p)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
  int sgn0;
  bool swap = false;

  /* Match A +- CST code arg1 and CST code arg1.  We can change the
     first form only if overflow is undefined.  */
  if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
         /* In principle pointers also have undefined overflow behavior,
            but that causes problems elsewhere.  */
         && !POINTER_TYPE_P (TREE_TYPE (arg0))
         && (code0 == MINUS_EXPR
             || code0 == PLUS_EXPR)
         && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        || code0 == INTEGER_CST))
    return NULL_TREE;

  /* Identify the constant in arg0 and its sign.  */
  if (code0 == INTEGER_CST)
    cst0 = arg0;
  else
    cst0 = TREE_OPERAND (arg0, 1);
  sgn0 = tree_int_cst_sgn (cst0);

  /* Overflowed constants and zero will cause problems.  */
  if (integer_zerop (cst0)
      || TREE_OVERFLOW (cst0))
    return NULL_TREE;

  /* See if we can reduce the magnitude of the constant in
     arg0 by changing the comparison code.  */
  if (code0 == INTEGER_CST)
    {
      /* CST <= arg1  ->  CST-1 < arg1.  */
      if (code == LE_EXPR && sgn0 == 1)
        code = LT_EXPR;
      /* -CST < arg1  ->  -CST-1 <= arg1.  */
      else if (code == LT_EXPR && sgn0 == -1)
        code = LE_EXPR;
      /* CST > arg1  ->  CST-1 >= arg1.  */
      else if (code == GT_EXPR && sgn0 == 1)
        code = GE_EXPR;
      /* -CST >= arg1  ->  -CST-1 > arg1.  */
      else if (code == GE_EXPR && sgn0 == -1)
        code = GT_EXPR;
      else
        return NULL_TREE;
      /* arg1 code' CST' might be more canonical.  */
      swap = true;
    }
  else
    {
      /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
      if (code == LT_EXPR
          && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
        code = LE_EXPR;
      /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
      else if (code == GT_EXPR
               && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
        code = GE_EXPR;
      /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
      else if (code == LE_EXPR
               && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
        code = LT_EXPR;
      /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
      else if (code == GE_EXPR
               && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
        code = GT_EXPR;
      else
        return NULL_TREE;
      *strict_overflow_p = true;
    }

  /* Now build the constant reduced in magnitude.  But not if that
     would produce one outside of its types range.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
      && ((sgn0 == 1
           && TYPE_MIN_VALUE (TREE_TYPE (cst0))
           && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
          || (sgn0 == -1
              && TYPE_MAX_VALUE (TREE_TYPE (cst0))
              && tree_int_cst_equal (cst0,
                                     TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
    /* We cannot swap the comparison here as that would cause us to
       endlessly recurse.  */
    return NULL_TREE;

  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
                       cst0, build_int_cst (TREE_TYPE (cst0), 1));
  if (code0 != INTEGER_CST)
    t = fold_build2_loc (loc, code0, TREE_TYPE (arg0),
                         TREE_OPERAND (arg0, 0), t);
  t = fold_convert (TREE_TYPE (arg1), t);

  /* If swapping might yield to a more canonical form, do so.  */
  if (swap)
    return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
  else
    return fold_build2_loc (loc, code, type, t, arg1);
}
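/* Example: under undefined signed overflow the comparison X + 2 > Y is
   canonicalized to X + 1 >= Y, and the constant form 3 <= Y becomes
   2 < Y, which is then swapped into the more canonical Y > 2.  */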
/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
   overflow further.  Try to decrease the magnitude of constants involved
   by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
   and put sole constants at the second argument position.
   Returns the canonicalized tree if changed, otherwise NULL_TREE.  */

static tree
maybe_canonicalize_comparison (location_t loc, enum tree_code code,
                               tree type, tree arg0, tree arg1)
{
  tree t;
  bool strict_overflow_p;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
                                  "when reducing constant in comparison");

  /* Try canonicalization by simplifying arg0.  */
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
                                       &strict_overflow_p);
  if (t)
    {
      if (strict_overflow_p)
        fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
      return t;
    }

  /* Try canonicalization by simplifying arg1 using the swapped
     comparison.  */
  code = swap_tree_comparison (code);
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
                                       &strict_overflow_p);
  if (t && strict_overflow_p)
    fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
  return t;
}
/* Return whether BASE + OFFSET + BITPOS may wrap around the address
   space.  This is used to avoid issuing overflow warnings for
   expressions like &p->x which can not wrap.  */

static bool
pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
{
  if (!POINTER_TYPE_P (TREE_TYPE (base)))
    return true;

  if (bitpos < 0)
    return true;

  wide_int wi_offset;
  int precision = TYPE_PRECISION (TREE_TYPE (base));
  if (offset == NULL_TREE)
    wi_offset = wi::zero (precision);
  else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
    return true;
  else
    wi_offset = offset;

  bool overflow;
  wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
  wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
  if (overflow)
    return true;

  if (!wi::fits_uhwi_p (total))
    return true;

  HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
  if (size <= 0)
    return true;

  /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
     array.  */
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      HOST_WIDE_INT base_size;

      base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
      if (base_size > 0 && size < base_size)
        size = base_size;
    }

  return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
}
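/* For instance, for &p->x with p of type struct S * and x at byte
   offset 8 in a 16-byte struct, the total offset stays within the
   object size, so the function returns false and no spurious overflow
   warning is issued for comparisons involving p.  */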
/* Return the HOST_WIDE_INT least significant bits of T, a sizetype
   kind INTEGER_CST.  This makes sure to properly sign-extend the
   constant.  */

static HOST_WIDE_INT
size_low_cst (const_tree t)
{
  HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
  int prec = TYPE_PRECISION (TREE_TYPE (t));
  if (prec < HOST_BITS_PER_WIDE_INT)
    return sext_hwi (w, prec);
  return w;
}
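/* E.g. when sizetype has 32-bit precision while HOST_WIDE_INT is
   64 bits wide, the sizetype constant 0xffffffff is returned as -1
   after sign-extension rather than as 4294967295.  */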
/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary should call fold_binary.  Fold a comparison with
   tree code CODE and type TYPE with operands OP0 and OP1.  Return
   the folded comparison or NULL_TREE.  */

static tree
fold_comparison (location_t loc, enum tree_code code, tree type,
                 tree op0, tree op1)
{
  const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
  tree arg0, arg1, tem;

  arg0 = op0;
  arg1 = op1;

  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  tem = fold_relational_const (code, type, arg0, arg1);
  if (tem != NULL_TREE)
    return tem;

  /* If one arg is a real or integer constant, put it last.  */
  if (tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
  /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1.  */
  if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (equality_code || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && TREE_CODE (arg1) == INTEGER_CST
      && !TREE_OVERFLOW (arg1))
    {
      const enum tree_code
        reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
      tree variable = TREE_OPERAND (arg0, 0);
      tree new_const = int_const_binop (reverse_op, const2, const1);

      /* If the constant operation overflowed this can be
         simplified as a comparison against INT_MAX/INT_MIN.  */
      if (TREE_OVERFLOW (new_const))
        {
          int const1_sgn = tree_int_cst_sgn (const1);
          enum tree_code code2 = code;

          /* Get the sign of the constant on the lhs if the
             operation were VARIABLE + CONST1.  */
          if (TREE_CODE (arg0) == MINUS_EXPR)
            const1_sgn = -const1_sgn;

          /* The sign of the constant determines if we overflowed
             INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
             Canonicalize to the INT_MIN overflow by swapping the comparison
             if necessary.  */
          if (const1_sgn == -1)
            code2 = swap_tree_comparison (code);

          /* We now can look at the canonicalized case
               VARIABLE + 1  CODE2  INT_MIN
             and decide on the result.  */
          switch (code2)
            {
            case EQ_EXPR:
            case LT_EXPR:
            case LE_EXPR:
              return
                omit_one_operand_loc (loc, type, boolean_false_node, variable);

            case NE_EXPR:
            case GE_EXPR:
            case GT_EXPR:
              return
                omit_one_operand_loc (loc, type, boolean_true_node, variable);

            default:
              gcc_unreachable ();
            }
        }
      else
        {
          if (!equality_code)
            fold_overflow_warning ("assuming signed overflow does not occur "
                                   "when changing X +- C1 cmp C2 to "
                                   "X cmp C2 -+ C1",
                                   WARN_STRICT_OVERFLOW_COMPARISON);
          return fold_build2_loc (loc, code, type, variable, new_const);
        }
    }
  /* Transform comparisons of the form X - Y CMP 0 to X CMP Y.  */
  if (TREE_CODE (arg0) == MINUS_EXPR
      && (equality_code || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
      && integer_zerop (arg1))
    {
      /* ??? The transformation is valid for the other operators if overflow
         is undefined for the type, but performing it here badly interacts
         with the transformation in fold_cond_expr_with_comparison which
         attempts to synthetize ABS_EXPR.  */
      if (!equality_code)
        fold_overflow_warning ("assuming signed overflow does not occur "
                               "when changing X - Y cmp 0 to X cmp Y",
                               WARN_STRICT_OVERFLOW_COMPARISON);
      return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                              TREE_OPERAND (arg0, 1));
    }
  /* For comparisons of pointers we can decompose it to a compile time
     comparison of the base objects and the offsets into the object.
     This requires at least one operand being an ADDR_EXPR or a
     POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == ADDR_EXPR
          || TREE_CODE (arg1) == ADDR_EXPR
          || TREE_CODE (arg0) == POINTER_PLUS_EXPR
          || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
    {
      tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
      HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
      enum machine_mode mode;
      int volatilep, unsignedp;
      bool indirect_base0 = false, indirect_base1 = false;

      /* Get base and offset for the access.  Strip ADDR_EXPR for
         get_inner_reference, but put it back by stripping INDIRECT_REF
         off the base object if possible.  indirect_baseN will be true
         if baseN is not an address but refers to the object itself.  */
      base0 = arg0;
      if (TREE_CODE (arg0) == ADDR_EXPR)
        {
          base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
                                       &bitsize, &bitpos0, &offset0, &mode,
                                       &unsignedp, &volatilep, false);
          if (TREE_CODE (base0) == INDIRECT_REF)
            base0 = TREE_OPERAND (base0, 0);
          else
            indirect_base0 = true;
        }
      else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
        {
          base0 = TREE_OPERAND (arg0, 0);
          STRIP_SIGN_NOPS (base0);
          if (TREE_CODE (base0) == ADDR_EXPR)
            {
              base0 = TREE_OPERAND (base0, 0);
              indirect_base0 = true;
            }
          offset0 = TREE_OPERAND (arg0, 1);
          if (tree_fits_shwi_p (offset0))
            {
              HOST_WIDE_INT off = size_low_cst (offset0);
              if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
                                   * BITS_PER_UNIT)
                  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
                {
                  bitpos0 = off * BITS_PER_UNIT;
                  offset0 = NULL_TREE;
                }
            }
        }

      base1 = arg1;
      if (TREE_CODE (arg1) == ADDR_EXPR)
        {
          base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
                                       &bitsize, &bitpos1, &offset1, &mode,
                                       &unsignedp, &volatilep, false);
          if (TREE_CODE (base1) == INDIRECT_REF)
            base1 = TREE_OPERAND (base1, 0);
          else
            indirect_base1 = true;
        }
      else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
        {
          base1 = TREE_OPERAND (arg1, 0);
          STRIP_SIGN_NOPS (base1);
          if (TREE_CODE (base1) == ADDR_EXPR)
            {
              base1 = TREE_OPERAND (base1, 0);
              indirect_base1 = true;
            }
          offset1 = TREE_OPERAND (arg1, 1);
          if (tree_fits_shwi_p (offset1))
            {
              HOST_WIDE_INT off = size_low_cst (offset1);
              if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
                                   * BITS_PER_UNIT)
                  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
                {
                  bitpos1 = off * BITS_PER_UNIT;
                  offset1 = NULL_TREE;
                }
            }
        }
      /* A local variable can never be pointed to by
         the default SSA name of an incoming parameter.  */
      if ((TREE_CODE (arg0) == ADDR_EXPR
           && indirect_base0
           && TREE_CODE (base0) == VAR_DECL
           && auto_var_in_fn_p (base0, current_function_decl)
           && !indirect_base1
           && TREE_CODE (base1) == SSA_NAME
           && SSA_NAME_IS_DEFAULT_DEF (base1)
           && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
          || (TREE_CODE (arg1) == ADDR_EXPR
              && indirect_base1
              && TREE_CODE (base1) == VAR_DECL
              && auto_var_in_fn_p (base1, current_function_decl)
              && !indirect_base0
              && TREE_CODE (base0) == SSA_NAME
              && SSA_NAME_IS_DEFAULT_DEF (base0)
              && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
        {
          if (code == NE_EXPR)
            return constant_boolean_node (1, type);
          else if (code == EQ_EXPR)
            return constant_boolean_node (0, type);
        }
      /* If we have equivalent bases we might be able to simplify.  */
      else if (indirect_base0 == indirect_base1
               && operand_equal_p (base0, base1, 0))
        {
          /* We can fold this expression to a constant if the non-constant
             offset parts are equal.  */
          if ((offset0 == offset1
               || (offset0 && offset1
                   && operand_equal_p (offset0, offset1, 0)))
              && (equality_code
                  || (indirect_base0 && DECL_P (base0))
                  || POINTER_TYPE_OVERFLOW_UNDEFINED))
            {
              if (!equality_code
                  && bitpos0 != bitpos1
                  && (pointer_may_wrap_p (base0, offset0, bitpos0)
                      || pointer_may_wrap_p (base1, offset1, bitpos1)))
                fold_overflow_warning (("assuming pointer wraparound does not "
                                        "occur when comparing P +- C1 with "
                                        "P +- C2"),
                                       WARN_STRICT_OVERFLOW_CONDITIONAL);

              switch (code)
                {
                case EQ_EXPR:
                  return constant_boolean_node (bitpos0 == bitpos1, type);
                case NE_EXPR:
                  return constant_boolean_node (bitpos0 != bitpos1, type);
                case LT_EXPR:
                  return constant_boolean_node (bitpos0 < bitpos1, type);
                case LE_EXPR:
                  return constant_boolean_node (bitpos0 <= bitpos1, type);
                case GE_EXPR:
                  return constant_boolean_node (bitpos0 >= bitpos1, type);
                case GT_EXPR:
                  return constant_boolean_node (bitpos0 > bitpos1, type);
                default:;
                }
            }
          /* We can simplify the comparison to a comparison of the variable
             offset parts if the constant offset parts are equal.
             Be careful to use signed sizetype here because otherwise we
             mess with array offsets in the wrong way.  This is possible
             because pointer arithmetic is restricted to retain within an
             object and overflow on pointer differences is undefined as of
             6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
          else if (bitpos0 == bitpos1
                   && (equality_code
                       || (indirect_base0 && DECL_P (base0))
                       || POINTER_TYPE_OVERFLOW_UNDEFINED))
            {
              /* By converting to signed sizetype we cover middle-end pointer
                 arithmetic which operates on unsigned pointer types of size
                 type size and ARRAY_REF offsets which are properly sign or
                 zero extended from their type in case it is narrower than
                 sizetype.  */
              if (offset0 == NULL_TREE)
                offset0 = build_int_cst (ssizetype, 0);
              else
                offset0 = fold_convert_loc (loc, ssizetype, offset0);
              if (offset1 == NULL_TREE)
                offset1 = build_int_cst (ssizetype, 0);
              else
                offset1 = fold_convert_loc (loc, ssizetype, offset1);

              if (!equality_code
                  && (pointer_may_wrap_p (base0, offset0, bitpos0)
                      || pointer_may_wrap_p (base1, offset1, bitpos1)))
                fold_overflow_warning (("assuming pointer wraparound does not "
                                        "occur when comparing P +- C1 with "
                                        "P +- C2"),
                                       WARN_STRICT_OVERFLOW_COMPARISON);

              return fold_build2_loc (loc, code, type, offset0, offset1);
            }
        }
      /* For non-equal bases we can simplify if they are addresses
         of local binding decls or constants.  */
      else if (indirect_base0 && indirect_base1
               /* We know that !operand_equal_p (base0, base1, 0)
                  because the if condition was false.  But make
                  sure two decls are not the same.  */
               && base0 != base1
               && TREE_CODE (arg0) == ADDR_EXPR
               && TREE_CODE (arg1) == ADDR_EXPR
               && (((TREE_CODE (base0) == VAR_DECL
                     || TREE_CODE (base0) == PARM_DECL)
                    && (targetm.binds_local_p (base0)
                        || CONSTANT_CLASS_P (base1)))
                   || CONSTANT_CLASS_P (base0))
               && (((TREE_CODE (base1) == VAR_DECL
                     || TREE_CODE (base1) == PARM_DECL)
                    && (targetm.binds_local_p (base1)
                        || CONSTANT_CLASS_P (base0)))
                   || CONSTANT_CLASS_P (base1)))
        {
          if (code == EQ_EXPR)
            return omit_two_operands_loc (loc, type, boolean_false_node,
                                          arg0, arg1);
          else if (code == NE_EXPR)
            return omit_two_operands_loc (loc, type, boolean_true_node,
                                          arg0, arg1);
        }
      /* For equal offsets we can simplify to a comparison of the
         base addresses.  */
      else if (bitpos0 == bitpos1
               && (indirect_base0
                   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
               && (indirect_base1
                   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
               && ((offset0 == offset1)
                   || (offset0 && offset1
                       && operand_equal_p (offset0, offset1, 0))))
        {
          if (indirect_base0)
            base0 = build_fold_addr_expr_loc (loc, base0);
          if (indirect_base1)
            base1 = build_fold_addr_expr_loc (loc, base1);
          return fold_build2_loc (loc, code, type, base0, base1);
        }
    }
  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one and has the same sign.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = TREE_OPERAND (arg1, 1);
      tree variable1 = TREE_OPERAND (arg0, 0);
      tree variable2 = TREE_OPERAND (arg1, 0);
      tree cst;
      const char * const warnmsg = G_("assuming signed overflow does not "
                                      "occur when combining constants around "
                                      "a comparison");

      /* Put the constant on the side where it doesn't overflow and is
         of lower absolute value and of same sign than before.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
                             ? MINUS_EXPR : PLUS_EXPR,
                             const2, const1);
      if (!TREE_OVERFLOW (cst)
          && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
          && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
        {
          fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
          return fold_build2_loc (loc, code, type, variable1,
                                  fold_build2_loc (loc, TREE_CODE (arg1),
                                                   TREE_TYPE (arg1),
                                                   variable2, cst));
        }

      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
                             ? MINUS_EXPR : PLUS_EXPR,
                             const1, const2);
      if (!TREE_OVERFLOW (cst)
          && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
          && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
        {
          fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
          return fold_build2_loc (loc, code, type,
                                  fold_build2_loc (loc, TREE_CODE (arg0),
                                                   TREE_TYPE (arg0),
                                                   variable1, cst),
                                  variable2);
        }
    }
  /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
     signed arithmetic case.  That form is created by the compiler
     often enough for folding it to be of value.  One example is in
     computing loop trip counts after Operator Strength Reduction.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && TREE_CODE (arg0) == MULT_EXPR
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && integer_zerop (arg1))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;                       /* zero */
      tree variable1 = TREE_OPERAND (arg0, 0);
      enum tree_code cmp_code = code;

      /* Handle unfolded multiplication by zero.  */
      if (integer_zerop (const1))
        return fold_build2_loc (loc, cmp_code, type, const1, const2);

      fold_overflow_warning (("assuming signed overflow does not occur when "
                              "eliminating multiplication in comparison "
                              "with zero"),
                             WARN_STRICT_OVERFLOW_COMPARISON);

      /* If const1 is negative we swap the sense of the comparison.  */
      if (tree_int_cst_sgn (const1) < 0)
        cmp_code = swap_tree_comparison (cmp_code);

      return fold_build2_loc (loc, cmp_code, type, variable1, const2);
    }
  tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
  if (tem)
    return tem;

  if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
    {
      tree targ0 = strip_float_extensions (arg0);
      tree targ1 = strip_float_extensions (arg1);
      tree newtype = TREE_TYPE (targ0);

      if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
        newtype = TREE_TYPE (targ1);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
        return fold_build2_loc (loc, code, type,
                                fold_convert_loc (loc, newtype, targ0),
                                fold_convert_loc (loc, newtype, targ1));

      /* (-a) CMP (-b) -> b CMP a  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
                                TREE_OPERAND (arg0, 0));

      if (TREE_CODE (arg1) == REAL_CST)
        {
          REAL_VALUE_TYPE cst;
          cst = TREE_REAL_CST (arg1);

          /* (-a) CMP CST -> a swap(CMP) (-CST)  */
          if (TREE_CODE (arg0) == NEGATE_EXPR)
            return fold_build2_loc (loc, swap_tree_comparison (code), type,
                                    TREE_OPERAND (arg0, 0),
                                    build_real (TREE_TYPE (arg1),
                                                real_value_negate (&cst)));

          /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
          /* a CMP (-0) -> a CMP 0  */
          if (REAL_VALUE_MINUS_ZERO (cst))
            return fold_build2_loc (loc, code, type, arg0,
                                    build_real (TREE_TYPE (arg1), dconst0));

          /* x != NaN is always true, other ops are always false.  */
          if (REAL_VALUE_ISNAN (cst)
              && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
            {
              tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
              return omit_one_operand_loc (loc, type, tem, arg0);
            }

          /* Fold comparisons against infinity.  */
          if (REAL_VALUE_ISINF (cst)
              && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
            {
              tem = fold_inf_compare (loc, code, type, arg0, arg1);
              if (tem != NULL_TREE)
                return tem;
            }
        }

      /* If this is a comparison of a real constant with a PLUS_EXPR
         or a MINUS_EXPR of a real constant, we can convert it into a
         comparison with a revised real constant as long as no overflow
         occurs when unsafe_math_optimizations are enabled.  */
      if (flag_unsafe_math_optimizations
          && TREE_CODE (arg1) == REAL_CST
          && (TREE_CODE (arg0) == PLUS_EXPR
              || TREE_CODE (arg0) == MINUS_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
          && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
                                      ? MINUS_EXPR : PLUS_EXPR,
                                      arg1, TREE_OPERAND (arg0, 1)))
          && !TREE_OVERFLOW (tem))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Likewise, we can simplify a comparison of a real constant with
         a MINUS_EXPR whose first operand is also a real constant, i.e.
         (c1 - x) < c2 becomes x > c1-c2.  Reordering is allowed on
         floating-point types only if -fassociative-math is set.  */
      if (flag_associative_math
          && TREE_CODE (arg1) == REAL_CST
          && TREE_CODE (arg0) == MINUS_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
          && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
                                      arg1))
          && !TREE_OVERFLOW (tem))
        return fold_build2_loc (loc, swap_tree_comparison (code), type,
                                TREE_OPERAND (arg0, 1), tem);

      /* Fold comparisons against built-in math functions.  */
      if (TREE_CODE (arg1) == REAL_CST
          && flag_unsafe_math_optimizations
          && ! flag_errno_math)
        {
          enum built_in_function fcode = builtin_mathfn_code (arg0);

          if (fcode != END_BUILTINS)
            {
              tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
              if (tem != NULL_TREE)
                return tem;
            }
        }
    }
  if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
      && CONVERT_EXPR_P (arg0))
    {
      /* If we are widening one operand of an integer comparison,
         see if the other operand is similarly being widened.  Perhaps we
         can do the comparison in the narrower type.  */
      tem = fold_widened_comparison (loc, code, type, arg0, arg1);
      if (tem)
        return tem;

      /* Or if we are changing signedness.  */
      tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
      if (tem)
        return tem;
    }

  /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
     constant, we can simplify it.  */
  if (TREE_CODE (arg1) == INTEGER_CST
      && (TREE_CODE (arg0) == MIN_EXPR
          || TREE_CODE (arg0) == MAX_EXPR)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
    {
      tem = optimize_minmax_comparison (loc, code, type, op0, op1);
      if (tem)
        return tem;
    }
  /* Simplify comparison of something with itself.  (For IEEE
     floating-point, we can only do some of these simplifications.)  */
  if (operand_equal_p (arg0, arg1, 0))
    {
      switch (code)
        {
        case EQ_EXPR:
          if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
              || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
            return constant_boolean_node (1, type);
          break;

        case GE_EXPR:
        case LE_EXPR:
          if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
              || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
            return constant_boolean_node (1, type);
          return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);

        case NE_EXPR:
          /* For NE, we can only do this simplification if integer
             or we don't honor IEEE floating point NaNs.  */
          if (FLOAT_TYPE_P (TREE_TYPE (arg0))
              && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
            break;
          /* ... fall through ...  */
        case GT_EXPR:
        case LT_EXPR:
          return constant_boolean_node (0, type);
        default:
          gcc_unreachable ();
        }
    }
  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;
      int save_p = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
          /* Don't handle degenerate cases here; they should already
             have been handled anyway.  */
          && cval1 != 0 && cval2 != 0
          && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
          && TREE_TYPE (cval1) == TREE_TYPE (cval2)
          && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
          && TYPE_MAX_VALUE (TREE_TYPE (cval1))
          && TYPE_MAX_VALUE (TREE_TYPE (cval2))
          && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
                                TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
        {
          tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
          tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

          /* We can't just pass T to eval_subst in case cval1 or cval2
             was the same as ARG1.  */

          tree high_result
            = fold_build2_loc (loc, code, type,
                               eval_subst (loc, arg0, cval1, maxval,
                                           cval2, minval),
                               arg1);
          tree equal_result
            = fold_build2_loc (loc, code, type,
                               eval_subst (loc, arg0, cval1, maxval,
                                           cval2, maxval),
                               arg1);
          tree low_result
            = fold_build2_loc (loc, code, type,
                               eval_subst (loc, arg0, cval1, minval,
                                           cval2, maxval),
                               arg1);

          /* All three of these results should be 0 or 1.  Confirm they are.
             Then use those values to select the proper code to use.  */

          if (TREE_CODE (high_result) == INTEGER_CST
              && TREE_CODE (equal_result) == INTEGER_CST
              && TREE_CODE (low_result) == INTEGER_CST)
            {
              /* Make a 3-bit mask with the high-order bit being the
                 value for `>', the next for '=', and the low for '<'.  */
              switch ((integer_onep (high_result) * 4)
                      + (integer_onep (equal_result) * 2)
                      + integer_onep (low_result))
                {
                case 0:
                  /* Always false.  */
                  return omit_one_operand_loc (loc, type, integer_zero_node,
                                               arg0);
                case 1:
                  code = LT_EXPR;
                  break;
                case 2:
                  code = EQ_EXPR;
                  break;
                case 3:
                  code = LE_EXPR;
                  break;
                case 4:
                  code = GT_EXPR;
                  break;
                case 5:
                  code = NE_EXPR;
                  break;
                case 6:
                  code = GE_EXPR;
                  break;
                case 7:
                  /* Always true.  */
                  return omit_one_operand_loc (loc, type, integer_one_node,
                                               arg0);
                }

              if (save_p)
                {
                  tem = save_expr (build2 (code, type, cval1, cval2));
                  SET_EXPR_LOCATION (tem, loc);
                  return tem;
                }
              return fold_build2_loc (loc, code, type, cval1, cval2);
            }
        }
    }
  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
     into a single range test.  */
  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
      && TREE_CODE (arg1) == INTEGER_CST
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !integer_zerop (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (arg1))
    {
      tem = fold_div_compare (loc, code, type, arg0, arg1);
      if (tem != NULL_TREE)
        return tem;
    }
  /* Fold ~X op ~Y as Y op X.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == BIT_NOT_EXPR)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, code, type,
                              fold_convert_loc (loc, cmp_type,
                                                TREE_OPERAND (arg1, 0)),
                              TREE_OPERAND (arg0, 0));
    }

  /* Fold ~X op C as X op' ~C, where op' is the swapped comparison.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, swap_tree_comparison (code), type,
                              TREE_OPERAND (arg0, 0),
                              fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
                                               fold_convert_loc (loc, cmp_type,
                                                                 arg1)));
    }

  return NULL_TREE;
}
/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */

static tree
fold_mult_zconjz (location_t loc, tree type, tree expr)
{
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      expr = save_expr (expr);
      rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
      ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
    }

  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2_loc (loc, PLUS_EXPR, itype,
                         fold_build2_loc (loc, MULT_EXPR, itype,
                                          rpart, rpart),
                         fold_build2_loc (loc, MULT_EXPR, itype,
                                          ipart, ipart));
  return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
                          build_zero_cst (itype));
}
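/* The identity used above: for z = a + b*i,
   z * conj(z) = (a + b*i) * (a - b*i) = a*a + b*b,
   so the imaginary part of the product is identically zero.  */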
/* Subroutine of fold_binary.  If P is the value of EXPR, computes
   power-of-two M and (arbitrary) N such that M divides (P-N).  This
   condition guarantees that P and N have the same least significant
   log2(M) bits.  N is not otherwise constrained.  In particular, N is
   not normalized to 0 <= N < M as is common.  In general, the precise
   value of P is unknown.  M is chosen as large as possible such that
   constant N can be determined.

   Returns M and sets *RESIDUE to N.

   If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
   account.  This is not always possible due to PR 35705.  */

static unsigned HOST_WIDE_INT
get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
                                 bool allow_func_align)
{
  enum tree_code code;

  *residue = 0;

  code = TREE_CODE (expr);
  if (code == ADDR_EXPR)
    {
      unsigned int bitalign;
      get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
      *residue /= BITS_PER_UNIT;
      return bitalign / BITS_PER_UNIT;
    }
  else if (code == POINTER_PLUS_EXPR)
    {
      tree op0, op1;
      unsigned HOST_WIDE_INT modulus;
      enum tree_code inner_code;

      op0 = TREE_OPERAND (expr, 0);
      STRIP_NOPS (op0);
      modulus = get_pointer_modulus_and_residue (op0, residue,
                                                 allow_func_align);

      op1 = TREE_OPERAND (expr, 1);
      STRIP_NOPS (op1);
      inner_code = TREE_CODE (op1);
      if (inner_code == INTEGER_CST)
        {
          *residue += TREE_INT_CST_LOW (op1);
          return modulus;
        }
      else if (inner_code == MULT_EXPR)
        {
          op1 = TREE_OPERAND (op1, 1);
          if (TREE_CODE (op1) == INTEGER_CST)
            {
              unsigned HOST_WIDE_INT align;

              /* Compute the greatest power-of-2 divisor of op1.  */
              align = TREE_INT_CST_LOW (op1);
              align &= -align;

              /* If align is non-zero and less than *modulus, replace
                 *modulus with align.  If align is 0, then either op1 is 0
                 or the greatest power-of-2 divisor of op1 doesn't fit in an
                 unsigned HOST_WIDE_INT.  In either case, no additional
                 constraint is imposed.  */
              if (align)
                modulus = MIN (modulus, align);

              return modulus;
            }
        }
    }

  /* If we get here, we were unable to determine anything useful about the
     expression.  */
  return 1;
}
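/* Example: for a global array known to be 16-byte aligned, the pointer
   expression &arr[0] + i * 8 yields modulus 8 (the smaller of the base
   alignment 16 and the power-of-two factor of the MULT_EXPR) with
   residue 0, i.e. the pointer value is known to be a multiple of 8.  */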
/* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
   CONSTRUCTOR ARG into array ELTS and return true if successful.  */

static bool
vec_cst_ctor_to_array (tree arg, tree *elts)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;

  if (TREE_CODE (arg) == VECTOR_CST)
    {
      for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
        elts[i] = VECTOR_CST_ELT (arg, i);
    }
  else if (TREE_CODE (arg) == CONSTRUCTOR)
    {
      constructor_elt *elt;

      FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
        if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
          return false;
        else
          elts[i] = elt->value;
    }
  else
    return false;
  for (; i < nelts; i++)
    elts[i]
      = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
  return true;
}
/* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
   selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
   NULL_TREE otherwise.  */

static tree
fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
  tree *elts;
  bool need_ctor = false;

  gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
              && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
  if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
      || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
    return NULL_TREE;

  elts = XALLOCAVEC (tree, nelts * 3);
  if (!vec_cst_ctor_to_array (arg0, elts)
      || !vec_cst_ctor_to_array (arg1, elts + nelts))
    return NULL_TREE;

  for (i = 0; i < nelts; i++)
    {
      if (!CONSTANT_CLASS_P (elts[sel[i]]))
        need_ctor = true;
      elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
    }

  if (need_ctor)
    {
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, nelts);
      for (i = 0; i < nelts; i++)
        CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
      return build_constructor (type, v);
    }
  else
    return build_vector (type, &elts[2 * nelts]);
}
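/* Usage example: with nelts == 4, the selector {0, 4, 1, 5} interleaves
   the low halves of ARG0 and ARG1 -- indices below nelts pick elements
   of ARG0, the rest pick from ARG1.  A CONSTRUCTOR is built only when
   some selected element is not itself a constant.  */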
/* Try to fold a pointer difference of type TYPE of two address expressions
   of array references AREF0 and AREF1 using location LOC.  Return a
   simplified expression for the difference or NULL_TREE.  */

static tree
fold_addr_of_array_ref_difference (location_t loc, tree type,
                                   tree aref0, tree aref1)
{
  tree base0 = TREE_OPERAND (aref0, 0);
  tree base1 = TREE_OPERAND (aref1, 0);
  tree base_offset = build_int_cst (type, 0);

  /* If the bases are array references as well, recurse.  If the bases
     are pointer indirections compute the difference of the pointers.
     If the bases are equal, we are set.  */
  if ((TREE_CODE (base0) == ARRAY_REF
       && TREE_CODE (base1) == ARRAY_REF
       && (base_offset
           = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
      || (INDIRECT_REF_P (base0)
          && INDIRECT_REF_P (base1)
          && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
                                             TREE_OPERAND (base0, 0),
                                             TREE_OPERAND (base1, 0))))
      || operand_equal_p (base0, base1, 0))
    {
      tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
      tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
      tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
      tree diff = build2 (MINUS_EXPR, type, op0, op1);
      return fold_build2_loc (loc, PLUS_EXPR, type,
                              base_offset,
                              fold_build2_loc (loc, MULT_EXPR, type,
                                               diff, esz));
    }
  return NULL_TREE;
}
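
/* Illustrative example (not from the original source): for the
   difference &a[i] - &a[j], the bases compare equal via
   operand_equal_p, base_offset stays 0, and the result is
   ((type) i - (type) j) * element_size, the byte distance between
   the two addresses.  */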
/* If the real or vector real constant CST of type TYPE has an exact
   inverse, return it, else return NULL.  */

static tree
exact_inverse (tree type, tree cst)
{
  REAL_VALUE_TYPE r;
  tree unit_type, *elts;
  enum machine_mode mode;
  unsigned vec_nelts, i;

  switch (TREE_CODE (cst))
    {
    case REAL_CST:
      r = TREE_REAL_CST (cst);

      if (exact_real_inverse (TYPE_MODE (type), &r))
        return build_real (type, r);

      return NULL_TREE;

    case VECTOR_CST:
      vec_nelts = VECTOR_CST_NELTS (cst);
      elts = XALLOCAVEC (tree, vec_nelts);
      unit_type = TREE_TYPE (type);
      mode = TYPE_MODE (unit_type);

      for (i = 0; i < vec_nelts; i++)
        {
          r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
          if (!exact_real_inverse (mode, &r))
            return NULL_TREE;
          elts[i] = build_real (unit_type, r);
        }

      return build_vector (type, elts);

    default:
      return NULL_TREE;
    }
}
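
/* Illustrative example (not from the original source): 0.25 has the
   exact inverse 4.0, so exact_inverse returns it and a division by
   0.25 can later become a multiplication by 4.0; 0.1 has no exact
   binary inverse, so NULL is returned and the division is left
   alone.  */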
/* Mask out the tz least significant bits of X of type TYPE where
   tz is the number of trailing zeroes in Y.  */

static wide_int
mask_with_tz (tree type, const wide_int &x, const wide_int &y)
{
  int tz = wi::ctz (y);
  if (tz > 0)
    return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
  return x;
}
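
/* Illustrative example (not from the original source): for y == 8
   (binary 1000, three trailing zeros) and a 32-bit x, the inverted
   mask is ~7, so mask_with_tz (type, x, 8) computes x & ~7, clearing
   the low bits that any multiple of y has zero anyway.  */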
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
                                         strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
                                          TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1),
                                          strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
                                         strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
                                          TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1),
                                          strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                        strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
                                        strict_overflow_p);

    case CALL_EXPR:
      {
        tree fndecl = get_callee_fndecl (t);
        if (!fndecl) return false;
        if (flag_delete_null_pointer_checks && !flag_check_new
            && DECL_IS_OPERATOR_NEW (fndecl)
            && !TREE_NOTHROW (fndecl))
          return true;
        if (flag_delete_null_pointer_checks
            && lookup_attribute ("returns_nonnull",
                                 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
          return true;
        return alloca_call_p (t);
      }

    default:
      break;
    }
  return false;
}
/* Return true when T is an address and is known to be nonzero.
   Handle warnings about undefined signed overflow.  */

bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
                            "determining that expression is always "
                            "non-zero"),
                           WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
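
/* Illustrative usage sketch (not from the original source): a caller
   eliminating null-pointer checks can ask

     if (tree_expr_nonzero_p (ptr_expr))
       ... drop the ptr_expr != 0 test ...

   and the wrapper above takes care of emitting the strict-overflow
   warning whenever the answer relied on signed overflow being
   undefined.  */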
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  LOC is the location of the resulting expression.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

tree
fold_binary_loc (location_t loc,
                 enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree arg0, arg1, tem;
  tree t1 = NULL_TREE;
  bool strict_overflow_p;

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 2
              && op0 != NULL_TREE
              && op1 != NULL_TREE);

  arg0 = op0;
  arg1 = op1;

  /* Strip any conversions that don't change the mode.  This is
     safe for every expression, except for a comparison expression
     because its signedness is derived from its operands.  So, in
     the latter case, only strip conversions that don't change the
     signedness.  MIN_EXPR/MAX_EXPR also need signedness of arguments
     preserved.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation
     of the arguments so that their form can be studied.  In any
     cases, the appropriate type conversions should be put back in
     the tree that will get out of the constant folder.  */

  if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
    {
      STRIP_SIGN_NOPS (arg0);
      STRIP_SIGN_NOPS (arg1);
    }
  else
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
    }

  /* Note that TREE_CONSTANT isn't enough: static var addresses are
     constant but we can't do arithmetic on them.  */
  if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
    {
      if (kind == tcc_binary)
        {
          /* Make sure type and arg0 have the same saturating flag.  */
          gcc_assert (TYPE_SATURATING (type)
                      == TYPE_SATURATING (TREE_TYPE (arg0)));
          tem = const_binop (code, arg0, arg1);
        }
      else if (kind == tcc_comparison)
        tem = fold_relational_const (code, type, arg0, arg1);
      else
        tem = NULL_TREE;

      if (tem != NULL_TREE)
        {
          if (TREE_TYPE (tem) != type)
            tem = fold_convert_loc (loc, type, tem);
          return tem;
        }
    }

  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, code, type, op1, op0);

  /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     where one of the operands is a comparison and the other is a
     comparison, a BIT_AND_EXPR with the constant 1, or a truth value.
     In that case, the code below would make the expression more complex.
     Change it to a TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar
     NE_EXPR to TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a
     TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && TREE_CODE (type) != VECTOR_TYPE
      && ((truth_value_p (TREE_CODE (arg0))
           && (truth_value_p (TREE_CODE (arg1))
               || (TREE_CODE (arg1) == BIT_AND_EXPR
                   && integer_onep (TREE_OPERAND (arg1, 1)))))
          || (truth_value_p (TREE_CODE (arg1))
              && (truth_value_p (TREE_CODE (arg0))
                  || (TREE_CODE (arg0) == BIT_AND_EXPR
                      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
                             : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
                             : TRUTH_XOR_EXPR,
                             boolean_type_node,
                             fold_convert_loc (loc, boolean_type_node, arg0),
                             fold_convert_loc (loc, boolean_type_node, arg1));

      if (code == EQ_EXPR)
        tem = invert_truthvalue_loc (loc, tem);

      return fold_convert_loc (loc, type, tem);
    }

  if (TREE_CODE_CLASS (code) == tcc_binary
      || TREE_CODE_CLASS (code) == tcc_comparison)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
        {
          tem = fold_build2_loc (loc, code, type,
                                 fold_convert_loc (loc, TREE_TYPE (op0),
                                                   TREE_OPERAND (arg0, 1)),
                                 op1);
          return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
                             tem);
        }
      if (TREE_CODE (arg1) == COMPOUND_EXPR
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_build2_loc (loc, code, type, op0,
                                 fold_convert_loc (loc, TREE_TYPE (op1),
                                                   TREE_OPERAND (arg1, 1)));
          return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
                             tem);
        }

      if (TREE_CODE (arg0) == COND_EXPR
          || TREE_CODE (arg0) == VEC_COND_EXPR
          || COMPARISON_CLASS_P (arg0))
        {
          tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
                                                     arg0, arg1,
                                                     /*cond_first_p=*/1);
          if (tem != NULL_TREE)
            return tem;
        }

      if (TREE_CODE (arg1) == COND_EXPR
          || TREE_CODE (arg1) == VEC_COND_EXPR
          || COMPARISON_CLASS_P (arg1))
        {
          tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
                                                     arg1, arg0,
                                                     /*cond_first_p=*/0);
          if (tem != NULL_TREE)
            return tem;
        }
    }

  switch (code)
    {
    case MEM_REF:
      /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2].  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
        {
          tree iref = TREE_OPERAND (arg0, 0);
          return fold_build2 (MEM_REF, type,
                              TREE_OPERAND (iref, 0),
                              int_const_binop (PLUS_EXPR, arg1,
                                               TREE_OPERAND (iref, 1)));
        }

      /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2].  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && handled_component_p (TREE_OPERAND (arg0, 0)))
        {
          tree base;
          HOST_WIDE_INT coffset;
          base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
                                                &coffset);
          if (!base)
            return NULL_TREE;
          return fold_build2 (MEM_REF, type,
                              build_fold_addr_expr (base),
                              int_const_binop (PLUS_EXPR, arg1,
                                               size_int (coffset)));
        }

      return NULL_TREE;
    case POINTER_PLUS_EXPR:
      /* 0 +p index -> (type)index */
      if (integer_zerop (arg0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

      /* PTR +p 0 -> PTR */
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
          && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
        return fold_convert_loc (loc, type,
                                 fold_build2_loc (loc, PLUS_EXPR, sizetype,
                                                  fold_convert_loc (loc, sizetype,
                                                                    arg0),
                                                  fold_convert_loc (loc, sizetype,
                                                                    arg1)));

      /* (PTR +p B) +p A -> PTR +p (B + A) */
      if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
        {
          tree inner;
          tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
          tree arg00 = TREE_OPERAND (arg0, 0);
          inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
                                   arg01, fold_convert_loc (loc, sizetype, arg1));
          return fold_convert_loc (loc, type,
                                   fold_build_pointer_plus_loc (loc,
                                                                arg00, inner));
        }

      /* PTR_CST +p CST -> CST1 */
      if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
        return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
                                fold_convert_loc (loc, type, arg1));

      /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
         of the array.  Loop optimizer sometimes produces this type of
         expressions.  */
      if (TREE_CODE (arg0) == ADDR_EXPR)
        {
          tem = try_move_mult_to_index (loc, arg0,
                                        fold_convert_loc (loc,
                                                          ssizetype, arg1));
          if (tem)
            return fold_convert_loc (loc, type, tem);
        }

      return NULL_TREE;

    case PLUS_EXPR:
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR
          && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                fold_convert_loc (loc, type, arg0),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1)
          && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                fold_convert_loc (loc, type, arg1),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)));

      if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
        {
          /* Convert ~A + 1 to -A.  */
          if (TREE_CODE (arg0) == BIT_NOT_EXPR
              && integer_onep (arg1))
            return fold_build1_loc (loc, NEGATE_EXPR, type,
                                    fold_convert_loc (loc, type,
                                                      TREE_OPERAND (arg0, 0)));

          /* ~X + X is -1.  */
          if (TREE_CODE (arg0) == BIT_NOT_EXPR
              && !TYPE_OVERFLOW_TRAPS (type))
            {
              tree tem = TREE_OPERAND (arg0, 0);

              STRIP_NOPS (tem);
              if (operand_equal_p (tem, arg1, 0))
                {
                  t1 = build_all_ones_cst (type);
                  return omit_one_operand_loc (loc, type, t1, arg1);
                }
            }

          /* X + ~X is -1.  */
          if (TREE_CODE (arg1) == BIT_NOT_EXPR
              && !TYPE_OVERFLOW_TRAPS (type))
            {
              tree tem = TREE_OPERAND (arg1, 0);

              STRIP_NOPS (tem);
              if (operand_equal_p (arg0, tem, 0))
                {
                  t1 = build_all_ones_cst (type);
                  return omit_one_operand_loc (loc, type, t1, arg0);
                }
            }

          /* X + (X / CST) * -CST is X % CST.  */
          if (TREE_CODE (arg1) == MULT_EXPR
              && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
              && operand_equal_p (arg0,
                                  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
            {
              tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
              tree cst1 = TREE_OPERAND (arg1, 1);
              tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
                                          cst1, cst0);
              if (sum && integer_zerop (sum))
                return fold_convert_loc (loc, type,
                                         fold_build2_loc (loc, TRUNC_MOD_EXPR,
                                                          TREE_TYPE (arg0), arg0,
                                                          cst0));
            }
        }
      /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
         one.  Make sure the type is not saturating and has the signedness of
         the stripped operands, as fold_plusminus_mult_expr will re-associate.
         ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
           || TREE_CODE (arg1) == MULT_EXPR)
          && !TYPE_SATURATING (type)
          && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
          && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
          && (!FLOAT_TYPE_P (type) || flag_associative_math))
        {
          tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
          if (tem)
            return tem;
        }
      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
             with a constant, and the two constants have no bits in common,
             we should treat this as a BIT_IOR_EXPR since this may produce more
             simplifications.  */
          if (TREE_CODE (arg0) == BIT_AND_EXPR
              && TREE_CODE (arg1) == BIT_AND_EXPR
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
              && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
              && wi::bit_and (TREE_OPERAND (arg0, 1),
                              TREE_OPERAND (arg1, 1)) == 0)
            {
              code = BIT_IOR_EXPR;
              goto bit_ior;
            }

          /* Reassociate (plus (plus (mult) (foo)) (mult)) as
             (plus (plus (mult) (mult)) (foo)) so that we can
             take advantage of the factoring cases below.  */
          if (TYPE_OVERFLOW_WRAPS (type)
              && (((TREE_CODE (arg0) == PLUS_EXPR
                    || TREE_CODE (arg0) == MINUS_EXPR)
                   && TREE_CODE (arg1) == MULT_EXPR)
                  || ((TREE_CODE (arg1) == PLUS_EXPR
                       || TREE_CODE (arg1) == MINUS_EXPR)
                      && TREE_CODE (arg0) == MULT_EXPR)))
            {
              tree parg0, parg1, parg, marg;
              enum tree_code pcode;

              if (TREE_CODE (arg1) == MULT_EXPR)
                parg = arg0, marg = arg1;
              else
                parg = arg1, marg = arg0;
              pcode = TREE_CODE (parg);
              parg0 = TREE_OPERAND (parg, 0);
              parg1 = TREE_OPERAND (parg, 1);
              STRIP_NOPS (parg0);
              STRIP_NOPS (parg1);

              if (TREE_CODE (parg0) == MULT_EXPR
                  && TREE_CODE (parg1) != MULT_EXPR)
                return fold_build2_loc (loc, pcode, type,
                                        fold_build2_loc (loc, PLUS_EXPR, type,
                                                         fold_convert_loc (loc, type,
                                                                           parg0),
                                                         fold_convert_loc (loc, type,
                                                                           marg)),
                                        fold_convert_loc (loc, type, parg1));
              if (TREE_CODE (parg0) != MULT_EXPR
                  && TREE_CODE (parg1) == MULT_EXPR)
                return
                  fold_build2_loc (loc, PLUS_EXPR, type,
                                   fold_convert_loc (loc, type, parg0),
                                   fold_build2_loc (loc, pcode, type,
                                                    fold_convert_loc (loc, type, marg),
                                                    fold_convert_loc (loc, type,
                                                                      parg1)));
            }
        }
      else
        {
          /* See if ARG1 is zero and X + ARG1 reduces to X.  */
          if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* Likewise if the operands are reversed.  */
          if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

          /* Convert X + -C into X - C.  */
          if (TREE_CODE (arg1) == REAL_CST
              && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
            {
              tem = fold_negate_const (arg1, type);
              if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
                return fold_build2_loc (loc, MINUS_EXPR, type,
                                        fold_convert_loc (loc, type, arg0),
                                        fold_convert_loc (loc, type, tem));
            }

          /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
             to __complex__ ( x, y ).  This is not the same for SNaNs or
             if signed zeros are involved.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
            {
              tree rtype = TREE_TYPE (TREE_TYPE (arg0));
              tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
              tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
              bool arg0rz = false, arg0iz = false;
              if ((arg0r && (arg0rz = real_zerop (arg0r)))
                  || (arg0i && (arg0iz = real_zerop (arg0i))))
                {
                  tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
                  tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
                  if (arg0rz && arg1i && real_zerop (arg1i))
                    {
                      tree rp = arg1r ? arg1r
                        : build1 (REALPART_EXPR, rtype, arg1);
                      tree ip = arg0i ? arg0i
                        : build1 (IMAGPART_EXPR, rtype, arg0);
                      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                    }
                  else if (arg0iz && arg1r && real_zerop (arg1r))
                    {
                      tree rp = arg0r ? arg0r
                        : build1 (REALPART_EXPR, rtype, arg0);
                      tree ip = arg1i ? arg1i
                        : build1 (IMAGPART_EXPR, rtype, arg1);
                      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                    }
                }
            }
          if (flag_unsafe_math_optimizations
              && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
              && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
              && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
            return tem;

          /* Convert x+x into x*2.0.  */
          if (operand_equal_p (arg0, arg1, 0)
              && SCALAR_FLOAT_TYPE_P (type))
            return fold_build2_loc (loc, MULT_EXPR, type, arg0,
                                    build_real (type, dconst2));

          /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
             We associate floats only if the user has specified
             -fassociative-math.  */
          if (flag_associative_math
              && TREE_CODE (arg1) == PLUS_EXPR
              && TREE_CODE (arg0) != MULT_EXPR)
            {
              tree tree10 = TREE_OPERAND (arg1, 0);
              tree tree11 = TREE_OPERAND (arg1, 1);
              if (TREE_CODE (tree11) == MULT_EXPR
                  && TREE_CODE (tree10) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
                  return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
                }
            }
          /* Convert (b*c + d*e) + a into b*c + (d*e + a).
             We associate floats only if the user has specified
             -fassociative-math.  */
          if (flag_associative_math
              && TREE_CODE (arg0) == PLUS_EXPR
              && TREE_CODE (arg1) != MULT_EXPR)
            {
              tree tree00 = TREE_OPERAND (arg0, 0);
              tree tree01 = TREE_OPERAND (arg0, 1);
              if (TREE_CODE (tree01) == MULT_EXPR
                  && TREE_CODE (tree00) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
                  return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
                }
            }
        }
    bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
         is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
         is a rotate of A by B bits.  */
      {
        enum tree_code code0, code1;
        tree rtype;
        code0 = TREE_CODE (arg0);
        code1 = TREE_CODE (arg1);
        if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
             || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
            && operand_equal_p (TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg1, 0), 0)
            && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
                TYPE_UNSIGNED (rtype))
            /* Only create rotates in complete modes.  Other cases are not
               expanded properly.  */
            && (element_precision (rtype)
                == element_precision (TYPE_MODE (rtype))))
          {
            tree tree01, tree11;
            enum tree_code code01, code11;

            tree01 = TREE_OPERAND (arg0, 1);
            tree11 = TREE_OPERAND (arg1, 1);
            STRIP_NOPS (tree01);
            STRIP_NOPS (tree11);
            code01 = TREE_CODE (tree01);
            code11 = TREE_CODE (tree11);
            if (code01 == INTEGER_CST
                && code11 == INTEGER_CST
                && (wi::to_widest (tree01) + wi::to_widest (tree11)
                    == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
              {
                tem = build2_loc (loc, LROTATE_EXPR,
                                  TREE_TYPE (TREE_OPERAND (arg0, 0)),
                                  TREE_OPERAND (arg0, 0),
                                  code0 == LSHIFT_EXPR ? tree01 : tree11);
                return fold_convert_loc (loc, type, tem);
              }
            else if (code11 == MINUS_EXPR)
              {
                tree tree110, tree111;
                tree110 = TREE_OPERAND (tree11, 0);
                tree111 = TREE_OPERAND (tree11, 1);
                STRIP_NOPS (tree110);
                STRIP_NOPS (tree111);
                if (TREE_CODE (tree110) == INTEGER_CST
                    && 0 == compare_tree_int (tree110,
                                              element_precision
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree01, tree111, 0))
                  return
                    fold_convert_loc (loc, type,
                                      build2 ((code0 == LSHIFT_EXPR
                                               ? LROTATE_EXPR
                                               : RROTATE_EXPR),
                                              TREE_TYPE (TREE_OPERAND (arg0, 0)),
                                              TREE_OPERAND (arg0, 0), tree01));
              }
            else if (code01 == MINUS_EXPR)
              {
                tree tree010, tree011;
                tree010 = TREE_OPERAND (tree01, 0);
                tree011 = TREE_OPERAND (tree01, 1);
                STRIP_NOPS (tree010);
                STRIP_NOPS (tree011);
                if (TREE_CODE (tree010) == INTEGER_CST
                    && 0 == compare_tree_int (tree010,
                                              element_precision
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree11, tree011, 0))
                  return fold_convert_loc
                    (loc, type,
                     build2 ((code0 != LSHIFT_EXPR
                              ? LROTATE_EXPR
                              : RROTATE_EXPR),
                             TREE_TYPE (TREE_OPERAND (arg0, 0)),
                             TREE_OPERAND (arg0, 0), tree11));
              }
          }
      }
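
      /* Illustrative example (not from the original source): for a
         32-bit unsigned x, both

           (x << 3) + (x >> 29)    and    (x << n) + (x >> (32 - n))

         match the patterns above and are rewritten as left rotates of
         x by 3 and by n respectively, which most targets can expand
         to a single rotate instruction.  */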
    associate:
      /* In most languages, can't associate operations on floats through
         parentheses.  Rather than remember where the parentheses were, we
         don't associate floats at all, unless the user has specified
         -fassociative-math.
         And, we need to make sure type is not saturating.  */

      if ((! FLOAT_TYPE_P (type) || flag_associative_math)
          && !TYPE_SATURATING (type))
        {
          tree var0, con0, lit0, minus_lit0;
          tree var1, con1, lit1, minus_lit1;
          tree atype = type;
          bool ok = true;

          /* Split both trees into variables, constants, and literals.  Then
             associate each group together, the constants with literals,
             then the result with variables.  This increases the chances of
             literals being recombined later and of generating relocatable
             expressions for the sum of a constant and literal.  */
          var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
          var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
                             code == MINUS_EXPR);

          /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
          if (code == MINUS_EXPR)
            code = PLUS_EXPR;

          /* With undefined overflow prefer doing association in a type
             which wraps on overflow, if that is one of the operand types.  */
          if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
              || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
            {
              if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
                  && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
                atype = TREE_TYPE (arg0);
              else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
                       && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
                atype = TREE_TYPE (arg1);
              gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
            }

          /* With undefined overflow we can only associate constants with one
             variable, and constants whose association doesn't overflow.  */
          if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
              || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
            {
              if (var0 && var1)
                {
                  tree tmp0 = var0;
                  tree tmp1 = var1;

                  if (TREE_CODE (tmp0) == NEGATE_EXPR)
                    tmp0 = TREE_OPERAND (tmp0, 0);
                  if (CONVERT_EXPR_P (tmp0)
                      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
                      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
                          <= TYPE_PRECISION (atype)))
                    tmp0 = TREE_OPERAND (tmp0, 0);
                  if (TREE_CODE (tmp1) == NEGATE_EXPR)
                    tmp1 = TREE_OPERAND (tmp1, 0);
                  if (CONVERT_EXPR_P (tmp1)
                      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
                      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
                          <= TYPE_PRECISION (atype)))
                    tmp1 = TREE_OPERAND (tmp1, 0);
                  /* The only case we can still associate with two variables
                     is if they are the same, modulo negation and bit-pattern
                     preserving conversions.  */
                  if (!operand_equal_p (tmp0, tmp1, 0))
                    ok = false;
                }
            }

          /* Only do something if we found more than two objects.  Otherwise,
             nothing has changed and we risk infinite recursion.  */
          if (ok
              && (2 < ((var0 != 0) + (var1 != 0)
                       + (con0 != 0) + (con1 != 0)
                       + (lit0 != 0) + (lit1 != 0)
                       + (minus_lit0 != 0) + (minus_lit1 != 0))))
            {
              bool any_overflows = false;
              if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
              if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
              if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
              if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
              var0 = associate_trees (loc, var0, var1, code, atype);
              con0 = associate_trees (loc, con0, con1, code, atype);
              lit0 = associate_trees (loc, lit0, lit1, code, atype);
              minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
                                            code, atype);

              /* Preserve the MINUS_EXPR if the negative part of the literal is
                 greater than the positive part.  Otherwise, the multiplicative
                 folding code (i.e. extract_muldiv) may be fooled in case
                 unsigned constants are subtracted, like in the following
                 example: ((X*2 + 4) - 8U)/2.  */
              if (minus_lit0 && lit0)
                {
                  if (TREE_CODE (lit0) == INTEGER_CST
                      && TREE_CODE (minus_lit0) == INTEGER_CST
                      && tree_int_cst_lt (lit0, minus_lit0))
                    {
                      minus_lit0 = associate_trees (loc, minus_lit0, lit0,
                                                    MINUS_EXPR, atype);
                      lit0 = 0;
                    }
                  else
                    {
                      lit0 = associate_trees (loc, lit0, minus_lit0,
                                              MINUS_EXPR, atype);
                      minus_lit0 = 0;
                    }
                }

              /* Don't introduce overflows through reassociation.  */
              if (!any_overflows
                  && ((lit0 && TREE_OVERFLOW (lit0))
                      || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
                return NULL_TREE;

              if (minus_lit0)
                {
                  if (con0 == 0)
                    return
                      fold_convert_loc (loc, type,
                                        associate_trees (loc, var0, minus_lit0,
                                                         MINUS_EXPR, atype));
                  else
                    {
                      con0 = associate_trees (loc, con0, minus_lit0,
                                              MINUS_EXPR, atype);
                      return
                        fold_convert_loc (loc, type,
                                          associate_trees (loc, var0, con0,
                                                           PLUS_EXPR, atype));
                    }
                }

              con0 = associate_trees (loc, con0, lit0, code, atype);
              return
                fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
                                                              code, atype));
            }
        }

      return NULL_TREE;

    case MINUS_EXPR:
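
      /* Illustrative example (not from the original source): for
         (x + 4) + 8 in a wrapping type, split_tree yields the variable
         x and the literals 4 and 8; associating the literal group
         first lets the whole expression fold to x + 12 while the
         variable part is left untouched.  */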
      /* Pointer simplifications for subtraction, simple reassociations.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
        {
          /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
          if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
              && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
            {
              tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
              tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
              tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
              tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
              return fold_build2_loc (loc, PLUS_EXPR, type,
                                      fold_build2_loc (loc, MINUS_EXPR, type,
                                                       arg00, arg10),
                                      fold_build2_loc (loc, MINUS_EXPR, type,
                                                       arg01, arg11));
            }
          /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1
             simplifies.  */
          else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
            {
              tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
              tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
              tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
                                          fold_convert_loc (loc, type, arg1));
              if (tmp)
                return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
            }
          /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
             simplifies.  */
          else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
            {
              tree arg10 = fold_convert_loc (loc, type,
                                             TREE_OPERAND (arg1, 0));
              tree arg11 = fold_convert_loc (loc, type,
                                             TREE_OPERAND (arg1, 1));
              tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg0,
                                          fold_convert_loc (loc, type, arg10));
              if (tmp)
                return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
            }
        }
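
      /* Illustrative example (not from the original source): for
         (p p+ 8) - (p p+ 4) the first pattern above produces
         (p - p) + (8 - 4), and the recursive folds reduce that to
         the constant 4, with no pointer arithmetic left in the
         result.  */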
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2_loc (loc, PLUS_EXPR, type, op0,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && negate_expr_p (arg1)
          && reorder_operands_p (arg0, arg1))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg1)),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)));
      /* Convert -A - 1 to ~A.  */
      if (TREE_CODE (type) != COMPLEX_TYPE
          && TREE_CODE (arg0) == NEGATE_EXPR
          && integer_onep (arg1)
          && !TYPE_OVERFLOW_TRAPS (type))
        return fold_build1_loc (loc, BIT_NOT_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)));

      /* Convert -1 - A to ~A.  */
      if (TREE_CODE (type) != COMPLEX_TYPE
          && integer_all_onesp (arg0))
        return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);

      /* X - (X / Y) * Y is X % Y.  */
      if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
          && TREE_CODE (arg1) == MULT_EXPR
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
          && operand_equal_p (arg0,
                              TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
                              TREE_OPERAND (arg1, 1), 0))
        return
          fold_convert_loc (loc, type,
                            fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
                                             arg0, TREE_OPERAND (arg1, 1)));
      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg0))
            return negate_expr (fold_convert_loc (loc, type, arg1));
          if (integer_zerop (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* Fold A - (A & B) into ~B & A.  */
          if (!TREE_SIDE_EFFECTS (arg0)
              && TREE_CODE (arg1) == BIT_AND_EXPR)
            {
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
                {
                  tree arg10 = fold_convert_loc (loc, type,
                                                 TREE_OPERAND (arg1, 0));
                  return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                          fold_build1_loc (loc, BIT_NOT_EXPR,
                                                           type, arg10),
                                          fold_convert_loc (loc, type, arg0));
                }
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
                {
                  tree arg11 = fold_convert_loc (loc,
                                                 type, TREE_OPERAND (arg1, 1));
                  return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                          fold_build1_loc (loc, BIT_NOT_EXPR,
                                                           type, arg11),
                                          fold_convert_loc (loc, type, arg0));
                }
            }

          /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
             any power of 2 minus 1.  */
          if (TREE_CODE (arg0) == BIT_AND_EXPR
              && TREE_CODE (arg1) == BIT_AND_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 0), 0))
            {
              tree mask0 = TREE_OPERAND (arg0, 1);
              tree mask1 = TREE_OPERAND (arg1, 1);
              tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);

              if (operand_equal_p (tem, mask1, 0))
                {
                  tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
                                         TREE_OPERAND (arg0, 0), mask1);
                  return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
                }
            }
        }

      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
         ARG0 is zero and X + ARG0 reduces to X, since that would mean
         (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
        return negate_expr (fold_convert_loc (loc, type, arg1));
      /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
         __complex__ ( x, -y ).  This is not the same for SNaNs or if
         signed zeros are involved.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
          && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
        {
          tree rtype = TREE_TYPE (TREE_TYPE (arg0));
          tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
          tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
          bool arg0rz = false, arg0iz = false;
          if ((arg0r && (arg0rz = real_zerop (arg0r)))
              || (arg0i && (arg0iz = real_zerop (arg0i))))
            {
              tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
              tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
              if (arg0rz && arg1i && real_zerop (arg1i))
                {
                  tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
                                             arg1r ? arg1r
                                             : build1 (REALPART_EXPR, rtype, arg1));
                  tree ip = arg0i ? arg0i
                    : build1 (IMAGPART_EXPR, rtype, arg0);
                  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                }
              else if (arg0iz && arg1r && real_zerop (arg1r))
                {
                  tree rp = arg0r ? arg0r
                    : build1 (REALPART_EXPR, rtype, arg0);
                  tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
                                             arg1i ? arg1i
                                             : build1 (IMAGPART_EXPR, rtype, arg1));
                  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                }
            }
        }
      /* Fold &x - &x.  This can happen from &x.foo - &x.
         This is unsafe for certain floats even in non-IEEE formats.
         In IEEE, it is unsafe because it does wrong for NaNs.
         Also note that operand_equal_p is always false if an operand
         is volatile.  */

      if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
          && operand_equal_p (arg0, arg1, 0))
        return build_zero_cst (type);

      /* A - B -> A + (-B) if B is easily negatable.  */
      if (negate_expr_p (arg1)
          && ((FLOAT_TYPE_P (type)
               /* Avoid this transformation if B is a positive REAL_CST.  */
               && (TREE_CODE (arg1) != REAL_CST
                   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
              || INTEGRAL_TYPE_P (type)))
        return fold_build2_loc (loc, PLUS_EXPR, type,
                                fold_convert_loc (loc, type, arg0),
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg1)));

      /* Try folding difference of addresses.  */
      {
        HOST_WIDE_INT diff;

        if ((TREE_CODE (arg0) == ADDR_EXPR
             || TREE_CODE (arg1) == ADDR_EXPR)
            && ptr_difference_const (arg0, arg1, &diff))
          return build_int_cst_type (type, diff);
      }

      /* Fold &a[i] - &a[j] to i-j.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
          && TREE_CODE (arg1) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
        {
          tree tem = fold_addr_of_array_ref_difference (loc, type,
                                                        TREE_OPERAND (arg0, 0),
                                                        TREE_OPERAND (arg1, 0));
          if (tem)
            return tem;
        }

      if (FLOAT_TYPE_P (type)
          && flag_unsafe_math_optimizations
          && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
          && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
          && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
        return tem;

      /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
         one.  Make sure the type is not saturating and has the signedness of
         the stripped operands, as fold_plusminus_mult_expr will re-associate.
         ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
           || TREE_CODE (arg1) == MULT_EXPR)
          && !TYPE_SATURATING (type)
          && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
          && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
          && (!FLOAT_TYPE_P (type) || flag_associative_math))
        {
          tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
          if (tem)
            return tem;
        }

      goto associate;

    case MULT_EXPR:
      /* (-A) * (-B) -> A * B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold_build2_loc (loc, MULT_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)),
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold_build2_loc (loc, MULT_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg0)),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));

      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg1))
            return omit_one_operand_loc (loc, type, arg1, arg0);
          if (integer_onep (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
          /* Transform x * -1 into -x.  Make sure to do the negation
             on the original operand with conversions not stripped
             because we can only strip non-sign-changing conversions.  */
          if (integer_minus_onep (arg1))
            return fold_convert_loc (loc, type, negate_expr (op0));
          /* Transform x * -C into -x * C if x is easily negatable.  */
          if (TREE_CODE (arg1) == INTEGER_CST
              && tree_int_cst_sgn (arg1) == -1
              && negate_expr_p (arg0)
              && (tem = negate_expr (arg1)) != arg1
              && !TREE_OVERFLOW (tem))
            return fold_build2_loc (loc, MULT_EXPR, type,
                                    fold_convert_loc (loc, type,
                                                      negate_expr (arg0)),
                                    tem);

          /* (a * (1 << b)) is (a << b)  */
          if (TREE_CODE (arg1) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg1, 0)))
            return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
                                    TREE_OPERAND (arg1, 1));
          if (TREE_CODE (arg0) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg0, 0)))
            return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
                                    TREE_OPERAND (arg0, 1));

          /* (A + A) * C -> A * 2 * C  */
          if (TREE_CODE (arg0) == PLUS_EXPR
              && TREE_CODE (arg1) == INTEGER_CST
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg0, 1), 0))
            return fold_build2_loc (loc, MULT_EXPR, type,
                                    omit_one_operand_loc (loc, type,
                                                          TREE_OPERAND (arg0, 0),
                                                          TREE_OPERAND (arg0, 1)),
                                    fold_build2_loc (loc, MULT_EXPR, type,
                                                     build_int_cst (type, 2),
                                                     arg1));

          /* ((T) (X /[ex] C)) * C cancels out if the conversion is
             sign-changing only.  */
          if (TREE_CODE (arg1) == INTEGER_CST
              && TREE_CODE (arg0) == EXACT_DIV_EXPR
              && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
            return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));

          strict_overflow_p = false;
          if (TREE_CODE (arg1) == INTEGER_CST
              && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                             &strict_overflow_p)))
            {
              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying "
                                        "multiplication"),
                                       WARN_STRICT_OVERFLOW_MISC);
              return fold_convert_loc (loc, type, tem);
            }

          /* Optimize z * conj(z) for integer complex numbers.  */
          if (TREE_CODE (arg0) == CONJ_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
            return fold_mult_zconjz (loc, type, arg1);
          if (TREE_CODE (arg1) == CONJ_EXPR
              && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
            return fold_mult_zconjz (loc, type, arg0);
        }
      else
        {
          /* Maybe fold x * 0 to 0.  The expressions aren't the same
             when x is NaN, since x * 0 is also NaN.  Nor are they the
             same in modes with signed zeros, since multiplying a
             negative value by 0 gives -0, not +0.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && real_zerop (arg1))
            return omit_one_operand_loc (loc, type, arg1, arg0);
          /* In IEEE floating point, x*1 is not equivalent to x for snans.
             Likewise for complex arithmetic with signed zeros.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
                  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
              && real_onep (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* Transform x * -1.0 into -x.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
                  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
              && real_minus_onep (arg1))
            return fold_convert_loc (loc, type, negate_expr (arg0));

          /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may change
             the result for floating point types due to rounding so it is
             applied only if -fassociative-math was specified.  */
          if (flag_associative_math
              && TREE_CODE (arg0) == RDIV_EXPR
              && TREE_CODE (arg1) == REAL_CST
              && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
            {
              tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
                                      arg1);
              if (tem)
                return fold_build2_loc (loc, RDIV_EXPR, type, tem,
                                        TREE_OPERAND (arg0, 1));
            }

          /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
          if (operand_equal_p (arg0, arg1, 0))
            {
              tree tem = fold_strip_sign_ops (arg0);
              if (tem != NULL_TREE)
                {
                  tem = fold_convert_loc (loc, type, tem);
                  return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
                }
            }

          /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
             This is not the same for NaNs or if signed zeros are
             involved.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
              && TREE_CODE (arg1) == COMPLEX_CST
              && real_zerop (TREE_REALPART (arg1)))
            {
              tree rtype = TREE_TYPE (TREE_TYPE (arg0));
              if (real_onep (TREE_IMAGPART (arg1)))
                return
                  fold_build2_loc (loc, COMPLEX_EXPR, type,
                                   negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
                                                                 rtype, arg0)),
                                   fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
              else if (real_minus_onep (TREE_IMAGPART (arg1)))
                return
                  fold_build2_loc (loc, COMPLEX_EXPR, type,
                                   fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
                                   negate_expr (fold_build1_loc (loc, REALPART_EXPR,
                                                                 rtype, arg0)));
            }

          /* Optimize z * conj(z) for floating point complex numbers.
             Guarded by flag_unsafe_math_optimizations as non-finite
             imaginary components don't produce scalar results.  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg0) == CONJ_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
            return fold_mult_zconjz (loc, type, arg1);
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg1) == CONJ_EXPR
              && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
            return fold_mult_zconjz (loc, type, arg0);

          if (flag_unsafe_math_optimizations)
            {
              enum built_in_function fcode0 = builtin_mathfn_code (arg0);
              enum built_in_function fcode1 = builtin_mathfn_code (arg1);

              /* Optimizations of root(...)*root(...).  */
              if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
                {
                  tree rootfn, arg;
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);

                  /* Optimize sqrt(x)*sqrt(x) as x.  */
                  if (BUILTIN_SQRT_P (fcode0)
                      && operand_equal_p (arg00, arg10, 0)
                      && ! HONOR_SNANS (TYPE_MODE (type)))
                    return arg00;

                  /* Optimize root(x)*root(y) as root(x*y).  */
                  rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
                  return build_call_expr_loc (loc, rootfn, 1, arg);
                }

              /* Optimize expN(x)*expN(y) as expN(x+y).  */
              if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
                {
                  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
                                              CALL_EXPR_ARG (arg0, 0),
                                              CALL_EXPR_ARG (arg1, 0));
                  return build_call_expr_loc (loc, expfn, 1, arg);
                }

              /* Optimizations of pow(...)*pow(...).  */
              if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
                  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
                  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
                {
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg01 = CALL_EXPR_ARG (arg0, 1);
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);
                  tree arg11 = CALL_EXPR_ARG (arg1, 1);

                  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
                  if (operand_equal_p (arg01, arg11, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      tree arg = fold_build2_loc (loc, MULT_EXPR, type,
                                                  arg00, arg10);
                      return build_call_expr_loc (loc, powfn, 2, arg, arg01);
                    }

                  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
                  if (operand_equal_p (arg00, arg10, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
                                                  arg01, arg11);
                      return build_call_expr_loc (loc, powfn, 2, arg00, arg);
                    }
                }

              /* Optimize tan(x)*cos(x) as sin(x).  */
              if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
                   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
                   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
                   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
                   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
                   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
                  && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                      CALL_EXPR_ARG (arg1, 0), 0))
                {
                  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);

                  if (sinfn != NULL_TREE)
                    return build_call_expr_loc (loc, sinfn, 1,
                                                CALL_EXPR_ARG (arg0, 0));
                }

              /* Optimize x*pow(x,c) as pow(x,c+1).  */
              if (fcode1 == BUILT_IN_POW
                  || fcode1 == BUILT_IN_POWF
                  || fcode1 == BUILT_IN_POWL)
                {
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);
                  tree arg11 = CALL_EXPR_ARG (arg1, 1);
                  if (TREE_CODE (arg11) == REAL_CST
                      && !TREE_OVERFLOW (arg11)
                      && operand_equal_p (arg0, arg10, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
                      REAL_VALUE_TYPE c;
                      tree arg;

                      c = TREE_REAL_CST (arg11);
                      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                      arg = build_real (type, c);
                      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
                    }
                }

              /* Optimize pow(x,c)*x as pow(x,c+1).  */
              if (fcode0 == BUILT_IN_POW
                  || fcode0 == BUILT_IN_POWF
                  || fcode0 == BUILT_IN_POWL)
                {
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg01 = CALL_EXPR_ARG (arg0, 1);
                  if (TREE_CODE (arg01) == REAL_CST
                      && !TREE_OVERFLOW (arg01)
                      && operand_equal_p (arg1, arg00, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      REAL_VALUE_TYPE c;
                      tree arg;

                      c = TREE_REAL_CST (arg01);
                      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                      arg = build_real (type, c);
                      return build_call_expr_loc (loc, powfn, 2, arg1, arg);
                    }
                }

              /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x.  */
              if (!in_gimple_form
                  && optimize
                  && operand_equal_p (arg0, arg1, 0))
                {
                  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

                  if (powfn)
                    {
                      tree arg = build_real (type, dconst2);
                      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
                    }
                }
            }
        }
      goto associate;
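
      /* Illustrative example (not from the original source): under
         -funsafe-math-optimizations, pow (x, 2.0) * pow (x, 3.0)
         matches the pow(x,y)*pow(x,z) rule above and becomes
         pow (x, 5.0), and x * pow (x, 2.0) becomes pow (x, 3.0);
         outside gimple, x * x is itself canonicalized to
         pow (x, 2.0) so those rules get a chance to see it.  */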
    case BIT_IOR_EXPR:
    bit_ior:
      if (integer_all_onesp (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* ~X | X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          t1 = build_zero_cst (type);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg1);
        }

      /* X | ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          t1 = build_zero_cst (type);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg0);
        }

      /* Canonicalize (X & C1) | C2.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          int width = TYPE_PRECISION (type), w;
          wide_int c1 = TREE_OPERAND (arg0, 1);
          wide_int c2 = arg1;

          /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
          if ((c1 & c2) == c1)
            return omit_one_operand_loc (loc, type, arg1,
                                         TREE_OPERAND (arg0, 0));

          wide_int msk = wi::mask (width, false,
                                   TYPE_PRECISION (TREE_TYPE (arg1)));

          /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
          if (msk.and_not (c1 | c2) == 0)
            return fold_build2_loc (loc, BIT_IOR_EXPR, type,
                                    TREE_OPERAND (arg0, 0), arg1);

          /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
             unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
             mode which allows further optimizations.  */
          c1 &= msk;
          c2 &= msk;
          wide_int c3 = c1.and_not (c2);
          for (w = BITS_PER_UNIT; w <= width; w <<= 1)
            {
              wide_int mask = wi::mask (w, false,
                                        TYPE_PRECISION (type));
              if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
                {
                  c3 = mask;
                  break;
                }
            }

          if (c3 != c1)
            return fold_build2_loc (loc, BIT_IOR_EXPR, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, type,
                                                     TREE_OPERAND (arg0, 0),
                                                     wide_int_to_tree (type,
                                                                       c3)),
                                    arg1);
        }
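
      /* Illustrative example (not from the original source): for 32-bit
         (x & 0xff) | 0x0f, C1 & ~C2 is 0xf0, but the loop above finds
         that w == 8 gives a mask 0xff fully covered by C1 | C2 with C1
         contained in it, so C3 is reset to 0xff == C1 and the
         expression is left as (x & 0xff) | 0x0f instead of being
         rewritten to (x & 0xf0) | 0x0f, preserving the byte-mask form
         some targets match directly.  */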
      /* (X & Y) | Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
      /* (X & Y) | X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
      /* X | (X & Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
      /* X | (Y & X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));

      /* (X & ~Y) | (~X & Y) is X ^ Y */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == BIT_AND_EXPR)
        {
          tree a0, a1, l0, l1, n0, n1;

          a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
          a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));

          l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));

          n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
          n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);

          if ((operand_equal_p (n0, a0, 0)
               && operand_equal_p (n1, a1, 0))
              || (operand_equal_p (n0, a1, 0)
                  && operand_equal_p (n1, a0, 0)))
            return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
        }

      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
        return t1;

      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

         This results in more efficient code for machines without a NAND
         instruction.  Combine will canonicalize to the first form
         which will allow use of NAND instructions provided by the
         backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        {
          return
            fold_build1_loc (loc, BIT_NOT_EXPR, type,
                             build2 (BIT_AND_EXPR, type,
                                     fold_convert_loc (loc, type,
                                                       TREE_OPERAND (arg0, 0)),
                                     fold_convert_loc (loc, type,
                                                       TREE_OPERAND (arg1, 0))));
        }

      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_XOR_EXPR:
11517 if (integer_zerop (arg1
))
11518 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11519 if (integer_all_onesp (arg1
))
11520 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, op0
);
11521 if (operand_equal_p (arg0
, arg1
, 0))
11522 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
11524 /* ~X ^ X is -1. */
11525 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11526 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11528 t1
= build_zero_cst (type
);
11529 t1
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
, t1
);
11530 return omit_one_operand_loc (loc
, type
, t1
, arg1
);
11533 /* X ^ ~X is -1. */
11534 if (TREE_CODE (arg1
) == BIT_NOT_EXPR
11535 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11537 t1
= build_zero_cst (type
);
11538 t1
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
, t1
);
11539 return omit_one_operand_loc (loc
, type
, t1
, arg0
);
11542 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11543 with a constant, and the two constants have no bits in common,
11544 we should treat this as a BIT_IOR_EXPR since this may produce more
11545 simplifications. */
11546 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11547 && TREE_CODE (arg1
) == BIT_AND_EXPR
11548 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
11549 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
11550 && wi::bit_and (TREE_OPERAND (arg0
, 1),
11551 TREE_OPERAND (arg1
, 1)) == 0)
11553 code
= BIT_IOR_EXPR
;
      /* (X | Y) ^ X -> Y & ~X.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  tree t2 = TREE_OPERAND (arg0, 1);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
				arg1);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				fold_convert_loc (loc, type, t2),
				fold_convert_loc (loc, type, t1));
	  return t1;
	}

      /* (Y | X) ^ X -> Y & ~X.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tree t2 = TREE_OPERAND (arg0, 0);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
				arg1);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				fold_convert_loc (loc, type, t2),
				fold_convert_loc (loc, type, t1));
	  return t1;
	}

      /* X ^ (X | Y) -> Y & ~X.  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
	{
	  tree t2 = TREE_OPERAND (arg1, 1);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
				arg0);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				fold_convert_loc (loc, type, t2),
				fold_convert_loc (loc, type, t1));
	  return t1;
	}

      /* X ^ (Y | X) -> Y & ~X.  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
	{
	  tree t2 = TREE_OPERAND (arg1, 0);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
				arg0);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				fold_convert_loc (loc, type, t2),
				fold_convert_loc (loc, type, t1));
	  return t1;
	}
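      /* All four variants above implement the identity
	 (X | Y) ^ X == Y & ~X; e.g. with X = 0b1100 and Y = 0b1010 both
	 sides evaluate to 0b0010.  */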
      /* Convert ~X ^ ~Y to X ^ Y.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));

      /* Convert ~X ^ C to X ^ ~C.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)),
				fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));

      /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_onep (arg1))
	return fold_build2_loc (loc, EQ_EXPR, type, arg0,
				build_zero_cst (TREE_TYPE (arg0)));
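      /* For instance, ~x ^ 0xff becomes x ^ ~0xff, i.e. x ^ 0xffffff00
	 for a 32-bit x, moving the negation into the constant; and
	 (x & 1) ^ 1 becomes the plain test (x & 1) == 0.  */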
      /* Fold (X & Y) ^ Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg1));
	}
      /* Fold (X & Y) ^ X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg1));
	}
      /* Fold X ^ (X & Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_convert_loc (loc, type, arg0),
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
	}
      /* Fold X ^ (Y & X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg0));
	}

      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_AND_EXPR:
      if (integer_all_onesp (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      if (operand_equal_p (arg0, arg1, 0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* ~X & X, (X == 0) & X, and !X & X are always zero.  */
      if ((TREE_CODE (arg0) == BIT_NOT_EXPR
	   || TREE_CODE (arg0) == TRUTH_NOT_EXPR
	   || (TREE_CODE (arg0) == EQ_EXPR
	       && integer_zerop (TREE_OPERAND (arg0, 1))))
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

      /* X & ~X, X & (X == 0), and X & !X are always zero.  */
      if ((TREE_CODE (arg1) == BIT_NOT_EXPR
	   || TREE_CODE (arg1) == TRUTH_NOT_EXPR
	   || (TREE_CODE (arg1) == EQ_EXPR
	       && integer_zerop (TREE_OPERAND (arg1, 1))))
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree tmp1 = fold_convert_loc (loc, type, arg1);
	  tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	  tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
	  tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
	  return
	    fold_convert_loc (loc, type,
			      fold_build2_loc (loc, BIT_IOR_EXPR,
					       type, tmp2, tmp3));
	}
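      /* For instance, (x | 3) & 5 distributes to (x & 5) | (3 & 5), i.e.
	 (x & 5) | 1, exposing the constants to further folding.  */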
      /* (X | Y) & Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
      /* (X | Y) & X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
      /* X & (X | Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
      /* X & (Y | X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));

      /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_onep (arg1))
	{
	  tree tem2;
	  tem = TREE_OPERAND (arg0, 0);
	  tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
	  tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
				  tem, tem2);
	  return fold_build2_loc (loc, EQ_EXPR, type, tem2,
				  build_zero_cst (TREE_TYPE (tem)));
	}

      /* Fold ~X & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && integer_onep (arg1))
	{
	  tree tem2;
	  tem = TREE_OPERAND (arg0, 0);
	  tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
	  tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
				  tem, tem2);
	  return fold_build2_loc (loc, EQ_EXPR, type, tem2,
				  build_zero_cst (TREE_TYPE (tem)));
	}

      /* Fold !X & 1 as X == 0.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && integer_onep (arg1))
	{
	  tem = TREE_OPERAND (arg0, 0);
	  return fold_build2_loc (loc, EQ_EXPR, type, tem,
				  build_zero_cst (TREE_TYPE (tem)));
	}
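      /* Each of the three folds above rewrites a parity-style test into an
	 equality; e.g. ~x & 1 tests the low bit of ~x, which is set exactly
	 when the low bit of x is clear, hence (x & 1) == 0.  */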
      /* Fold (X ^ Y) & Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg1));
	}
      /* Fold (X ^ Y) & X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg1));
	}
      /* Fold X & (X ^ Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_convert_loc (loc, type, arg0),
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
	}
      /* Fold X & (Y ^ X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg0));
	}

      /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
	 multiple of 1 << CST.  */
      if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  wide_int cst1 = arg1;
	  wide_int ncst1 = -cst1;
	  if ((cst1 & ncst1) == ncst1
	      && multiple_of_p (type, arg0,
				wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
	    return fold_convert_loc (loc, type, arg0);
	}
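      /* For instance, (x * 8) & -4 keeps x * 8 unchanged: -4 is a mask of
	 the form -(1 << 2) and x * 8 is always a multiple of 4, so the AND
	 clears no bits.  */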
      /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
	 bits from CST2.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == MULT_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  wide_int warg1 = arg1;
	  wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));

	  if (masked == 0)
	    return omit_two_operands_loc (loc, type, build_zero_cst (type),
					  arg0, arg1);
	  else if (masked != warg1)
	    {
	      /* Avoid the transform if arg1 is a mask of some
		 mode which allows further optimizations.  */
	      int pop = wi::popcount (warg1);
	      if (!(pop >= BITS_PER_UNIT
		    && exact_log2 (pop) != -1
		    && wi::mask (pop, false, warg1.get_precision ()) == warg1))
		return fold_build2_loc (loc, code, type, op0,
					wide_int_to_tree (type, masked));
	    }
	}
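      /* For instance, (x * 4) & 3 is always zero (the product has at least
	 two trailing zero bits), while (x * 4) & 7 drops the known-zero low
	 bits and becomes (x * 4) & 4.  */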
      /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
	 ((A & N) + B) & M -> (A + B) & M
	 Similarly if (N & M) == 0,
	 ((A | N) + B) & M -> (A + B) & M
	 and for - instead of + (or unary - instead of +)
	 and/or ^ instead of |.
	 If B is constant and (B & M) == 0, fold into A & M.  */
      if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  wide_int cst1 = arg1;
	  if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
	      && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	      && (TREE_CODE (arg0) == PLUS_EXPR
		  || TREE_CODE (arg0) == MINUS_EXPR
		  || TREE_CODE (arg0) == NEGATE_EXPR)
	      && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
		  || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
	    {
	      tree pmop[2];
	      int which = 0;
	      wide_int cst0;

	      /* Now we know that arg0 is (C + D) or (C - D) or
		 -C and arg1 (M) is == (1LL << cst) - 1.
		 Store C into PMOP[0] and D into PMOP[1].  */
	      pmop[0] = TREE_OPERAND (arg0, 0);
	      pmop[1] = NULL;
	      if (TREE_CODE (arg0) != NEGATE_EXPR)
		{
		  pmop[1] = TREE_OPERAND (arg0, 1);
		  which = 1;
		}

	      if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
		which = -1;

	      for (; which >= 0; which--)
		switch (TREE_CODE (pmop[which]))
		  {
		  case BIT_AND_EXPR:
		  case BIT_IOR_EXPR:
		  case BIT_XOR_EXPR:
		    if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
			!= INTEGER_CST)
		      break;
		    cst0 = TREE_OPERAND (pmop[which], 1);
		    cst0 &= cst1;
		    if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
		      {
			if (cst0 != cst1)
			  break;
		      }
		    else if (cst0 != 0)
		      break;
		    /* If C or D is of the form (A & N) where
		       (N & M) == M, or of the form (A | N) or
		       (A ^ N) where (N & M) == 0, replace it with A.  */
		    pmop[which] = TREE_OPERAND (pmop[which], 0);
		    break;
		  case INTEGER_CST:
		    /* If C or D is a N where (N & M) == 0, it can be
		       omitted (assumed 0).  */
		    if ((TREE_CODE (arg0) == PLUS_EXPR
			 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
			&& (cst1 & pmop[which]) == 0)
		      pmop[which] = NULL;
		    break;
		  default:
		    break;
		  }

	      /* Only build anything new if we optimized one or both arguments
		 above.  */
	      if (pmop[0] != TREE_OPERAND (arg0, 0)
		  || (TREE_CODE (arg0) != NEGATE_EXPR
		      && pmop[1] != TREE_OPERAND (arg0, 1)))
		{
		  tree utype = TREE_TYPE (arg0);
		  if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
		    {
		      /* Perform the operations in a type that has defined
			 overflow behavior.  */
		      utype = unsigned_type_for (TREE_TYPE (arg0));
		      if (pmop[0] != NULL)
			pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
		      if (pmop[1] != NULL)
			pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
		    }

		  if (TREE_CODE (arg0) == NEGATE_EXPR)
		    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
		  else if (TREE_CODE (arg0) == PLUS_EXPR)
		    {
		      if (pmop[0] != NULL && pmop[1] != NULL)
			tem = fold_build2_loc (loc, PLUS_EXPR, utype,
					       pmop[0], pmop[1]);
		      else if (pmop[0] != NULL)
			tem = pmop[0];
		      else if (pmop[1] != NULL)
			tem = pmop[1];
		      else
			return build_int_cst (type, 0);
		    }
		  else if (pmop[0] == NULL)
		    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
		  else
		    tem = fold_build2_loc (loc, MINUS_EXPR, utype,
					   pmop[0], pmop[1]);
		  /* TEM is now the new binary +, - or unary - replacement.  */
		  tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
					 fold_convert_loc (loc, utype, arg1));
		  return fold_convert_loc (loc, type, tem);
		}
	    }
	}
      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	{
	  prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

	  wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
	  if (mask == -1)
	    return
	      fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	}

      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

	 This results in more efficient code for machines without a NOR
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NOR instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold_build1_loc (loc, BIT_NOT_EXPR, type,
				  build2 (BIT_IOR_EXPR, type,
					  fold_convert_loc (loc, type,
							    TREE_OPERAND (arg0, 0)),
					  fold_convert_loc (loc, type,
							    TREE_OPERAND (arg1, 0))));
	}

      /* If arg0 is derived from the address of an object or function, we may
	 be able to fold this expression using the object or function's
	 alignment.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
	{
	  unsigned HOST_WIDE_INT modulus, residue;
	  unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);

	  modulus = get_pointer_modulus_and_residue (arg0, &residue,
						     integer_onep (arg1));

	  /* This works because modulus is a power of 2.  If this weren't the
	     case, we'd have to replace it by its greatest power-of-2
	     divisor: modulus & -modulus.  */
	  if (low < modulus)
	    return build_int_cst (type, residue & low);
	}
      /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
	      (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
	 if the new mask might be further optimized.  */
      if ((TREE_CODE (arg0) == LSHIFT_EXPR
	   || TREE_CODE (arg0) == RSHIFT_EXPR)
	  && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
	  && TREE_CODE (arg1) == INTEGER_CST
	  && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
	  && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
	  && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
	      < TYPE_PRECISION (TREE_TYPE (arg0))))
	{
	  unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
	  unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
	  unsigned HOST_WIDE_INT newmask, zerobits = 0;
	  tree shift_type = TREE_TYPE (arg0);

	  if (TREE_CODE (arg0) == LSHIFT_EXPR)
	    zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
	  else if (TREE_CODE (arg0) == RSHIFT_EXPR
		   && TYPE_PRECISION (TREE_TYPE (arg0))
		      == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
	    {
	      prec = TYPE_PRECISION (TREE_TYPE (arg0));
	      tree arg00 = TREE_OPERAND (arg0, 0);
	      /* See if more bits can be proven as zero because of
		 zero extension.  */
	      if (TREE_CODE (arg00) == NOP_EXPR
		  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
		{
		  tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
		  if (TYPE_PRECISION (inner_type)
		      == GET_MODE_PRECISION (TYPE_MODE (inner_type))
		      && TYPE_PRECISION (inner_type) < prec)
		    {
		      prec = TYPE_PRECISION (inner_type);
		      /* See if we can shorten the right shift.  */
		      if (shiftc < prec)
			shift_type = inner_type;
		      /* Otherwise X >> C1 is all zeros, so we'll optimize
			 it into (X, 0) later on by making sure zerobits
			 is all ones.  */
		    }
		}
	      zerobits = ~(unsigned HOST_WIDE_INT) 0;
	      if (shiftc < prec)
		{
		  zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
		  zerobits <<= prec - shiftc;
		}
	      /* For arithmetic shift if sign bit could be set, zerobits
		 can contain actually sign bits, so no transformation is
		 possible, unless MASK masks them all away.  In that
		 case the shift needs to be converted into logical shift.  */
	      if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
		  && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
		{
		  if ((mask & zerobits) == 0)
		    shift_type = unsigned_type_for (TREE_TYPE (arg0));
		  else
		    zerobits = 0;
		}
	    }

	  /* ((X << 16) & 0xff00) is (X, 0).  */
	  if ((mask & zerobits) == mask)
	    return omit_one_operand_loc (loc, type,
					 build_int_cst (type, 0), arg0);

	  newmask = mask | zerobits;
	  if (newmask != mask && (newmask & (newmask + 1)) == 0)
	    {
	      /* Only do the transformation if NEWMASK is some integer
		 mode's mask.  */
	      for (prec = BITS_PER_UNIT;
		   prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
		if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
		  break;
	      if (prec < HOST_BITS_PER_WIDE_INT
		  || newmask == ~(unsigned HOST_WIDE_INT) 0)
		{
		  tree newmaskt;

		  if (shift_type != TREE_TYPE (arg0))
		    {
		      tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
					     fold_convert_loc (loc, shift_type,
							       TREE_OPERAND (arg0, 0)),
					     TREE_OPERAND (arg0, 1));
		      tem = fold_convert_loc (loc, type, tem);
		    }
		  else
		    tem = op0;
		  newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
		  if (!tree_int_cst_equal (newmaskt, arg1))
		    return fold_build2_loc (loc, BIT_AND_EXPR, type,
					    tem, newmaskt);
		}
	    }
	}
      goto associate;

    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
	 of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
	  && real_zerop (arg1))
	return NULL_TREE;

      /* Optimize A / A to 1.0 if we don't care about
	 NaNs or Infinities.  Skip the transformation
	 for non-real operands.  */
      if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
	  && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
	  && operand_equal_p (arg0, arg1, 0))
	{
	  tree r = build_real (TREE_TYPE (arg0), dconst1);

	  return omit_two_operands_loc (loc, type, r, arg0, arg1);
	}

      /* The complex version of the above A / A optimization.  */
      if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
	  && operand_equal_p (arg0, arg1, 0))
	{
	  tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
	  if (! HONOR_NANS (TYPE_MODE (elem_type))
	      && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
	    {
	      tree r = build_real (elem_type, dconst1);
	      /* omit_two_operands will call fold_convert for us.  */
	      return omit_two_operands_loc (loc, type, r, arg0, arg1);
	    }
	}

      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				TREE_OPERAND (arg0, 0),
				negate_expr (arg1));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				negate_expr (arg0),
				TREE_OPERAND (arg1, 0));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_onep (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_minus_onep (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type,
						      negate_expr (arg0)));

      /* If ARG1 is a constant, we can convert this to a multiply by the
	 reciprocal.  This does not have the same rounding properties,
	 so only do this if -freciprocal-math.  We can actually
	 always safely do it if ARG1 is a power of two, but it's hard to
	 tell if it is or not in a portable manner.  */
      if (optimize
	  && (TREE_CODE (arg1) == REAL_CST
	      || (TREE_CODE (arg1) == COMPLEX_CST
		  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
	      || (TREE_CODE (arg1) == VECTOR_CST
		  && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
	{
	  if (flag_reciprocal_math
	      && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
	    return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
	  /* Find the reciprocal if optimizing and the result is exact.
	     TODO: Complex reciprocal not implemented.  */
	  if (TREE_CODE (arg1) != COMPLEX_CST)
	    {
	      tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);

	      if (inverse)
		return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
	    }
	}
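      /* For instance, with -freciprocal-math x / 5.0 becomes x * 0.2; when
	 optimizing, x / 4.0 becomes x * 0.25 even without that flag,
	 because 4.0 has an exactly representable inverse.  */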
      /* Convert A/B/C to A/(B*C).  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg0) == RDIV_EXPR)
	return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
				fold_build2_loc (loc, MULT_EXPR, type,
						 TREE_OPERAND (arg0, 1), arg1));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg1) == RDIV_EXPR)
	return fold_build2_loc (loc, MULT_EXPR, type,
				fold_build2_loc (loc, RDIV_EXPR, type, arg0,
						 TREE_OPERAND (arg1, 0)),
				TREE_OPERAND (arg1, 1));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg1) == MULT_EXPR
	  && TREE_CODE (arg0) == REAL_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
	{
	  tree tem = const_binop (RDIV_EXPR, arg0,
				  TREE_OPERAND (arg1, 1));
	  if (tem)
	    return fold_build2_loc (loc, RDIV_EXPR, type, tem,
				    TREE_OPERAND (arg1, 0));
	}

      if (flag_unsafe_math_optimizations)
	{
	  enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	  enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	  /* Optimize sin(x)/cos(x) as tan(x).  */
	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
	      && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				  CALL_EXPR_ARG (arg1, 0), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		return build_call_expr_loc (loc, tanfn, 1,
					    CALL_EXPR_ARG (arg0, 0));
	    }
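	  /* For instance, sin(x)/cos(x) with a shared operand x becomes a
	     single call tan(x), assuming mathfn_built_in can supply a
	     usable BUILT_IN_TAN declaration for the type.  */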
	  /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
	  if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
	       || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
	       || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
	      && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				  CALL_EXPR_ARG (arg1, 0), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		{
		  tree tmp = build_call_expr_loc (loc, tanfn, 1,
						  CALL_EXPR_ARG (arg0, 0));
		  return fold_build2_loc (loc, RDIV_EXPR, type,
					  build_real (type, dconst1), tmp);
		}
	    }

	  /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
	     NaNs or Infinities.  */
	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg1, 0);

	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
		  && operand_equal_p (arg00, arg01, 0))
		{
		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

		  if (cosfn != NULL_TREE)
		    return build_call_expr_loc (loc, cosfn, 1, arg00);
		}
	    }

	  /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
	     NaNs or Infinities.  */
	  if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
	       || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
	       || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg1, 0);

	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
		  && operand_equal_p (arg00, arg01, 0))
		{
		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

		  if (cosfn != NULL_TREE)
		    {
		      tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
		      return fold_build2_loc (loc, RDIV_EXPR, type,
					      build_real (type, dconst1),
					      tmp);
		    }
		}
	    }

	  /* Optimize pow(x,c)/x as pow(x,c-1).  */
	  if (fcode0 == BUILT_IN_POW
	      || fcode0 == BUILT_IN_POWF
	      || fcode0 == BUILT_IN_POWL)
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
	      if (TREE_CODE (arg01) == REAL_CST
		  && !TREE_OVERFLOW (arg01)
		  && operand_equal_p (arg1, arg00, 0))
		{
		  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		  REAL_VALUE_TYPE c;
		  tree arg;

		  c = TREE_REAL_CST (arg01);
		  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
		  arg = build_real (type, c);
		  return build_call_expr_loc (loc, powfn, 2, arg1, arg);
		}
	    }

	  /* Optimize a/root(b/c) into a*root(c/b).  */
	  if (BUILTIN_ROOT_P (fcode1))
	    {
	      tree rootarg = CALL_EXPR_ARG (arg1, 0);

	      if (TREE_CODE (rootarg) == RDIV_EXPR)
		{
		  tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
		  tree b = TREE_OPERAND (rootarg, 0);
		  tree c = TREE_OPERAND (rootarg, 1);

		  tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);

		  tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
		  return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
		}
	    }

	  /* Optimize x/expN(y) into x*expN(-y).  */
	  if (BUILTIN_EXPONENT_P (fcode1))
	    {
	      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
	      tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
	      arg1 = build_call_expr_loc (loc,
					  expfn, 1,
					  fold_convert_loc (loc, type, arg));
	      return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
	    }

	  /* Optimize x/pow(y,z) into x*pow(y,-z).  */
	  if (fcode1 == BUILT_IN_POW
	      || fcode1 == BUILT_IN_POWF
	      || fcode1 == BUILT_IN_POWL)
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
	      tree arg10 = CALL_EXPR_ARG (arg1, 0);
	      tree arg11 = CALL_EXPR_ARG (arg1, 1);
	      tree neg11 = fold_convert_loc (loc, type,
					     negate_expr (arg11));
	      arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
	      return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
	    }
	}
      return NULL_TREE;

    case TRUNC_DIV_EXPR:
      /* Optimize (X & (-A)) / A where A is a power of 2,
	 to X >> log2(A).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
	  && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
	{
	  tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
				      arg1, TREE_OPERAND (arg0, 1));
	  if (sum && integer_zerop (sum))
	    {
	      tree pow2 = build_int_cst (integer_type_node,
					 wi::exact_log2 (arg1));
	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
				      TREE_OPERAND (arg0, 0), pow2);
	    }
	}
      /* Fall through */

    case FLOOR_DIV_EXPR:
      /* Simplify A / (B << N) where A and B are positive and B is
	 a power of 2, to A >> (N + log2(B)).  */
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == LSHIFT_EXPR
	  && (TYPE_UNSIGNED (type)
	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
	{
	  tree sval = TREE_OPERAND (arg1, 0);
	  if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
	    {
	      tree sh_cnt = TREE_OPERAND (arg1, 1);
	      tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
					 wi::exact_log2 (sval));

	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying A / (B << N)"),
				       WARN_STRICT_OVERFLOW_MISC);

	      sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
					sh_cnt, pow2);
	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
				      fold_convert_loc (loc, type, arg0),
				      sh_cnt);
	    }
	}

      /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
	 TRUNC_DIV_EXPR.  Rewrite into the latter in this case.  */
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_UNSIGNED (type)
	  && code == FLOOR_DIV_EXPR)
	return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
      /* Fall through */

    case ROUND_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg1))
	return NULL_TREE;
      /* X / -1 is -X.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && wi::eq_p (arg1, -1))
	return fold_convert_loc (loc, type, negate_expr (arg0));

      /* Convert -A / -B to A / B when the type is signed and overflow is
	 undefined.  */
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	  && TREE_CODE (arg0) == NEGATE_EXPR
	  && negate_expr_p (arg1))
	{
	  if (INTEGRAL_TYPE_P (type))
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when distributing negation across "
				    "division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, type,
						    TREE_OPERAND (arg0, 0)),
				  fold_convert_loc (loc, type,
						    negate_expr (arg1)));
	}
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && negate_expr_p (arg0))
	{
	  if (INTEGRAL_TYPE_P (type))
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when distributing negation across "
				    "division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, type,
						    negate_expr (arg0)),
				  fold_convert_loc (loc, type,
						    TREE_OPERAND (arg1, 0)));
	}

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
	 operation, EXACT_DIV_EXPR.

	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
	 At one time others generated faster code, it's not clear if they do
	 after the last round of changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
	  && multiple_of_p (type, arg0, arg1))
	return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					 &strict_overflow_p)))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_convert_loc (loc, type, tem);
	}

      return NULL_TREE;

    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* X % 1 is always zero, but be sure to preserve any side
	 effects in X.  */
      if (integer_onep (arg1))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* X % 0, return X % 0 unchanged so that we can get the
	 proper warnings and errors.  */
      if (integer_zerop (arg1))
	return NULL_TREE;

      /* 0 % X is always zero, but be sure to preserve any side
	 effects in X.  Place this after checking for X == 0.  */
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

      /* X % -1 is zero.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && wi::eq_p (arg1, -1))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* X % -C is the same as X % C.  */
      if (code == TRUNC_MOD_EXPR
	  && TYPE_SIGN (type) == SIGNED
	  && TREE_CODE (arg1) == INTEGER_CST
	  && !TREE_OVERFLOW (arg1)
	  && wi::neg_p (arg1)
	  && !TYPE_OVERFLOW_TRAPS (type)
	  /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
	  && !sign_bit_p (arg1, arg1))
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type,
						  negate_expr (arg1)));

      /* X % -Y is the same as X % Y.  */
      if (code == TRUNC_MOD_EXPR
	  && !TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && !TYPE_OVERFLOW_TRAPS (type))
	return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type,
								   arg0),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					 &strict_overflow_p)))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying modulus"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_convert_loc (loc, type, tem);
	}

      /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
	 i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
      if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
	  && (TYPE_UNSIGNED (type)
	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
	{
	  tree c = arg1;
	  /* Also optimize A % (C << N) where C is a power of 2,
	     to A & ((C << N) - 1).  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR)
	    c = TREE_OPERAND (arg1, 0);

	  if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
	    {
	      tree mask
		= fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
				   build_int_cst (TREE_TYPE (arg1), 1));
	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying "
					"X % (power of two)"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_convert_loc (loc, type, arg0),
				      fold_convert_loc (loc, type, mask));
	    }
	}
      return NULL_TREE;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_all_onesp (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
	  && tree_expr_nonnegative_p (arg1))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      /* ... fall through ...  */

    case LSHIFT_EXPR:
    shift:
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Prefer vector1 << scalar to vector1 << vector2
	 if vector2 is uniform.  */
      if (VECTOR_TYPE_P (TREE_TYPE (arg1))
	  && (tem = uniform_vector_p (arg1)) != NULL_TREE)
	return fold_build2_loc (loc, code, type, op0, tem);

      /* Since negative shift count is not well-defined,
	 don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
	return NULL_TREE;

      prec = element_precision (type);

      /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
      if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
	  && tree_to_uhwi (arg1) < prec
	  && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
	  && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
	{
	  unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
			      + tree_to_uhwi (arg1));

	  /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
	     being well defined.  */
	  if (low >= prec)
	    {
	      if (code == LROTATE_EXPR || code == RROTATE_EXPR)
		low = low % prec;
	      else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
		return omit_one_operand_loc (loc, type, build_zero_cst (type),
					     TREE_OPERAND (arg0, 0));
	      else
		low = prec - 1;
	    }

	  return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				  build_int_cst (TREE_TYPE (arg1), low));
	}
      /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
	 into x & ((unsigned)-1 >> c) for unsigned types.  */
      if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
	   || (TYPE_UNSIGNED (type)
	       && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
	  && tree_fits_uhwi_p (arg1)
	  && tree_to_uhwi (arg1) < prec
	  && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
	  && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
	{
	  HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
	  HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
	  tree lshift;
	  tree arg00;

	  if (low0 == low1)
	    {
	      arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));

	      lshift = build_minus_one_cst (type);
	      lshift = const_binop (code, lshift, arg1);

	      return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
	    }
	}

      /* Rewrite an LROTATE_EXPR by a constant into an
	 RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
	{
	  tree tem = build_int_cst (TREE_TYPE (arg1), prec);
	  tem = const_binop (MINUS_EXPR, tem, arg1);
	  return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
	}

      /* If we have a rotate of a bit operation with the rotate count and
	 the second operand of the bit operation both constant,
	 permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == BIT_AND_EXPR
	      || TREE_CODE (arg0) == BIT_IOR_EXPR
	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, TREE_CODE (arg0), type,
				fold_build2_loc (loc, code, type,
						 TREE_OPERAND (arg0, 0), arg1),
				fold_build2_loc (loc, code, type,
						 TREE_OPERAND (arg0, 1), arg1));

      /* Two consecutive rotates adding up to some integer
	 multiple of the precision of the type can be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == RROTATE_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
			     prec) == 0)
	return TREE_OPERAND (arg0, 0);
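      /* For instance, a 32-bit rotate-left by 8 is rewritten as a
	 rotate-right by 24, and two right-rotates whose counts sum to 32
	 cancel entirely.  */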
      /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
	      (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
	 if the latter can be further optimized.  */
      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree mask = fold_build2_loc (loc, code, type,
				       fold_convert_loc (loc, type,
							 TREE_OPERAND (arg0, 1)),
				       arg1);
	  tree shift = fold_build2_loc (loc, code, type,
					fold_convert_loc (loc, type,
							  TREE_OPERAND (arg0, 0)),
					arg1);
	  tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
	  if (tem)
	    return tem;
	}
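      /* For instance, (x & 0xff) << 8 is retried as (x << 8) & 0xff00;
	 the rewrite is kept only when the AND of the shifted operand and
	 the shifted mask actually folds further.  */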
      return NULL_TREE;

    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
      if (tem)
	return tem;
      goto associate;

    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_MAX_VALUE (type)
	  && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
      if (tem)
	return tem;
      goto associate;

    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
	return fold_convert_loc (loc, type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
	 must be evaluated.  */
      if (integer_zerop (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
	 case will be handled here.  */
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
	 means A >= Y && A != MAX, but in this case we know that
	 A < X <= MAX.  */

      if (!TREE_SIDE_EFFECTS (arg0)
	  && !TREE_SIDE_EFFECTS (arg1))
	{
	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
	  if (tem && !operand_equal_p (tem, arg0, 0))
	    return fold_build2_loc (loc, code, type, tem, arg1);

	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
	  if (tem && !operand_equal_p (tem, arg1, 0))
	    return fold_build2_loc (loc, code, type, arg0, tem);
	}

      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
	  != NULL_TREE)
	return tem;

      return NULL_TREE;
    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or true.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return fold_convert_loc (loc, type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant true, result is true, but we must
	 evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
	 TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X || X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
      /* X || !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* (X && !Y) || (!X && Y) is X ^ Y */
      if (TREE_CODE (arg0) == TRUTH_AND_EXPR
	  && TREE_CODE (arg1) == TRUTH_AND_EXPR)
	{
	  tree a0, a1, l0, l1, n0, n1;

	  a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));

	  l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));

	  n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
	  n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);

	  if ((operand_equal_p (n0, a0, 0)
	       && operand_equal_p (n1, a1, 0))
	      || (operand_equal_p (n0, a1, 0)
		  && operand_equal_p (n1, a0, 0)))
	    return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
	}

      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
	  != NULL_TREE)
	return tem;

      return NULL_TREE;

    case TRUTH_XOR_EXPR:
      /* If the second arg is constant zero, drop it.  */
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If the second arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg1))
	{
	  tem = invert_truthvalue_loc (loc, arg0);
	  return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	}
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* !X ^ X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg1);

      /* X ^ !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      return NULL_TREE;
    case EQ_EXPR:
    case NE_EXPR:
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;

      /* bool_var != 0 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	  && code == NE_EXPR)
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var == 1 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	  && code == EQ_EXPR)
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var != 1 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	  && code == NE_EXPR)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
						  TREE_TYPE (arg0), arg0));

      /* bool_var == 0 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	  && code == EQ_EXPR)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
						  TREE_TYPE (arg0), arg0));

      /* !exp != 0 becomes !exp */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
	  && code == NE_EXPR)
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If this is an equality comparison of the address of two non-weak,
	 unaliased symbols neither of which are extern (since we do not
	 have access to attributes for externs), then we know the result.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
	{
	  /* We know that we're looking at the address of two
	     non-weak, unaliased, static _DECL nodes.

	     It is both wasteful and incorrect to call operand_equal_p
	     to compare the two ADDR_EXPR nodes.  It is wasteful in that
	     all we need to do is test pointer equality for the arguments
	     to the two ADDR_EXPR nodes.  It is incorrect to use
	     operand_equal_p as that function is NOT equivalent to a
	     C equality test.  It can in fact return false for two
	     objects which would test as equal using the C equality
	     operator.  */
	  bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
	  return constant_boolean_node (equal
					? code == EQ_EXPR : code != EQ_EXPR,
					type);
	}

      /* Similarly for a NEGATE_EXPR.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
							arg1)))
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a BIT_XOR_EXPR;  X ^ C1 == C2 is X == (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				fold_build2_loc (loc, BIT_XOR_EXPR,
						 TREE_TYPE (arg0),
						 fold_convert_loc (loc,
								   TREE_TYPE (arg0),
								   arg1),
						 TREE_OPERAND (arg0, 1)));
      /* Transform comparisons of the form X +- Y CMP X to Y CMP 0.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR
	   || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	   || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
									0)),
			      arg1, 0)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	      || POINTER_TYPE_P (TREE_TYPE (arg0))))
	{
	  tree val = TREE_OPERAND (arg0, 1);
	  return omit_two_operands_loc (loc, type,
					fold_build2_loc (loc, code, type,
							 val,
							 build_int_cst (TREE_TYPE (val),
									0)),
					TREE_OPERAND (arg0, 0), arg1);
	}

      /* Transform comparisons of the form C - X CMP X if C % 2 == 1.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
									1)),
			      arg1, 0)
	  && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
	{
	  return omit_two_operands_loc (loc, type,
					code == NE_EXPR
					? boolean_true_node
					: boolean_false_node,
					TREE_OPERAND (arg0, 1), arg1);
	}

      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
      if (TREE_CODE (arg0) == ABS_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1)))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);

      /* If this is an EQ or NE comparison with zero and ARG0 is
	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
	 two operations, but the latter can be done in one less insn
	 on machines that have only two-operand insns or on which a
	 constant cannot be the first operand.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_zerop (arg1))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  if (TREE_CODE (arg00) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg00, 0)))
	    {
	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
					  arg01, TREE_OPERAND (arg00, 1));
	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				     build_int_cst (TREE_TYPE (arg0), 1));
	      return fold_build2_loc (loc, code, type,
				      fold_convert_loc (loc, TREE_TYPE (arg1),
							tem),
				      arg1);
	    }
	  else if (TREE_CODE (arg01) == LSHIFT_EXPR
		   && integer_onep (TREE_OPERAND (arg01, 0)))
	    {
	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
					  arg00, TREE_OPERAND (arg01, 1));
	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				     build_int_cst (TREE_TYPE (arg0), 1));
	      return fold_build2_loc (loc, code, type,
				      fold_convert_loc (loc, TREE_TYPE (arg1),
							tem),
				      arg1);
	    }
	}
      /* If this is an NE or EQ comparison of zero against the result of a
	 signed MOD operation whose second operand is a power of 2, make
	 the MOD operation unsigned since it is simpler and equivalent.  */
      if (integer_zerop (arg1)
	  && !TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tree newtype = unsigned_type_for (TREE_TYPE (arg0));
	  tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
					 fold_convert_loc (loc, newtype,
							   TREE_OPERAND (arg0, 0)),
					 fold_convert_loc (loc, newtype,
							   TREE_OPERAND (arg0, 1)));

	  return fold_build2_loc (loc, code, type, newmod,
				  fold_convert_loc (loc, newtype, arg1));
	}

      /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
	 C1 is a valid shift constant, and C2 is a power of two, i.e.
	 a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
	     == INTEGER_CST
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && integer_zerop (arg1))
	{
	  tree itype = TREE_TYPE (arg0);
	  tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
	  prec = TYPE_PRECISION (itype);

	  /* Check for a valid shift count.  */
	  if (wi::ltu_p (arg001, prec))
	    {
	      tree arg01 = TREE_OPERAND (arg0, 1);
	      tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	      unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
	      /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
		 can be rewritten as (X & (C2 << C1)) != 0.  */
	      if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
		{
		  tem = fold_build2_loc (loc, LSHIFT_EXPR, itype,
					 arg01, arg001);
		  tem = fold_build2_loc (loc, BIT_AND_EXPR, itype,
					 arg000, tem);
		  return fold_build2_loc (loc, code, type, tem,
					  fold_convert_loc (loc, itype, arg1));
		}
	      /* Otherwise, for signed (arithmetic) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
		 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
	      else if (!TYPE_UNSIGNED (itype))
		return fold_build2_loc (loc,
					code == EQ_EXPR ? GE_EXPR : LT_EXPR,
					type,
					arg000, build_int_cst (itype, 0));
	      /* Otherwise, for unsigned (logical) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
		 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
	      else
		return omit_one_operand_loc (loc, type,
					     code == EQ_EXPR
					     ? integer_one_node
					     : integer_zero_node,
					     arg000);
	    }
	}
      /* If we have (A & C) == C where C is a power of 2, convert this into
	 (A & C) != 0.  Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
				arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
							integer_zero_node));

      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
	 bit, then fold the expression into A < 0 or A >= 0.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
      if (tem)
	return tem;

      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
				       TREE_TYPE (TREE_OPERAND (arg0, 1)),
				       TREE_OPERAND (arg0, 1));
	  tree dandnotc
	    = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
			       fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
			       notc);
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (dandnotc))
	    return omit_one_operand_loc (loc, type, rslt, arg0);
	}

      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
				       arg1);
	  tree candnotd
	    = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
			       TREE_OPERAND (arg0, 1),
			       fold_convert_loc (loc, TREE_TYPE (arg0), notd));
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (candnotd))
	    return omit_one_operand_loc (loc, type, rslt, arg0);
	}
      /* If this is a comparison of a field, we may be able to simplify it.  */
      if ((TREE_CODE (arg0) == COMPONENT_REF
	   || TREE_CODE (arg0) == BIT_FIELD_REF)
	  /* Handle the constant case even without -O
	     to make sure the warnings are given.  */
	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
	{
	  t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
	  if (t1)
	    return t1;
	}
      /* Optimize comparisons of strlen vs zero to a compare of the
	 first character of the string vs zero.  To wit,
		strlen(ptr) == 0   =>  *ptr == 0
		strlen(ptr) != 0   =>  *ptr != 0
	 Other cases should reduce to one of these two (or a constant)
	 due to the return value of strlen being unsigned.  */
      if (TREE_CODE (arg0) == CALL_EXPR
	  && integer_zerop (arg1))
	{
	  tree fndecl = get_callee_fndecl (arg0);

	  if (fndecl
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
	      && call_expr_nargs (arg0) == 1
	      && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
		 == POINTER_TYPE)
	    {
	      tree iref = build_fold_indirect_ref_loc (loc,
						       CALL_EXPR_ARG (arg0, 0));
	      return fold_build2_loc (loc, code, type, iref,
				      build_int_cst (TREE_TYPE (iref), 0));
	    }
	}
      /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
	 of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
      if (TREE_CODE (arg0) == RSHIFT_EXPR
	  && integer_zerop (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree itype = TREE_TYPE (arg00);
	  if (wi::eq_p (arg01, TYPE_PRECISION (itype) - 1))
	    {
	      if (TYPE_UNSIGNED (itype))
		{
		  itype = signed_type_for (itype);
		  arg00 = fold_convert_loc (loc, itype, arg00);
		}
	      return fold_build2_loc (loc,
				      code == EQ_EXPR ? GE_EXPR : LT_EXPR,
				      type, arg00, build_zero_cst (itype));
	    }
	}
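
      /* For example, with 32-bit int X, (X >> 31) != 0 becomes X < 0:
	 the arithmetic shift leaves only the sign bit.  An unsigned X is
	 first converted to the corresponding signed type.  */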
      /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
      if (integer_zerop (arg1)
	  && TREE_CODE (arg0) == BIT_XOR_EXPR)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg0, 1));

      /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				build_zero_cst (TREE_TYPE (arg0)));
      /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
				build_zero_cst (TREE_TYPE (arg0)));
      /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				fold_build2_loc (loc, BIT_XOR_EXPR,
						 TREE_TYPE (arg1),
						 TREE_OPERAND (arg0, 1),
						 arg1));
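
      /* For example, (X ^ 5) == 3 becomes X == 6, because XOR is its
	 own inverse: X ^ 5 == 3 iff X == 5 ^ 3 == 6.  */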
      /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
	 (X & C) == 0 when C is a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
				 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
				 TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type, tem,
				  fold_convert_loc (loc, TREE_TYPE (arg0),
						    arg1));
	}
      /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
	 constant C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type, arg00,
				  build_int_cst (TREE_TYPE (arg00), 0));
	}
      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
	 when C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
				 arg000, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type, tem,
				  build_int_cst (TREE_TYPE (tem), 0));
	}
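
      /* For example, ((X ^ 8) & 8) == 0 becomes (X & 8) != 0: XORing
	 with the single bit 8 inverts that bit, so it is zero after the
	 mask exactly when it was set in X.  */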
      if (integer_zerop (arg1)
	  && tree_expr_nonzero_p (arg0))
	{
	  tree res = constant_boolean_node (code == NE_EXPR, type);
	  return omit_one_operand_loc (loc, type, res, arg0);
	}
      /* Fold -X op -Y as X op Y, where op is eq/ne.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, code, type,
				TREE_OPERAND (arg0, 0),
				fold_convert_loc (loc, TREE_TYPE (arg0),
						  TREE_OPERAND (arg1, 0)));
      /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0, and symmetries.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
								BIT_XOR_EXPR,
								itype,
								arg00, arg10),
						     arg01),
				    build_zero_cst (itype));

	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
								BIT_XOR_EXPR,
								itype,
								arg00, arg11),
						     arg01),
				    build_zero_cst (itype));

	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
								BIT_XOR_EXPR,
								itype,
								arg01, arg10),
						     arg00),
				    build_zero_cst (itype));

	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
								BIT_XOR_EXPR,
								itype,
								arg01, arg11),
						     arg00),
				    build_zero_cst (itype));
	}
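
      /* For example, (X & 8) == (Y & 8) becomes ((X ^ Y) & 8) == 0:
	 the masked values are equal iff X and Y agree on the masked
	 bits, i.e. iff X ^ Y has no bit set inside the mask.  */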
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == BIT_XOR_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
	     operand_equal_p guarantees no side-effects so we don't need
	     to use omit_one_operand on Z.  */
	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2_loc (loc, code, type, arg00,
				    fold_convert_loc (loc, TREE_TYPE (arg00),
						      arg10));
	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2_loc (loc, code, type, arg00,
				    fold_convert_loc (loc, TREE_TYPE (arg00),
						      arg11));
	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2_loc (loc, code, type, arg01,
				    fold_convert_loc (loc, TREE_TYPE (arg01),
						      arg10));
	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2_loc (loc, code, type, arg01,
				    fold_convert_loc (loc, TREE_TYPE (arg01),
						      arg11));

	  /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
	  if (TREE_CODE (arg01) == INTEGER_CST
	      && TREE_CODE (arg11) == INTEGER_CST)
	    {
	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
				     fold_convert_loc (loc, itype, arg11));
	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
	      return fold_build2_loc (loc, code, type, tem,
				      fold_convert_loc (loc, itype, arg10));
	    }
	}
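
      /* For example, (X ^ 5) == (Y ^ 3) becomes (X ^ 6) == Y, folding
	 the two constants into 5 ^ 3 == 6 on one side.  */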
      /* Attempt to simplify equality/inequality comparisons of complex
	 values.  Only lower the comparison if the result is known or
	 can be simplified to a single scalar comparison.  */
      if ((TREE_CODE (arg0) == COMPLEX_EXPR
	   || TREE_CODE (arg0) == COMPLEX_CST)
	  && (TREE_CODE (arg1) == COMPLEX_EXPR
	      || TREE_CODE (arg1) == COMPLEX_CST))
	{
	  tree real0, imag0, real1, imag1;
	  tree rcond, icond;

	  if (TREE_CODE (arg0) == COMPLEX_EXPR)
	    {
	      real0 = TREE_OPERAND (arg0, 0);
	      imag0 = TREE_OPERAND (arg0, 1);
	    }
	  else
	    {
	      real0 = TREE_REALPART (arg0);
	      imag0 = TREE_IMAGPART (arg0);
	    }

	  if (TREE_CODE (arg1) == COMPLEX_EXPR)
	    {
	      real1 = TREE_OPERAND (arg1, 0);
	      imag1 = TREE_OPERAND (arg1, 1);
	    }
	  else
	    {
	      real1 = TREE_REALPART (arg1);
	      imag1 = TREE_IMAGPART (arg1);
	    }

	  rcond = fold_binary_loc (loc, code, type, real0, real1);
	  if (rcond && TREE_CODE (rcond) == INTEGER_CST)
	    {
	      if (integer_zerop (rcond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_false_node,
						  imag0, imag1);
		  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_true_node,
						  imag0, imag1);
		  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
		}
	    }

	  icond = fold_binary_loc (loc, code, type, imag0, imag1);
	  if (icond && TREE_CODE (icond) == INTEGER_CST)
	    {
	      if (integer_zerop (icond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_false_node,
						  real0, real1);
		  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_true_node,
						  real0, real1);
		  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
		}
	    }
	}
      return NULL_TREE;

    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;

      /* Transform comparisons of the form X +- C CMP X.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	       && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
	      || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  enum tree_code code0 = TREE_CODE (arg0);
	  int is_positive;

	  if (TREE_CODE (arg01) == REAL_CST)
	    is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
	  else
	    is_positive = tree_int_cst_sgn (arg01);

	  /* (X - c) > X becomes false.  */
	  if (code == GT_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that (X - c) > X "
					"is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Likewise (X + c) < X becomes false.  */
	  if (code == LT_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) < X is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Convert (X - c) <= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == LE_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X - c) <= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  /* Convert (X + c) >= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == GE_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) >= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  if (TREE_CODE (arg01) == INTEGER_CST)
	    {
	      /* Convert X + c > X and X - c < X to true for integers.  */
	      if (code == GT_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) > X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      if (code == LT_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) < X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      /* Convert X + c <= X and X - c >= X to false for integers.  */
	      if (code == LE_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) <= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}

	      if (code == GE_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) >= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}
	    }
	}
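
      /* For example, with signed overflow undefined, X + 10 > X folds
	 to true and X + 10 <= X folds to false; fold_overflow_warning
	 lets -Wstrict-overflow report that the simplification relied on
	 overflow being undefined.  */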
      /* Comparisons with the highest or lowest possible integer of
	 the specified precision will have known values.  */
      {
	tree arg1_type = TREE_TYPE (arg1);
	unsigned int prec = TYPE_PRECISION (arg1_type);

	if (TREE_CODE (arg1) == INTEGER_CST
	    && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
	  {
	    wide_int max = wi::max_value (arg1_type);
	    wide_int signed_max = wi::max_value (prec, SIGNED);
	    wide_int min = wi::min_value (arg1_type);

	    if (wi::eq_p (arg1, max))
	      switch (code)
		{
		case GT_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_zero_node, arg0);

		case GE_EXPR:
		  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

		case LE_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_one_node, arg0);

		case LT_EXPR:
		  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

		/* The GE_EXPR and LT_EXPR cases above are not normally
		   reached because of previous transformations.  */

		default:
		  break;
		}
	    else if (wi::eq_p (arg1, max - 1))
	      switch (code)
		{
		case GT_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1));
		  return fold_build2_loc (loc, EQ_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		case LE_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1));
		  return fold_build2_loc (loc, NE_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		default:
		  break;
		}
	    else if (wi::eq_p (arg1, min))
	      switch (code)
		{
		case LT_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_zero_node, arg0);

		case LE_EXPR:
		  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

		case GE_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_one_node, arg0);

		case GT_EXPR:
		  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

		default:
		  break;
		}
	    else if (wi::eq_p (arg1, min + 1))
	      switch (code)
		{
		case GE_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1));
		  return fold_build2_loc (loc, NE_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		case LT_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1));
		  return fold_build2_loc (loc, EQ_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		default:
		  break;
		}

	    else if (wi::eq_p (arg1, signed_max)
		     && TYPE_UNSIGNED (arg1_type)
		     /* We will flip the signedness of the comparison operator
			associated with the mode of arg1, so the sign bit is
			specified by this mode.  Check that arg1 is the signed
			max associated with this sign bit.  */
		     && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
		     /* signed_type does not work on pointer types.  */
		     && INTEGRAL_TYPE_P (arg1_type))
	      {
		/* The following case also applies to X < signed_max+1
		   and X >= signed_max+1 because of previous
		   transformations.  */
		if (code == LE_EXPR || code == GT_EXPR)
		  {
		    tree st = signed_type_for (arg1_type);
		    return fold_build2_loc (loc,
					    code == LE_EXPR
					    ? GE_EXPR : LT_EXPR,
					    type,
					    fold_convert_loc (loc, st, arg0),
					    build_int_cst (st, 0));
		  }
	      }
	  }
      }
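
      /* For example, for unsigned char X (precision 8, maximum 255):
	   X > 255 folds to false, X <= 255 to true,
	   X > 254 becomes X == 255, and X <= 254 becomes X != 255.  */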
      /* If we are comparing an ABS_EXPR with a constant, we can
	 convert all the cases into explicit comparisons, but they may
	 well not be faster than doing the ABS and one comparison.
	 But ABS (X) <= C is a range comparison, which becomes a subtraction
	 and a comparison, and is probably faster.  */
      if (code == LE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == ABS_EXPR
	  && ! TREE_SIDE_EFFECTS (arg0)
	  && (0 != (tem = negate_expr (arg1)))
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				build2 (GE_EXPR, type,
					TREE_OPERAND (arg0, 0), tem),
				build2 (LE_EXPR, type,
					TREE_OPERAND (arg0, 0), arg1));
      /* Convert ABS_EXPR<x> >= 0 to true.  */
      strict_overflow_p = false;
      if (code == GE_EXPR
	  && (integer_zerop (arg1)
	      || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
		  && real_zerop (arg1)))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand_loc (loc, type,
				       constant_boolean_node (true, type),
				       arg0);
	}

      /* Convert ABS_EXPR<x> < 0 to false.  */
      strict_overflow_p = false;
      if (code == LT_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand_loc (loc, type,
				       constant_boolean_node (false, type),
				       arg0);
	}
      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
	 and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TREE_CODE (arg1) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (arg1, 0)))
	return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
			   build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
				   TREE_OPERAND (arg1, 1)),
			   build_zero_cst (TREE_TYPE (arg0)));

      /* Similarly for X < (cast) (1 << Y).  But cast can't be narrowing,
	 otherwise Y might be >= # of bits in X's type and thus e.g.
	 (unsigned char) (1 << Y) for Y 15 might be 0.
	 If the cast is widening, then 1 << Y should have unsigned type,
	 otherwise if Y is number of bits in the signed shift type minus 1,
	 we can't optimize this.  E.g. (unsigned long long) (1 << Y) for Y
	 31 might be 0xffffffff80000000.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && CONVERT_EXPR_P (arg1)
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
	  && (TYPE_PRECISION (TREE_TYPE (arg1))
	      >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
	  && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
	      || (TYPE_PRECISION (TREE_TYPE (arg1))
		  == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
	  && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
	{
	  tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
			TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
	  return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
			     fold_convert_loc (loc, TREE_TYPE (arg0), tem),
			     build_zero_cst (TREE_TYPE (arg0)));
	}
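
      /* For example, for unsigned X, X < (1U << Y) becomes
	 (X >> Y) == 0 and X >= (1U << Y) becomes (X >> Y) != 0,
	 replacing the shift of the constant by a shift of X.  */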
      return NULL_TREE;

    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
	{
	  t1 = fold_relational_const (code, type, arg0, arg1);
	  if (t1 != NULL_TREE)
	    return t1;
	}

      /* If the first operand is NaN, the result is constant.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand_loc (loc, type, t1, arg1);
	}

      /* If the second operand is NaN, the result is constant.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand_loc (loc, type, t1, arg0);
	}

      /* Simplify unordered comparison of something with itself.  */
      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (1, type);

      if (code == LTGT_EXPR
	  && !flag_trapping_math
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (0, type);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
	tree targ0 = strip_float_extensions (arg0);
	tree targ1 = strip_float_extensions (arg1);
	tree newtype = TREE_TYPE (targ0);

	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	  newtype = TREE_TYPE (targ1);

	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, newtype, targ0),
				  fold_convert_loc (loc, newtype, targ1));

	return NULL_TREE;
      }
    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
	 nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
	return NULL_TREE;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
				 : fold_convert_loc (loc, type, arg1);
      return pedantic_non_lvalue_loc (loc, tem);

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg0) == REAL_CST
	   && TREE_CODE (arg1) == REAL_CST)
	  || (TREE_CODE (arg0) == INTEGER_CST
	      && TREE_CODE (arg1) == INTEGER_CST))
	return build_complex (type, arg0, arg1);
      if (TREE_CODE (arg0) == REALPART_EXPR
	  && TREE_CODE (arg1) == IMAGPART_EXPR
	  && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
	  && operand_equal_p (TREE_OPERAND (arg0, 0),
			      TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
				     TREE_OPERAND (arg1, 0));
      return NULL_TREE;

    case ASSERT_EXPR:
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
      gcc_unreachable ();

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
	if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg0, elts)
	    || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
	  return NULL_TREE;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
					  ? NOP_EXPR : FIX_TRUNC_EXPR,
					  TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
	unsigned int out, ofs, scale;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
	if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 4);
	if (!vec_cst_ctor_to_array (arg0, elts)
	    || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
	  return NULL_TREE;

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	for (out = 0; out < nelts; out++)
	  {
	    unsigned int in1 = (out << scale) + ofs;
	    unsigned int in2 = in1 + nelts * 2;
	    tree t1, t2;

	    t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
	    t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    elts[out] = const_binop (MULT_EXPR, t1, t2);
	    if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
   a LABEL_EXPR; otherwise return NULL_TREE.  Do not check the subtrees
   of GOTO_EXPR.  */

static tree
contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      return *tp;

    case GOTO_EXPR:
      *walk_subtrees = 0;

      /* ... fall through ...  */

    default:
      return NULL_TREE;
    }
}

/* Return whether the sub-tree ST contains a label which is accessible from
   outside the sub-tree.  */

static bool
contains_label_p (tree st)
{
  return
   (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
}
/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_ternary_loc (location_t loc, enum tree_code code, tree type,
		  tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 3);

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any cases,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */
  if (op0)
    {
      arg0 = op0;
      STRIP_NOPS (arg0);
    }

  if (op1)
    {
      arg1 = op1;
      STRIP_NOPS (arg1);
    }

  if (op2)
    {
      arg2 = op2;
      STRIP_NOPS (arg2);
    }

  switch (code)
    {
    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
	{
	  unsigned HOST_WIDE_INT idx;
	  tree field, value;
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx,
				    field, value)
	    if (field == arg1)
	      return value;
	}
      return NULL_TREE;

    case COND_EXPR:
:
14085 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
14086 so all simple results must be passed through pedantic_non_lvalue. */
14087 if (TREE_CODE (arg0
) == INTEGER_CST
)
14089 tree unused_op
= integer_zerop (arg0
) ? op1
: op2
;
14090 tem
= integer_zerop (arg0
) ? op2
: op1
;
14091 /* Only optimize constant conditions when the selected branch
14092 has the same type as the COND_EXPR. This avoids optimizing
14093 away "c ? x : throw", where the throw has a void type.
14094 Avoid throwing away that operand which contains label. */
14095 if ((!TREE_SIDE_EFFECTS (unused_op
)
14096 || !contains_label_p (unused_op
))
14097 && (! VOID_TYPE_P (TREE_TYPE (tem
))
14098 || VOID_TYPE_P (type
)))
14099 return pedantic_non_lvalue_loc (loc
, tem
);
14102 else if (TREE_CODE (arg0
) == VECTOR_CST
)
14104 if (integer_all_onesp (arg0
))
14105 return pedantic_omit_one_operand_loc (loc
, type
, arg1
, arg2
);
14106 if (integer_zerop (arg0
))
14107 return pedantic_omit_one_operand_loc (loc
, type
, arg2
, arg1
);
14109 if ((TREE_CODE (arg1
) == VECTOR_CST
14110 || TREE_CODE (arg1
) == CONSTRUCTOR
)
14111 && (TREE_CODE (arg2
) == VECTOR_CST
14112 || TREE_CODE (arg2
) == CONSTRUCTOR
))
14114 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
14115 unsigned char *sel
= XALLOCAVEC (unsigned char, nelts
);
14116 gcc_assert (nelts
== VECTOR_CST_NELTS (arg0
));
14117 for (i
= 0; i
< nelts
; i
++)
14119 tree val
= VECTOR_CST_ELT (arg0
, i
);
14120 if (integer_all_onesp (val
))
14122 else if (integer_zerop (val
))
14123 sel
[i
] = nelts
+ i
;
14124 else /* Currently unreachable. */
14127 tree t
= fold_vec_perm (type
, arg1
, arg2
, sel
);
14128 if (t
!= NULL_TREE
)
14133 if (operand_equal_p (arg1
, op2
, 0))
14134 return pedantic_omit_one_operand_loc (loc
, type
, arg1
, arg0
);
      /* If we have A op B ? A : C, we may be able to convert this to a
	 simpler expression, depending on the operation and the values
	 of B and C.  Signed zeros prevent all of these transformations,
	 for reasons given above each one.

	 Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     arg1, TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
	{
	  tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
	  if (tem)
	    return tem;
	}

      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     op2,
					     TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  tem = fold_invert_truthvalue (loc0, arg0);
	  if (tem && COMPARISON_CLASS_P (tem))
	    {
	      tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
	      if (tem)
		return tem;
	    }
	}

      /* If the second operand is simpler than the third, swap them
	 since that produces better jump optimization results.  */
      if (truth_value_p (TREE_CODE (arg0))
	  && tree_swap_operands_p (op1, op2, false))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* See if this can be inverted.  If it can't, possibly because
	     it was a floating-point inequality comparison, don't do
	     anything.  */
	  tem = fold_invert_truthvalue (loc0, arg0);
	  if (tem)
	    return fold_build3_loc (loc, code, type, tem, op2, op1);
	}
      /* Convert A ? 1 : 0 to simply A.  */
      if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
				 : (integer_onep (op1)
				    && !VECTOR_TYPE_P (type)))
	  && integer_zerop (op2)
	  /* If we try to convert OP0 to our type, the
	     call to fold will try to move the conversion inside
	     a COND, which will recurse.  In that case, the COND_EXPR
	     is probably the best choice, so leave it alone.  */
	  && type == TREE_TYPE (arg0))
	return pedantic_non_lvalue_loc (loc, arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
	 over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
	  && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
				    : (integer_onep (op2)
				       && !VECTOR_TYPE_P (type)))
	  && truth_value_p (TREE_CODE (arg0)))
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
						invert_truthvalue_loc (loc,
								       arg0)));
      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
      if (TREE_CODE (arg0) == LT_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
	{
	  /* sign_bit_p looks through both zero and sign extensions,
	     but for this optimization only sign extensions are
	     usable.  */
	  tree tem2 = TREE_OPERAND (arg0, 0);
	  while (tem != tem2)
	    {
	      if (TREE_CODE (tem2) != NOP_EXPR
		  || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
		{
		  tem = NULL_TREE;
		  break;
		}
	      tem2 = TREE_OPERAND (tem2, 0);
	    }
	  /* sign_bit_p only checks ARG1 bits within A's precision.
	     If <sign bit of A> has wider type than A, bits outside
	     of A's precision in <sign bit of A> need to be checked.
	     If they are all 0, this optimization needs to be done
	     in unsigned A's type, if they are all 1 in signed A's type,
	     otherwise this can't be done.  */
	  if (tem
	      && TYPE_PRECISION (TREE_TYPE (tem))
		 < TYPE_PRECISION (TREE_TYPE (arg1))
	      && TYPE_PRECISION (TREE_TYPE (tem))
		 < TYPE_PRECISION (type))
	    {
	      int inner_width, outer_width;
	      tree tem_type;

	      inner_width = TYPE_PRECISION (TREE_TYPE (tem));
	      outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
	      if (outer_width > TYPE_PRECISION (type))
		outer_width = TYPE_PRECISION (type);

	      wide_int mask = wi::shifted_mask
		(inner_width, outer_width - inner_width, false,
		 TYPE_PRECISION (TREE_TYPE (arg1)));

	      wide_int common = mask & arg1;
	      if (common == mask)
		{
		  tem_type = signed_type_for (TREE_TYPE (tem));
		  tem = fold_convert_loc (loc, tem_type, tem);
		}
	      else if (common == 0)
		{
		  tem_type = unsigned_type_for (TREE_TYPE (tem));
		  tem = fold_convert_loc (loc, tem_type, tem);
		}
	      else
		tem = NULL_TREE;
	    }

	  if (tem)
	    return
	      fold_convert_loc (loc, type,
				fold_build2_loc (loc, BIT_AND_EXPR,
						 TREE_TYPE (tem), tem,
						 fold_convert_loc (loc,
							TREE_TYPE (tem),
							arg1)));
	}
      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
	 already handled above.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && integer_pow2p (arg1))
	{
	  tree tem = TREE_OPERAND (arg0, 0);
	  STRIP_NOPS (tem);
	  if (TREE_CODE (tem) == RSHIFT_EXPR
	      && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
	      && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
		 tree_to_uhwi (TREE_OPERAND (tem, 1)))
	    return fold_build2_loc (loc, BIT_AND_EXPR, type,
				    TREE_OPERAND (tem, 0), arg1);
	}
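
      /* For example, ((X >> 5) & 1) ? 32 : 0 becomes X & 32,
	 since 32 == 1 << 5 picks out exactly the tested bit.  */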
      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
	 is probably obsolete because the first operand should be a
	 truth value (that's why we have the two cases above), but let's
	 leave it in until we can confirm this for all front-ends.  */
      if (integer_zerop (op2)
	  && TREE_CODE (arg0) == NE_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_pow2p (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      arg1, OEP_ONLY_CONST))
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
						TREE_OPERAND (arg0, 0)));
      /* Disable the transformations below for vectors, since
	 fold_binary_op_with_conditional_arg may undo them immediately,
	 yielding an infinite loop.  */
      if (code == VEC_COND_EXPR)
	return NULL_TREE;

      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1))
	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
	return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
							   : TRUTH_ANDIF_EXPR,
				type, fold_convert_loc (loc, type, arg0),
				arg1);

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1))
	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_invert_truthvalue (loc0, arg0);
	  if (tem)
	    return fold_build2_loc (loc, code == VEC_COND_EXPR
					 ? BIT_IOR_EXPR
					 : TRUTH_ORIF_EXPR,
				    type, fold_convert_loc (loc, type, tem),
				    arg1);
	}

      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
      if (integer_zerop (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2))
	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_invert_truthvalue (loc0, arg0);
	  if (tem)
	    return fold_build2_loc (loc, code == VEC_COND_EXPR
					 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
				    type, fold_convert_loc (loc, type, tem),
				    op2);
	}

      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if (code == VEC_COND_EXPR ? integer_all_onesp (arg1)
				: integer_onep (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2))
	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
	return fold_build2_loc (loc, code == VEC_COND_EXPR
				     ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
				type, fold_convert_loc (loc, type, arg0),
				op2);

      return NULL_TREE;

    case CALL_EXPR:
      /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
	 of fold_ternary on them.  */
      gcc_unreachable ();
    case BIT_FIELD_REF:
      if ((TREE_CODE (arg0) == VECTOR_CST
	   || (TREE_CODE (arg0) == CONSTRUCTOR
	       && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
	  && (type == TREE_TYPE (TREE_TYPE (arg0))
	      || (TREE_CODE (type) == VECTOR_TYPE
		  && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
	{
	  tree eltype = TREE_TYPE (TREE_TYPE (arg0));
	  unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
	  unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
	  unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);

	  if (n != 0
	      && (idx % width) == 0
	      && (n % width) == 0
	      && ((idx + n) / width)
		 <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
	    {
	      idx = idx / width;
	      n = n / width;

	      if (TREE_CODE (arg0) == VECTOR_CST)
		{
		  if (n == 1)
		    return VECTOR_CST_ELT (arg0, idx);

		  tree *vals = XALLOCAVEC (tree, n);
		  for (unsigned i = 0; i < n; ++i)
		    vals[i] = VECTOR_CST_ELT (arg0, idx + i);
		  return build_vector (type, vals);
		}

	      /* Constructor elements can be subvectors.  */
	      unsigned HOST_WIDE_INT k = 1;
	      if (CONSTRUCTOR_NELTS (arg0) != 0)
		{
		  tree cons_elem
		    = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
		  if (TREE_CODE (cons_elem) == VECTOR_TYPE)
		    k = TYPE_VECTOR_SUBPARTS (cons_elem);
		}

	      /* We keep an exact subset of the constructor elements.  */
	      if ((idx % k) == 0 && (n % k) == 0)
		{
		  if (CONSTRUCTOR_NELTS (arg0) == 0)
		    return build_constructor (type, NULL);
		  idx /= k;
		  n /= k;
		  if (n == 1)
		    {
		      if (idx < CONSTRUCTOR_NELTS (arg0))
			return CONSTRUCTOR_ELT (arg0, idx)->value;
		      return build_zero_cst (type);
		    }

		  vec<constructor_elt, va_gc> *vals;
		  vec_alloc (vals, n);
		  for (unsigned i = 0;
		       i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
		       ++i)
		    CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
					    CONSTRUCTOR_ELT
					      (arg0, idx + i)->value);
		  return build_constructor (type, vals);
		}
	      /* The bitfield references a single constructor element.  */
	      else if (idx + n <= (idx / k + 1) * k)
		{
		  if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
		    return build_zero_cst (type);
		  else if (n == k)
		    return CONSTRUCTOR_ELT (arg0, idx / k)->value;
		  else
		    return fold_build3_loc (loc, code, type,
		      CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
		      build_int_cst (TREE_TYPE (op2), (idx % k) * width));
		}
	    }
	}

      /* A bit-field-ref that referenced the full argument can be stripped.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	  && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
	  && integer_zerop (op2))
	return fold_convert_loc (loc, type, arg0);

      /* On constants we can use native encode/interpret to constant
	 fold (nearly) all BIT_FIELD_REFs.  */
      if (CONSTANT_CLASS_P (arg0)
	  && can_native_interpret_type_p (type)
	  && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
	  /* This limitation should not be necessary, we just need to
	     round this up to mode size.  */
	  && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
	  /* Need bit-shifting of the buffer to relax the following.  */
	  && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
	{
	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
	  unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
	  unsigned HOST_WIDE_INT clen;
	  clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
	  /* ??? We cannot tell native_encode_expr to start at
	     some random byte only.  So limit us to a reasonable amount
	     of work.  */
	  if (clen <= 4096)
	    {
	      unsigned char *b = XALLOCAVEC (unsigned char, clen);
	      unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
	      if (len > 0
		  && len * BITS_PER_UNIT >= bitpos + bitsize)
		{
		  tree v = native_interpret_expr (type,
						  b + bitpos / BITS_PER_UNIT,
						  bitsize / BITS_PER_UNIT);
		  if (v)
		    return v;
		}
	    }
	}

      return NULL_TREE;

    case FMA_EXPR:
      /* For integers we can decompose the FMA if possible.  */
      if (TREE_CODE (arg0) == INTEGER_CST
	  && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2_loc (loc, PLUS_EXPR, type,
				const_binop (MULT_EXPR, arg0, arg1), arg2);
      if (integer_zerop (arg2))
	return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);

      return fold_fma (loc, type, arg0, arg1, arg2);
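
      /* For example, FMA <4, 5, X> becomes 20 + X, and FMA <X, Y, 0>
	 becomes X * Y.  */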
    case VEC_PERM_EXPR:
      if (TREE_CODE (arg2) == VECTOR_CST)
	{
	  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
	  unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
	  bool need_mask_canon = false;
	  bool all_in_vec0 = true;
	  bool all_in_vec1 = true;
	  bool maybe_identity = true;
	  bool single_arg = (op0 == op1);
	  bool changed = false;

	  mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
	  gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
	  for (i = 0; i < nelts; i++)
	    {
	      tree val = VECTOR_CST_ELT (arg2, i);
	      if (TREE_CODE (val) != INTEGER_CST)
		return NULL_TREE;

	      /* Make sure that the perm value is in an acceptable
		 range.  */
	      wide_int t = val;
	      if (wi::gtu_p (t, mask))
		{
		  need_mask_canon = true;
		  sel[i] = t.to_uhwi () & mask;
		}
	      else
		sel[i] = t.to_uhwi ();

	      if (sel[i] < nelts)
		all_in_vec1 = false;
	      else
		all_in_vec0 = false;

	      if ((sel[i] & (nelts-1)) != i)
		maybe_identity = false;
	    }

	  if (maybe_identity)
	    {
	      if (all_in_vec0)
		return op0;
	      if (all_in_vec1)
		return op1;
	    }

	  if (all_in_vec0)
	    op1 = op0;
	  else if (all_in_vec1)
	    {
	      op0 = op1;
	      for (i = 0; i < nelts; i++)
		sel[i] -= nelts;
	      need_mask_canon = true;
	    }

	  if ((TREE_CODE (op0) == VECTOR_CST
	       || TREE_CODE (op0) == CONSTRUCTOR)
	      && (TREE_CODE (op1) == VECTOR_CST
		  || TREE_CODE (op1) == CONSTRUCTOR))
	    {
	      tree t = fold_vec_perm (type, op0, op1, sel);
	      if (t != NULL_TREE)
		return t;
	    }

	  if (op0 == op1 && !single_arg)
	    changed = true;

	  if (need_mask_canon && arg2 == op2)
	    {
	      tree *tsel = XALLOCAVEC (tree, nelts);
	      tree eltype = TREE_TYPE (TREE_TYPE (arg2));
	      for (i = 0; i < nelts; i++)
		tsel[i] = build_int_cst (eltype, sel[i]);
	      op2 = build_vector (TREE_TYPE (arg2), tsel);
	      changed = true;
	    }

	  if (changed)
	    return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;
  location_t loc = EXPR_LOCATION (expr);

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  /* CALL_EXPR-like objects with variable numbers of operands are
     treated specially.  */
  if (kind == tcc_vl_exp)
    {
      if (code == CALL_EXPR)
	{
	  tem = fold_call_expr (loc, expr, false);
	  return tem ? tem : expr;
	}
      return expr;
    }

  if (IS_EXPR_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);
      tree op0, op1, op2;

      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  op0 = TREE_OPERAND (t, 0);
	  tem = fold_unary_loc (loc, code, type, op0);
	  return tem ? tem : expr;
	case 2:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  tem = fold_binary_loc (loc, code, type, op0, op1);
	  return tem ? tem : expr;
	case 3:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  op2 = TREE_OPERAND (t, 2);
	  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
	  return tem ? tem : expr;
	default:
	  break;
	}
    }

  switch (code)
    {
    case ARRAY_REF:
      {
	tree op0 = TREE_OPERAND (t, 0);
	tree op1 = TREE_OPERAND (t, 1);

	if (TREE_CODE (op1) == INTEGER_CST
	    && TREE_CODE (op0) == CONSTRUCTOR
	    && ! type_contains_placeholder_p (TREE_TYPE (op0)))
	  {
	    vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
	    unsigned HOST_WIDE_INT end = vec_safe_length (elts);
	    unsigned HOST_WIDE_INT begin = 0;

	    /* Find a matching index by means of a binary search.  */
	    while (begin != end)
	      {
		unsigned HOST_WIDE_INT middle = (begin + end) / 2;
		tree index = (*elts)[middle].index;

		if (TREE_CODE (index) == INTEGER_CST
		    && tree_int_cst_lt (index, op1))
		  begin = middle + 1;
		else if (TREE_CODE (index) == INTEGER_CST
			 && tree_int_cst_lt (op1, index))
		  end = middle;
		else if (TREE_CODE (index) == RANGE_EXPR
			 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
		  begin = middle + 1;
		else if (TREE_CODE (index) == RANGE_EXPR
			 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
		  end = middle;
		else
		  return (*elts)[middle].value;
	      }
	  }

	return t;
      }
      /* Return a VECTOR_CST if possible.  */
    case CONSTRUCTOR:
      {
	tree type = TREE_TYPE (t);
	if (TREE_CODE (type) != VECTOR_TYPE)
	  return t;

	tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
	unsigned HOST_WIDE_INT idx, pos = 0;
	tree value;

	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
	  {
	    if (!CONSTANT_CLASS_P (value))
	      return t;
	    if (TREE_CODE (value) == VECTOR_CST)
	      {
		for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
		  vec[pos++] = VECTOR_CST_ELT (value, i);
	      }
	    else
	      vec[pos++] = value;
	  }
	for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
	  vec[pos] = build_zero_cst (TREE_TYPE (type));

	return build_vector (type, vec);
      }

    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    default:
      return t;
    } /* switch (code) */
}
#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (const_tree, struct md5_ctx *,
				hash_table <pointer_hash <const tree_node> > *);
static void fold_check_failed (const_tree, const_tree);
void print_fold_checksum (const_tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */

tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  hash_table <pointer_hash <const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before);
  ht.empty ();

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}
void
print_fold_checksum (const_tree expr)
{
  struct md5_ctx ctx;
  unsigned char checksum[16], cnt;
  hash_table <pointer_hash <const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum);
  for (cnt = 0; cnt < 16; ++cnt)
    fprintf (stderr, "%02x", checksum[cnt]);
  putc ('\n', stderr);
}
static void
fold_check_failed (const_tree expr ATTRIBUTE_UNUSED,
		   const_tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}
static void
fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
		    hash_table <pointer_hash <const tree_node> > *ht)
{
  const tree_node **slot;
  enum tree_code code;
  union tree_node buf;
  int i, len;

 recursive_label:
  if (expr == NULL)
    return;
  slot = ht->find_slot (expr, INSERT);
  if (*slot)
    return;
  *slot = expr;
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
      expr = (tree) &buf;
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
	   && (TYPE_POINTER_TO (expr)
	       || TYPE_REFERENCE_TO (expr)
	       || TYPE_CACHED_VALUES_P (expr)
	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
	       || TYPE_NEXT_VARIANT (expr)))
    {
      /* Allow these fields to be modified.  */
      tree tmp;
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = tmp = (tree) &buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
      TYPE_POINTER_TO (tmp) = NULL;
      TYPE_REFERENCE_TO (tmp) = NULL;
      TYPE_NEXT_VARIANT (tmp) = NULL;
      if (TYPE_CACHED_VALUES_P (tmp))
	{
	  TYPE_CACHED_VALUES_P (tmp) = 0;
	  TYPE_CACHED_VALUES (tmp) = NULL;
	}
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST
      && code != SSA_NAME
      && CODE_CONTAINS_STRUCT (code, TS_COMMON))
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
	{
	case STRING_CST:
	  md5_process_bytes (TREE_STRING_POINTER (expr),
			     TREE_STRING_LENGTH (expr), ctx);
	  break;
	case COMPLEX_CST:
	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
	  break;
	case VECTOR_CST:
	  for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
	    fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_exceptional:
      switch (code)
	{
	case TREE_LIST:
	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
	  expr = TREE_CHAIN (expr);
	  goto recursive_label;
	  break;
	case TREE_VEC:
	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
    case tcc_vl_exp:
      len = TREE_OPERAND_LENGTH (expr);
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
	{
	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
	}
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
	{
	  if (TREE_CODE (expr) == FUNCTION_DECL)
	    {
	      fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
	      fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
	    }
	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
	}
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
	  || SCALAR_FLOAT_TYPE_P (expr))
	{
	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
	}
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
	  || TREE_CODE (expr) == UNION_TYPE
	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}
/* Helper function for outputting the checksum of a tree T.  When
   debugging with gdb, you can "define mynext" to be "next" followed
   by "call debug_fold_checksum (op0)", then just trace down till the
   outputted checksum changes.  */

DEBUG_FUNCTION void
debug_fold_checksum (const_tree t)
{
  int i;
  unsigned char checksum[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (t, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum);

  for (i = 0; i < 16; i++)
    fprintf (stderr, "%d ", checksum[i]);

  fprintf (stderr, "\n");
}

#endif
/* Fold a unary tree expression with code CODE of type TYPE with an
   operand OP0.  LOC is the location of the resulting expression.
   Return a folded expression if successful.  Otherwise, return a tree
   expression with code CODE of type TYPE with an operand OP0.  */

tree
fold_build1_stat_loc (location_t loc,
		      enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before[16], checksum_after[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before);
  ht.empty ();
#endif

  tem = fold_unary_loc (loc, code, type, op0);
  if (!tem)
    tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (op0, tem);
#endif
  return tem;
}
/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting
   expression.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with operands
   OP0 and OP1.  */

tree
fold_build2_stat_loc (location_t loc,
		      enum tree_code code, tree type, tree op0, tree op1
		      MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
		checksum_before_op1[16],
		checksum_after_op0[16],
		checksum_after_op1[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  ht.empty ();
#endif

  tem = fold_binary_loc (loc, code, type, op0, op1);
  if (!tem)
    tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  ht.empty ();

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op1);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
#endif
  return tem;
}
/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */

tree
fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
		      tree op0, tree op1, tree op2 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_before_op2[16],
                checksum_after_op0[16],
                checksum_after_op1[16],
                checksum_after_op2[16];
  struct md5_ctx ctx;
  hash_table<pointer_hash<const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  ht.empty ();
#endif

  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
  if (!tem)
    tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  ht.empty ();

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  ht.empty ();

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op2);

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}
/* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
   arguments in ARGARRAY, and a null static chain.
   Return a folded expression if successful.  Otherwise, return a CALL_EXPR
   of type TYPE from the given operands as constructed by build_call_array.  */

tree
fold_build_call_array_loc (location_t loc, tree type, tree fn,
			   int nargs, tree *argarray)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_fn[16],
                checksum_before_arglist[16],
                checksum_after_fn[16],
                checksum_after_arglist[16];
  struct md5_ctx ctx;
  hash_table<pointer_hash<const tree_node> > ht (32);
  int i;

  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_fn);
  ht.empty ();

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_arglist);
  ht.empty ();
#endif

  tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_fn);
  ht.empty ();

  if (memcmp (checksum_before_fn, checksum_after_fn, 16))
    fold_check_failed (fn, tem);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_arglist);

  if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
    fold_check_failed (NULL_TREE, tem);
#endif
  return tem;
}
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;

tree
fold_build1_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build1_loc (loc, code, type, op);

  END_FOLD_INIT;
  return result;
}

tree
fold_build2_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op0, tree op1)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build2_loc (loc, code, type, op0, op1);

  END_FOLD_INIT;
  return result;
}

tree
fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
				       int nargs, tree *argarray)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);

  END_FOLD_INIT;
  return result;
}

#undef START_FOLD_INIT
#undef END_FOLD_INIT
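/* For example, a static initializer such as 1.0 / 3.0 can be folded to
   a constant even when -ftrapping-math or -frounding-math is in effect:
   START_FOLD_INIT clears those flags (along with flag_signaling_nans
   and flag_trapv) around the single fold_buildN call, and END_FOLD_INIT
   restores the saved values afterwards.  */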
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */
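/* As a further worked instance of the cases handled below: with TYPE
   sizetype, TOP = J * 8 + 16 and BOTTOM = 8, the PLUS_EXPR case asks
   that both operands be multiples of 8; the MULT_EXPR case accepts
   J * 8 because one factor is, and the INTEGER_CST case accepts 16, so
   the whole sum is known to be a multiple of 8.  */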
static int
multiple_of_p (tree type, const_tree top, const_tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
	return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
	{
	  tree op1, t1;

	  op1 = TREE_OPERAND (top, 1);
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
	      && 0 != (t1 = fold_convert (type,
					  const_binop (LSHIFT_EXPR,
						       size_one_node,
						       op1)))
	      && !TREE_OVERFLOW (t1))
	    return multiple_of_p (type, t1, bottom);
	}
      return 0;

    CASE_CONVERT:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
	  || (TYPE_PRECISION (type)
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
	return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case COND_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
	  || integer_zerop (bottom)
	  || (TYPE_UNSIGNED (type)
	      && (tree_int_cst_sgn (top) < 0
		  || tree_int_cst_sgn (bottom) < 0)))
	return 0;
      return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
				SIGNED);

    default:
      return 0;
    }
}
/* Return true if CODE or TYPE is known to be non-negative.  */

static bool
tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
{
  if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
      && truth_value_p (code))
    /* Truth values evaluate to 0 or 1, which is nonnegative unless we
       have a signed:1 type (where the value is -1 and 0).  */
    return true;

  return false;
}
/* Return true if (CODE OP0) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
				bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
	 ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!INTEGRAL_TYPE_P (type))
	return true;
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  *strict_overflow_p = true;
	  return true;
	}
      break;

    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
					    strict_overflow_p);

    CASE_CONVERT:
      {
	tree inner_type = TREE_TYPE (op0);
	tree outer_type = type;

	if (TREE_CODE (outer_type) == REAL_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_warnv_p (op0,
						    strict_overflow_p);
	    if (INTEGRAL_TYPE_P (inner_type))
	      {
		if (TYPE_UNSIGNED (inner_type))
		  return true;
		return tree_expr_nonnegative_warnv_p (op0,
						      strict_overflow_p);
	      }
	  }
	else if (INTEGRAL_TYPE_P (outer_type))
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_warnv_p (op0,
						    strict_overflow_p);
	    if (INTEGRAL_TYPE_P (inner_type))
	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
		     && TYPE_UNSIGNED (inner_type);
	  }
      }
      break;

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
				 tree op1, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type))
	return (tree_expr_nonnegative_warnv_p (op0,
					       strict_overflow_p)
		&& tree_expr_nonnegative_warnv_p (op1,
						  strict_overflow_p));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
	 both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TREE_CODE (op0) == NOP_EXPR
	  && TREE_CODE (op1) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    {
	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
				       TYPE_PRECISION (inner2)) + 1;
	      return prec < TYPE_PRECISION (type);
	    }
	}
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* x * x is always non-negative for floating point x
	     or without overflow.  */
	  if (operand_equal_p (op0, op1, 0)
	      || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
		  && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (type))
		*strict_overflow_p = true;
	      return true;
	    }
	}

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
	  && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
	{
	  tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
	    ? TREE_TYPE (TREE_OPERAND (op0, 0))
	    : TREE_TYPE (op0);
	  tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
	    ? TREE_TYPE (TREE_OPERAND (op1, 0))
	    : TREE_TYPE (op1);

	  bool unsigned0 = TYPE_UNSIGNED (inner0);
	  bool unsigned1 = TYPE_UNSIGNED (inner1);

	  if (TREE_CODE (op0) == INTEGER_CST)
	    unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;

	  if (TREE_CODE (op1) == INTEGER_CST)
	    unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;

	  if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
	      && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
	    {
	      unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
		? tree_int_cst_min_precision (op0, UNSIGNED)
		: TYPE_PRECISION (inner0);

	      unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
		? tree_int_cst_min_precision (op1, UNSIGNED)
		: TYPE_PRECISION (inner1);

	      return precision0 + precision1 < TYPE_PRECISION (type);
	    }
	}
      return false;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
					     strict_overflow_p)
	      || tree_expr_nonnegative_warnv_p (op1,
						strict_overflow_p));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
					     strict_overflow_p)
	      && tree_expr_nonnegative_warnv_p (op1,
						strict_overflow_p));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
					    strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
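/* A concrete instance of the precision reasoning above: with 32-bit int
   and 8-bit unsigned char operands x and y, (int) x + (int) y needs at
   most MAX (8, 8) + 1 = 9 bits and 9 < 32, so the PLUS_EXPR case proves
   the sum non-negative; (int) x * (int) y needs at most 8 + 8 = 16 bits
   and 16 < 32, so the MULT_EXPR case proves the same for the product.  */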
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case FIXED_CST:
      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));

    case COND_EXPR:
      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
					     strict_overflow_p)
	      && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
						strict_overflow_p));

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t));
    }
  /* We don't know sign of `t', so be conservative and return false.  */
}
/* Return true if a call to FNDECL with result type TYPE and arguments
   ARG0 and ARG1 is known to be non-negative.  If the return value is
   based on the assumption that signed overflow is undefined, set
   *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_call_nonnegative_warnv_p (tree type, tree fndecl,
			       tree arg0, tree arg1, bool *strict_overflow_p)
{
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
	CASE_FLT_FN (BUILT_IN_ACOS):
	CASE_FLT_FN (BUILT_IN_ACOSH):
	CASE_FLT_FN (BUILT_IN_CABS):
	CASE_FLT_FN (BUILT_IN_COSH):
	CASE_FLT_FN (BUILT_IN_ERFC):
	CASE_FLT_FN (BUILT_IN_EXP):
	CASE_FLT_FN (BUILT_IN_EXP10):
	CASE_FLT_FN (BUILT_IN_EXP2):
	CASE_FLT_FN (BUILT_IN_FABS):
	CASE_FLT_FN (BUILT_IN_FDIM):
	CASE_FLT_FN (BUILT_IN_HYPOT):
	CASE_FLT_FN (BUILT_IN_POW10):
	CASE_INT_FN (BUILT_IN_FFS):
	CASE_INT_FN (BUILT_IN_PARITY):
	CASE_INT_FN (BUILT_IN_POPCOUNT):
	CASE_INT_FN (BUILT_IN_CLZ):
	CASE_INT_FN (BUILT_IN_CLRSB):
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
	/* Always non-negative.  */
	return true;

	CASE_FLT_FN (BUILT_IN_SQRT):
	/* sqrt(-0.0) is -0.0.  */
	if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	  return true;
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_ASINH):
	CASE_FLT_FN (BUILT_IN_ATAN):
	CASE_FLT_FN (BUILT_IN_ATANH):
	CASE_FLT_FN (BUILT_IN_CBRT):
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_ERF):
	CASE_FLT_FN (BUILT_IN_EXPM1):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_FMOD):
	CASE_FLT_FN (BUILT_IN_FREXP):
	CASE_FLT_FN (BUILT_IN_ICEIL):
	CASE_FLT_FN (BUILT_IN_IFLOOR):
	CASE_FLT_FN (BUILT_IN_IRINT):
	CASE_FLT_FN (BUILT_IN_IROUND):
	CASE_FLT_FN (BUILT_IN_LCEIL):
	CASE_FLT_FN (BUILT_IN_LDEXP):
	CASE_FLT_FN (BUILT_IN_LFLOOR):
	CASE_FLT_FN (BUILT_IN_LLCEIL):
	CASE_FLT_FN (BUILT_IN_LLFLOOR):
	CASE_FLT_FN (BUILT_IN_LLRINT):
	CASE_FLT_FN (BUILT_IN_LLROUND):
	CASE_FLT_FN (BUILT_IN_LRINT):
	CASE_FLT_FN (BUILT_IN_LROUND):
	CASE_FLT_FN (BUILT_IN_MODF):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_SCALB):
	CASE_FLT_FN (BUILT_IN_SCALBLN):
	CASE_FLT_FN (BUILT_IN_SCALBN):
	CASE_FLT_FN (BUILT_IN_SIGNBIT):
	CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
	CASE_FLT_FN (BUILT_IN_SINH):
	CASE_FLT_FN (BUILT_IN_TANH):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	/* True if the 1st argument is nonnegative.  */
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_FMAX):
	/* True if the 1st OR 2nd arguments are nonnegative.  */
	return (tree_expr_nonnegative_warnv_p (arg0,
					       strict_overflow_p)
		|| (tree_expr_nonnegative_warnv_p (arg1,
						   strict_overflow_p)));

	CASE_FLT_FN (BUILT_IN_FMIN):
	/* True if the 1st AND 2nd arguments are nonnegative.  */
	return (tree_expr_nonnegative_warnv_p (arg0,
					       strict_overflow_p)
		&& (tree_expr_nonnegative_warnv_p (arg1,
						   strict_overflow_p)));

	CASE_FLT_FN (BUILT_IN_COPYSIGN):
	/* True if the 2nd argument is nonnegative.  */
	return tree_expr_nonnegative_warnv_p (arg1,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_POWI):
	/* True if the 1st argument is nonnegative or the second
	   argument is an even integer.  */
	if (TREE_CODE (arg1) == INTEGER_CST
	    && (TREE_INT_CST_LOW (arg1) & 1) == 0)
	  return true;
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_POW):
	/* True if the 1st argument is nonnegative or the second
	   argument is an even integer valued real.  */
	if (TREE_CODE (arg1) == REAL_CST)
	  {
	    REAL_VALUE_TYPE c;
	    HOST_WIDE_INT n;

	    c = TREE_REAL_CST (arg1);
	    n = real_to_integer (&c);
	    if ((n & 1) == 0)
	      {
		REAL_VALUE_TYPE cint;
		real_from_integer (&cint, VOIDmode, n, SIGNED);
		if (real_identical (&c, &cint))
		  return true;
	      }
	  }
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

      default:
	break;
      }
  return tree_simple_nonnegative_warnv_p (CALL_EXPR,
					  type);
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code = TREE_CODE (t);
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (code)
    {
    case TARGET_EXPR:
      {
	tree temp = TARGET_EXPR_SLOT (t);
	t = TARGET_EXPR_INITIAL (t);

	/* If the initializer is non-void, then it's a normal expression
	   that will be assigned to the slot.  */
	if (!VOID_TYPE_P (t))
	  return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);

	/* Otherwise, the initializer sets the slot in some way.  One common
	   way is an assignment statement at the end of the initializer.  */
	while (1)
	  {
	    if (TREE_CODE (t) == BIND_EXPR)
	      t = expr_last (BIND_EXPR_BODY (t));
	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
		     || TREE_CODE (t) == TRY_CATCH_EXPR)
	      t = expr_last (TREE_OPERAND (t, 0));
	    else if (TREE_CODE (t) == STATEMENT_LIST)
	      t = expr_last (t);
	    else
	      break;
	  }
	if (TREE_CODE (t) == MODIFY_EXPR
	    && TREE_OPERAND (t, 0) == temp)
	  return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
						strict_overflow_p);

	return false;
      }

    case CALL_EXPR:
      {
	tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
	tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;

	return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
					      get_callee_fndecl (t),
					      arg0,
					      arg1,
					      strict_overflow_p);
      }
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
					    strict_overflow_p);
    case BIND_EXPR:
      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
					    strict_overflow_p);
    case SAVE_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
					    strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code;
  if (t == error_mark_node)
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p);

    case tcc_unary:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p);
    case TRUTH_NOT_EXPR:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
    }
}
/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */

bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-negative"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
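/* Taken together, the routines above form one recursive predicate split
   by arity: tree_expr_nonnegative_warnv_p dispatches on TREE_CODE_CLASS
   to the unary/binary/single/invalid helpers, which recurse back
   through it for their operands.  A typical use from a client (a sketch
   only) is:

     bool sop = false;
     if (tree_expr_nonnegative_warnv_p (expr, &sop))
       ...simplify, calling fold_overflow_warning if SOP was set...  */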
/* Return true when (CODE OP0) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
			    bool *strict_overflow_p)
{
  switch (code)
    {
    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
					strict_overflow_p);

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (op0);
	tree outer_type = type;

	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
		&& tree_expr_nonzero_warnv_p (op0,
					      strict_overflow_p));
      }
      break;

    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
					strict_overflow_p);

    default:
      break;
    }

  return false;
}
/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonzero_warnv_p (enum tree_code code,
			     tree type,
			     tree op0,
			     tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* With the presence of negative values it is hard
	     to say something.  */
	  sub_strict_overflow_p = false;
	  if (!tree_expr_nonnegative_warnv_p (op0,
					      &sub_strict_overflow_p)
	      || !tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	    return false;
	  /* One of operands must be positive and the other non-negative.  */
	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
	     overflows, on a twos-complement machine the sum of two
	     nonnegative numbers can never be zero.  */
	  return (tree_expr_nonzero_warnv_p (op0,
					     strict_overflow_p)
		  || tree_expr_nonzero_warnv_p (op1,
						strict_overflow_p));
	}
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  if (tree_expr_nonzero_warnv_p (op0,
					 strict_overflow_p)
	      && tree_expr_nonzero_warnv_p (op1,
					    strict_overflow_p))
	    {
	      *strict_overflow_p = true;
	      return true;
	    }
	}
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (op1,
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	}
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;

	  /* When both operands are nonzero, then MAX must be too.  */
	  if (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p))
	    return true;

	  /* MAX where operand 0 is positive is positive.  */
	  return tree_expr_nonnegative_warnv_p (op0,
						strict_overflow_p);
	}
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1,
					  &sub_strict_overflow_p)
	       && tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p)
	      || tree_expr_nonzero_warnv_p (op0,
					    strict_overflow_p));

    default:
      break;
    }

  return false;
}
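/* The PLUS_EXPR case above in miniature: if x >= 0 and y >= 0 and at
   least one of them is nonzero, then x + y is nonzero.  Even a wrapping
   two's-complement sum of two nonnegative values can only be zero when
   both are zero, which is why that conclusion by itself does not need
   to set *STRICT_OVERFLOW_P.  */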
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
	tree base = TREE_OPERAND (t, 0);

	if (!DECL_P (base))
	  base = get_base_address (base);

	if (!base)
	  return false;

	/* For objects in symbol table check if we know they are non-zero.
	   Don't do anything for variables and functions before symtab is built;
	   it is quite possible that they will be declared weak later.  */
	if (DECL_P (base) && decl_in_symtab_p (base))
	  {
	    struct symtab_node *symbol;

	    symbol = symtab_node::get (base);
	    if (symbol)
	      return symbol->nonzero_address ();
	    else
	      return false;
	  }

	/* Function local objects are never NULL.  */
	if (DECL_P (base)
	    && (DECL_CONTEXT (base)
		&& TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
		&& auto_var_in_fn_p (base, DECL_CONTEXT (base))))
	  return true;

	/* Constants are never weak.  */
	if (CONSTANT_CLASS_P (base))
	  return true;

	return false;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    default:
      break;
    }

  return false;
}
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;
      location_t loc = EXPR_LOCATION (exp);

      if (TREE_CODE (exp) == INDIRECT_REF)
	string = string_constant (exp1, &index);
      else
	{
	  tree low_bound = array_ref_low_bound (exp);
	  index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));

	  /* Optimize the special-case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion, (ARRAY
	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
	  if (! integer_zerop (low_bound))
	    index = size_diffop_loc (loc, index,
				     fold_convert_loc (loc, sizetype,
						       low_bound));

	  string = exp1;
	}

      if (string
	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
	  && TREE_CODE (string) == STRING_CST
	  && TREE_CODE (index) == INTEGER_CST
	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
	      == MODE_INT)
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
	return build_int_cst_type (TREE_TYPE (exp),
				   (TREE_STRING_POINTER (string)
				    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}
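/* For example, "abc"[1] (an ARRAY_REF with a zero lower bound) and
   *("abc" + 1) (an INDIRECT_REF) both pass the checks above and fold to
   the character constant 'b'.  */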
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	bool overflow;
	wide_int val = wi::neg (arg0, &overflow);
	t = force_fit_type (type, val, 1,
			    (overflow | TREE_OVERFLOW (arg0))
			    && !TYPE_UNSIGNED (type));
	break;
      }

    case REAL_CST:
      t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
	FIXED_VALUE_TYPE f;
	bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
					    &(TREE_FIXED_CST (arg0)), NULL,
					    TYPE_SATURATING (type));
	t = build_fixed (type, f);
	/* Propagate overflow flags.  */
	if (overflow_p | TREE_OVERFLOW (arg0))
	  TREE_OVERFLOW (t) = 1;
	break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	/* If the value is unsigned or non-negative, then the absolute value
	   is the same as the ordinary value.  */
	if (!wi::neg_p (arg0, TYPE_SIGN (type)))
	  t = arg0;

	/* If the value is negative, then the absolute value is
	   its negation.  */
	else
	  {
	    bool overflow;
	    wide_int val = wi::neg (arg0, &overflow);
	    t = force_fit_type (type, val, -1,
				overflow | TREE_OVERFLOW (arg0));
	  }
      }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
	t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      else
	t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (const_tree arg0, tree type)
{
  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
}
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case ORDERED_EXPR:
	      result = 0;
	      break;

	    case NE_EXPR:
	    case UNORDERED_EXPR:
	    case UNLT_EXPR:
	    case UNLE_EXPR:
	    case UNGT_EXPR:
	    case UNGE_EXPR:
	    case UNEQ_EXPR:
	      result = 1;
	      break;

	    case LT_EXPR:
	    case LE_EXPR:
	    case GT_EXPR:
	    case GE_EXPR:
	    case LTGT_EXPR:
	      if (flag_trapping_math)
		return NULL_TREE;
	      result = 0;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  return constant_boolean_node (result, type);
	}

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
					  TREE_REALPART (op0),
					  TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
					  TREE_IMAGPART (op0),
					  TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
	return NULL_TREE;
    }

  if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
    {
      unsigned count = VECTOR_CST_NELTS (op0);
      tree *elts = XALLOCAVEC (tree, count);
      gcc_assert (VECTOR_CST_NELTS (op1) == count
		  && TYPE_VECTOR_SUBPARTS (type) == count);

      for (unsigned i = 0; i < count; i++)
	{
	  tree elem_type = TREE_TYPE (type);
	  tree elem0 = VECTOR_CST_ELT (op0, i);
	  tree elem1 = VECTOR_CST_ELT (op1, i);

	  tree tem = fold_relational_const (code, elem_type,
					    elem0, elem1);

	  if (tem == NULL_TREE)
	    return NULL_TREE;

	  elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
	}

      return build_vector (type, elts);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     Otherwise return T.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	result = tree_int_cst_equal (op0, op1);
      else
	result = tree_int_cst_lt (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
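/* A worked instance of the reduction above: folding 5 >= 7 inverts GE
   to LT, computes 5 < 7 as 1, and flips the result to 0 (false);
   folding 7 > 5 instead swaps the operands and computes 5 < 7
   directly.  */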
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check whether the expression inside
     the return, or the right-hand side of the modify expression inside
     the return, has no side effects.  If either has none, we don't need
     to wrap the expression in a cleanup point expression.  Note we don't
     check the left-hand side of the modify because it should always be a
     return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
	return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
	return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

static tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
	return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
	{
	  tree fop = fold_read_from_constant_string (op);
	  if (fop)
	    return fop;
	  else
	    return op;
	}
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && type == TREE_TYPE (optype)
	       && (!in_gimple_form
		   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (in_gimple_form
	      && TREE_CODE (min_val) != INTEGER_CST)
	    return NULL_TREE;
	  return build4_loc (loc, ARRAY_REF, type, op, min_val,
			     NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && type == TREE_TYPE (optype))
	return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
				  index);
	}
    }

  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
	{
	  tree op00type;
	  op00 = TREE_OPERAND (op00, 0);
	  op00type = TREE_TYPE (op00);

	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
	  if (TREE_CODE (op00type) == VECTOR_TYPE
	      && type == TREE_TYPE (op00type))
	    {
	      HOST_WIDE_INT offset = tree_to_shwi (op01);
	      tree part_width = TYPE_SIZE (type);
	      unsigned HOST_WIDE_INT part_widthi
		= tree_to_shwi (part_width) / BITS_PER_UNIT;
	      unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	      tree index = bitsize_int (indexi);

	      if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
		return fold_build3_loc (loc,
					BIT_FIELD_REF, type, op00,
					part_width, index);
	    }
	  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree size = TYPE_SIZE_UNIT (type);
	      if (tree_int_cst_equal (size, op01))
		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
	    }
	  /* ((foo *)&fooarray)[1] => fooarray[1] */
	  else if (TREE_CODE (op00type) == ARRAY_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree type_domain = TYPE_DOMAIN (op00type);
	      tree min_val = size_zero_node;
	      if (type_domain && TYPE_MIN_VALUE (type_domain))
		min_val = TYPE_MIN_VALUE (type_domain);
	      op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
				     TYPE_SIZE_UNIT (type));
	      op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
	      return build4_loc (loc, ARRAY_REF, type, op00, op01,
				 NULL_TREE, NULL_TREE);
	    }
	}
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
	  || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
	  && TREE_CODE (min_val) != INTEGER_CST)
	return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
			 NULL_TREE);
    }

  return NULL_TREE;
}
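/* For instance, given _Complex double c (and 8-byte doubles), the
   reference *(double *)&c hits the ADDR_EXPR COMPLEX_TYPE case and
   folds to __real__ c, while ((double *)&c)[1] reaches the
   POINTER_PLUS_EXPR case with op01 equal to TYPE_SIZE_UNIT (double)
   and folds to __imag__ c.  */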
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}
/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.  */

tree
round_up_loc (location_t loc, tree value, unsigned int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
	{
	  wide_int val = value;
	  bool overflow_p;

	  if ((val & (divisor - 1)) == 0)
	    return value;

	  overflow_p = TREE_OVERFLOW (value);
	  val &= ~(divisor - 1);
	  val += divisor;
	  if (val == 0)
	    overflow_p = true;

	  return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
	}
      else
	{
	  tree t;

	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
	  t = build_int_cst (TREE_TYPE (value), -divisor);
	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
	}
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
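/* The power-of-two fast path above is the classic alignment idiom.  A
   self-contained plain-C sketch of the same computation, assuming
   DIVISOR is a power of two (illustrative only, not used by GCC):  */
#if 0
static unsigned long
round_up_example (unsigned long value, unsigned long divisor)
{
  /* Already a multiple: nothing to do.  */
  if ((value & (divisor - 1)) == 0)
    return value;
  /* Otherwise clear the low bits to round down, then bump by DIVISOR.  */
  return (value & ~(divisor - 1)) + divisor;
}
#endif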
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
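/* Likewise for the round-down fast path: for a power-of-two DIVISOR,
   masking with -DIVISOR clears the low bits, since in two's complement
   -DIVISOR == ~(DIVISOR - 1).  E.g. 13 rounded down to a multiple of 4
   is 13 & -4 == 12.  */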
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep,
				  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
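/* For example, with int A[10] and 4-byte ints, E1 = &A[3] and E2 = &A[1]
   share the core &A with constant bit positions 96 and 32, so the
   routine stores (96 - 32) / BITS_PER_UNIT = 8 in *DIFF and returns
   true.  Had exactly one operand carried a non-constant offset tree, it
   would have returned false instead.  */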
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
	return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
				arg0 ? arg0 : TREE_OPERAND (exp, 0),
				arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
	return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0,
				arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
	return fold_build3_loc (loc,
				COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
				arg0 ? arg0 : TREE_OPERAND (exp, 1),
				arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
	const enum built_in_function fcode = builtin_mathfn_code (exp);
	switch (fcode)
	  {
	  CASE_FLT_FN (BUILT_IN_COPYSIGN):
	    /* Strip copysign function call, return the 1st argument.  */
	    arg0 = CALL_EXPR_ARG (exp, 0);
	    arg1 = CALL_EXPR_ARG (exp, 1);
	    return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

	  default:
	    /* Strip sign ops from the argument of "odd" math functions.  */
	    if (negate_mathfn_p (fcode))
	      {
		arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
		if (arg0)
		  return build_call_expr_loc (loc, get_callee_fndecl (exp),
					      1, arg0);