/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
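
/* Illustrative usage of the size_binop entry point (a sketch, not
   code from this file): a caller building the folded sizetype
   expression A + 4 would write

     tree sum = size_binop (PLUS_EXPR, a, size_int (4));

   where `a' is assumed to be an existing tree of type sizetype; when
   both operands are constants the result is folded to a constant.  */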
#include "coretypes.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "diagnostic-core.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "hash-table.h"  /* Required for ENABLE_FOLD_CHECKING.  */
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
				HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
				    HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
				    tree *, tree *);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
					tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree, tree, tree, int);
static tree fold_mathfn_compare (location_t,
				 enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
/* Similar to protected_set_expr_location, but never modify x in place;
   if the location can and needs to be set, unshare it.  */

static tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}

/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

static tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
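
/* Typical deferral pattern (an illustrative sketch; EXPR and STMT are
   assumed to be a tree and an associated gimple statement in the
   caller):

     fold_defer_overflow_warnings ();
     tree val = fold (expr);
     fold_undefer_overflow_warnings (val != NULL_TREE
				     && TREE_CODE (val) == INTEGER_CST,
				     stmt, 0);

   so a warning queued while folding is only issued when the caller
   actually uses the folded result.  */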
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
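
/* Worked example of the oddness property this predicate encodes
   (illustrative only): round (-2.5) == -round (2.5) and
   sin (-x) == -sin (x), so -round (x) may be folded to round (-x).
   The rint family is odd only when the result cannot depend on the
   dynamic rounding mode, hence the !flag_rounding_math guard
   above.  */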
/* Check whether we may negate an integer constant T without causing
   overflow.  */

static bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}
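
/* Example: in a signed 32-bit type only INT_MIN, the value with just
   the sign bit set, has no representable negation, so the
   wi::only_sign_bit_p test above rejects exactly that constant.  */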
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  type = TREE_TYPE (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	int count = TYPE_VECTOR_SUBPARTS (type), i;

	for (i = 0; i < count; i++)
	  if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	     && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
	{
	  if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
	    break;
	  /* If overflow is undefined then we have to be careful because
	     we ask whether it's ok to associate the negate with the
	     division which is not ok for example for
	     -((a - b) / c) where (-(a - b)) / c may invoke undefined
	     overflow because of negating INT_MIN.  So do not use
	     negate_expr_p here but open-code the two important cases.  */
	  if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
	      || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
	    return true;
	}
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || !TYPE_OVERFLOW_TRAPS (type))
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
	return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	int count = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts = XALLOCAVEC (tree, count);

	for (i = 0; i < count; i++)
	  {
	    elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  const char * const warnmsg = G_("assuming signed overflow does not "
					  "occur when negating a division");
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || integer_onep (tem)))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				      TREE_OPERAND (t, 0), negate_expr (tem));
	    }
	  /* If overflow is undefined then we have to be careful because
	     we ask whether it's ok to associate the negate with the
	     division which is not ok for example for
	     -((a - b) / c) where (-(a - b)) / c may invoke undefined
	     overflow because of negating INT_MIN.  So do not use
	     negate_expr_p here but open-code the two important cases.  */
	  tem = TREE_OPERAND (t, 0);
	  if ((INTEGRAL_TYPE_P (type)
	       && (TREE_CODE (tem) == NEGATE_EXPR
		   || (TREE_CODE (tem) == INTEGER_CST
		       && may_negate_without_overflow_p (tem))))
	      || !INTEGRAL_TYPE_P (type))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
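
/* Worked example (illustrative): splitting IN = x + 3 with CODE ==
   PLUS_EXPR stores 3 in *LITP, leaves *CONP and *MINUS_LITP null, and
   returns x as the variable part; splitting x - 3 instead stores the
   3 in *MINUS_LITP, recording that it was subtracted.  */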
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
		   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
				  TYPE_SIGN (TREE_TYPE (parg2)));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RSHIFT_EXPR)
	    code = LSHIFT_EXPR;
	  else
	    code = RSHIFT_EXPR;
	}

      if (code == RSHIFT_EXPR)
	/* It's unclear from the C standard whether shifts can overflow.
	   The following code ignores overflow; perhaps a C standard
	   interpretation ruling is needed.  */
	res = wi::rshift (arg1, arg2, sign);
      else
	res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, arg2);
      else
	res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
		      (((sign == SIGNED || overflowable == -1)
			&& overflow)
		       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
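
/* Worked example (illustrative): for a signed 32-bit type,
   int_const_binop (PLUS_EXPR, INT_MAX, 1) computes 0x80000000 and,
   because the signed addition overflowed, force_fit_type marks the
   resulting INTEGER_CST with TREE_OVERFLOW.  */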
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    wide_int w2 = arg2;
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.elt (0);
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fall through.  */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to minimize
		 overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  elts[i] = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      if (code == VEC_LSHIFT_EXPR
	  || code == VEC_RSHIFT_EXPR)
	{
	  if (!tree_fits_uhwi_p (arg2))
	    return NULL_TREE;

	  unsigned HOST_WIDE_INT shiftc = tree_to_uhwi (arg2);
	  unsigned HOST_WIDE_INT outerc = tree_to_uhwi (TYPE_SIZE (type));
	  unsigned HOST_WIDE_INT innerc
	    = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
	  if (shiftc >= outerc || (shiftc % innerc) != 0)
	    return NULL_TREE;
	  int offset = shiftc / innerc;

	  /* The direction of VEC_[LR]SHIFT_EXPR is endian dependent.
	     For reductions, compiler emits VEC_RSHIFT_EXPR always,
	     for !BYTES_BIG_ENDIAN picks first vector element, but
	     for BYTES_BIG_ENDIAN last element from the vector.  */
	  if ((code == VEC_RSHIFT_EXPR) ^ (!BYTES_BIG_ENDIAN))
	    offset = -offset;

	  tree zero = build_zero_cst (TREE_TYPE (type));
	  for (i = 0; i < count; i++)
	    {
	      if (i + offset < 0 || i + offset >= count)
		elts[i] = zero;
	      else
		elts[i] = VECTOR_CST_ELT (arg1, i + offset);
	    }
	}
      else
	for (i = 0; i < count; i++)
	  {
	    tree elem1 = VECTOR_CST_ELT (arg1, i);

	    elts[i] = const_binop (code, elem1, arg2);

	    /* It is possible that const_binop cannot handle the given
	       code and returns NULL_TREE.  */
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

      return build_vector (type, elts);
    }

  return NULL_TREE;
}
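
/* A minimal standalone sketch (illustrative, not code from this file)
   of the wide-range complex division used above, written on plain
   doubles so the algorithm is visible; fabs is assumed from <math.h>:

     static void
     complex_div_wide_sketch (double ar, double ai, double br,
			      double bi, double *tr, double *ti)
     {
       if (fabs (br) < fabs (bi))
	 {
	   double ratio = br / bi;
	   double div = bi + br * ratio;
	   *tr = (ar * ratio + ai) / div;
	   *ti = (ai * ratio - ar) / div;
	 }
       else
	 {
	   double ratio = bi / br;
	   double div = br + bi * ratio;
	   *tr = (ar + ai * ratio) / div;
	   *ti = (ai - ar * ratio) / div;
	 }
     }

   Scaling by RATIO keeps the intermediate products small: for
   example, dividing 1 by 1e300 + 1e300i this way avoids the overflow
   that the straightforward br*br + bi*bi denominator would
   produce.  */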
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, a la int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}
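
/* Worked example (illustrative): sizetype is unsigned, so for
   arg0 = 4 and arg1 = 8 the code above takes the final branch and
   computes 0 - (8 - 4), yielding -4 in the signed ssizetype, a value
   the unsigned subtraction could not represent directly.  */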
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
			 !POINTER_TYPE_P (TREE_TYPE (arg1)),
			 TREE_OVERFLOW (arg1));
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	{
	  overflow = true;
	  val = lt;
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	    {
	      overflow = true;
	      val = ut;
	    }
	}
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
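
/* Worked examples of the saturating semantics above (illustrative,
   for a 32-bit int target type): (int) 1.0e30 folds to INT_MAX,
   (int) -1.0e30 folds to INT_MIN, and (int) NaN folds to 0, each
   with TREE_OVERFLOW set on the result.  */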
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  enum machine_mode mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
			  HOST_BITS_PER_DOUBLE_INT,
			  SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
				HOST_BITS_PER_DOUBLE_INT,
				SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     We do so by adding 1 to temp when the fractional bits are not all
     zero.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type (type, temp, -1,
		      (temp.is_negative ()
		       && (TYPE_UNSIGNED (type)
			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
		      | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	   && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
	   && REAL_VALUE_ISINF (value)
	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
			      TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;
  double_int di;

  gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);

  di.low = TREE_INT_CST_ELT (arg1, 0);
  if (TREE_INT_CST_NUNITS (arg1) == 1)
    di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
  else
    di.high = TREE_INT_CST_ELT (arg1, 1);

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
				       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
					&TREE_REAL_CST (arg1),
					TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}
/* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
	      && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
	  && (TYPE_ADDR_SPACE (TREE_TYPE (type))
	      != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
	return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* Fall through.  */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, REALPART_EXPR,
						  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == FIXED_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

	case REAL_TYPE:
	  return fold_build1_loc (loc, NOP_EXPR, type, arg);

	case FIXED_POINT_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
	  || TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    goto fold_convert_exit;
	}

      switch (TREE_CODE (orig))
	{
	case FIXED_POINT_TYPE:
	case INTEGER_TYPE:
	case ENUMERAL_TYPE:
	case BOOLEAN_TYPE:
	case REAL_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	case REAL_TYPE:
	case FIXED_POINT_TYPE:
	  return fold_build2_loc (loc, COMPLEX_EXPR, type,
				  fold_convert_loc (loc, TREE_TYPE (type), arg),
				  fold_convert_loc (loc, TREE_TYPE (type),
						    integer_zero_node));

	case COMPLEX_TYPE:
	  {
	    tree rpart, ipart;

	    if (TREE_CODE (arg) == COMPLEX_EXPR)
	      {
		rpart = fold_convert_loc (loc, TREE_TYPE (type),
					  TREE_OPERAND (arg, 0));
		ipart = fold_convert_loc (loc, TREE_TYPE (type),
					  TREE_OPERAND (arg, 1));
		return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	      }

	    arg = save_expr (arg);
	    rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	    ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
	    rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
	    ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
	    return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	  }

	default:
	  gcc_unreachable ();
	}

    case VECTOR_TYPE:
      if (integer_zerop (arg))
	return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
		  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
	return fold_build1_loc (loc, NOP_EXPR, type, arg);
      gcc_unreachable ();
    }
 fold_convert_exit:
  return protected_set_expr_location_unshare (tem, loc);
}
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (const_tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
    {
    case ARRAY_RANGE_REF:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
      return true;

    default:
      /* Assume the worst for front-end tree codes.  */
      if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
	return true;
      return false;
    }
}
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue_loc (location_t loc, tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;

  return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
}
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue_loc (location_t loc, tree x)
{
  if (pedantic_lvalues)
    return non_lvalue_loc (loc, x);

  return protected_set_expr_location_unshare (x, loc);
}
/* Given a tree comparison code, return the code that is the logical inverse.
   It is generally not safe to do this for floating-point comparisons, except
   for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
   ERROR_MARK in this case.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
      && code != ORDERED_EXPR && code != UNORDERED_EXPR)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
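
/* Worked example (illustrative): when NaNs are honored, the logical
   inverse of x < y is x UNGE y, not x >= y: for x = NaN both x < y
   and x >= y are false.  This is why UNGE_EXPR is returned above, and
   why the function bails out with ERROR_MARK when the unordered
   comparison could trap under flag_trapping_math.  */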
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    case UNGT_EXPR:
      return UNLT_EXPR;
    case UNGE_EXPR:
      return UNLE_EXPR;
    case UNLT_EXPR:
      return UNGT_EXPR;
    case UNLE_EXPR:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

tree
combine_comparisons (location_t loc,
                     enum tree_code code, enum tree_code lcode,
                     enum tree_code rcode, tree truth_type,
                     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  int compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
         which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
        compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
        compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
         under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
                   && (lcompcode != COMPCODE_EQ)
                   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
                   && (rcompcode != COMPCODE_EQ)
                   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
                  && (compcode != COMPCODE_EQ)
                  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
         such that the RHS, if evaluated, will never trap.  For
         example, in ORD (x, y) && (x < y), we evaluate the RHS only
         if neither x nor y is NaN.  (This is a mixed blessing: for
         example, the expression above will never trap, hence
         optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
          || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
        rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
         trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
          && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
        return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
        return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    {
      enum tree_code tcode;

      tcode = compcode_to_comparison ((enum comparison_code) compcode);
      return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
    }
}
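
/* For example, combining (a < b) && (a == b) gives
   COMPCODE_LT & COMPCODE_EQ == COMPCODE_FALSE, which folds to constant
   false above, while (a < b) || (a == b) gives COMPCODE_LE and folds
   to a <= b.  */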
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */

int
operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
      || TREE_TYPE (arg0) == error_mark_node
      || TREE_TYPE (arg1) == error_mark_node)
    return 0;

  /* Similar, if either does not have a type (like a released SSA name),
     they aren't equal.  */
  if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
    return 0;

  /* Check equality of integer constants before bailing out due to
     precision differences.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    return tree_int_cst_equal (arg0, arg1);

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  As pointers
     strictly don't have a signedness, require either two pointers or
     two non-pointers as well.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
      || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* We cannot consider pointers to different address space equal.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
      && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
          != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
    return 0;

  /* If both types don't have the same precision, then it is not safe
     to strip NOPs.  */
  if (element_precision (TREE_TYPE (arg0))
      != element_precision (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* In case both args are comparisons but with different comparison
     code, try to swap the comparison operands of one arg to produce
     a match and compare that variant.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      && COMPARISON_CLASS_P (arg0)
      && COMPARISON_CLASS_P (arg1))
    {
      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));

      if (TREE_CODE (arg0) == swap_code)
        return operand_equal_p (TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg1, 1), flags)
               && operand_equal_p (TREE_OPERAND (arg0, 1),
                                   TREE_OPERAND (arg1, 0), flags);
    }

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* NOP_EXPR and CONVERT_EXPR are considered equal.  */
      && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
    return 0;

  /* This is needed for conversions and for COMPONENT_REF.
     Might as well play it safe and always test this.  */
  if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.
     If we are taking an invariant address of two identical objects
     they are necessarily equal as well.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
          || (flags & OEP_CONSTANT_ADDRESS_OF)
          || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;
  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
        return tree_int_cst_equal (arg0, arg1);

      case FIXED_CST:
        return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
                                       TREE_FIXED_CST (arg1));

      case REAL_CST:
        if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
                                   TREE_REAL_CST (arg1)))
          return 1;

        if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
          {
            /* If we do not distinguish between signed and unsigned zero,
               consider them equal.  */
            if (real_zerop (arg0) && real_zerop (arg1))
              return 1;
          }
        return 0;

      case VECTOR_CST:
        {
          unsigned i;

          if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
            return 0;

          for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
            if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
                                  VECTOR_CST_ELT (arg1, i), flags))
              return 0;

          return 1;
        }

      case COMPLEX_CST:
        return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
                                 flags)
                && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
                                    flags));

      case STRING_CST:
        return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
                && ! memcmp (TREE_STRING_POINTER (arg0),
                             TREE_STRING_POINTER (arg1),
                             TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
        return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
                                TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
                                ? OEP_CONSTANT_ADDRESS_OF : 0);

      default:
        break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;
  /* Define macros to test an operand from arg0 and arg1 for equality and a
     variant that allows null and views null as being different from any
     non-null value.  In the latter case, if either is null, both
     must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
                                    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N) \
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
        {
        CASE_CONVERT:
        case FIX_TRUNC_EXPR:
          if (TYPE_UNSIGNED (TREE_TYPE (arg0))
              != TYPE_UNSIGNED (TREE_TYPE (arg1)))
            return 0;
          break;
        default:
          break;
        }

      return OP_SAME (0);

    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
        return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 1), flags)
              && operand_equal_p (TREE_OPERAND (arg0, 1),
                                  TREE_OPERAND (arg1, 0), flags));
    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
         dereferencing contain a side effect, these cannot be equal,
         but their addresses can be.  */
      if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
          && (TREE_SIDE_EFFECTS (arg0)
              || TREE_SIDE_EFFECTS (arg1)))
        return 0;

      switch (TREE_CODE (arg0))
        {
        case INDIRECT_REF:
          flags &= ~OEP_CONSTANT_ADDRESS_OF;
          return OP_SAME (0);

        case TARGET_MEM_REF:
          flags &= ~OEP_CONSTANT_ADDRESS_OF;
          /* Require equal extra operands and then fall through to MEM_REF
             handling of the two common operands.  */
          if (!OP_SAME_WITH_NULL (2)
              || !OP_SAME_WITH_NULL (3)
              || !OP_SAME_WITH_NULL (4))
            return 0;
          /* Fallthru.  */
        case MEM_REF:
          flags &= ~OEP_CONSTANT_ADDRESS_OF;
          /* Require equal access sizes, and similar pointer types.
             We can have incomplete types for array references of
             variable-sized arrays from the Fortran frontend
             though.  Also verify the types are compatible.  */
          return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
                   || (TYPE_SIZE (TREE_TYPE (arg0))
                       && TYPE_SIZE (TREE_TYPE (arg1))
                       && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
                                           TYPE_SIZE (TREE_TYPE (arg1)),
                                           flags)))
                  && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
                  && alias_ptr_types_compatible_p
                       (TREE_TYPE (TREE_OPERAND (arg0, 1)),
                        TREE_TYPE (TREE_OPERAND (arg1, 1)))
                  && OP_SAME (0) && OP_SAME (1));
        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          /* Operands 2 and 3 may be null.
             Compare the array index by value if it is constant first as we
             may have different types but same value here.  */
          if (!OP_SAME (0))
            return 0;
          flags &= ~OEP_CONSTANT_ADDRESS_OF;
          return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
                                       TREE_OPERAND (arg1, 1))
                   || OP_SAME (1))
                  && OP_SAME_WITH_NULL (2)
                  && OP_SAME_WITH_NULL (3));

        case COMPONENT_REF:
          /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
             may be NULL when we're called to compare MEM_EXPRs.  */
          if (!OP_SAME_WITH_NULL (0)
              || !OP_SAME (1))
            return 0;
          flags &= ~OEP_CONSTANT_ADDRESS_OF;
          return OP_SAME_WITH_NULL (2);

        case BIT_FIELD_REF:
          if (!OP_SAME (0))
            return 0;
          flags &= ~OEP_CONSTANT_ADDRESS_OF;
          return OP_SAME (1) && OP_SAME (2);

        default:
          return 0;
        }
    case tcc_expression:
      switch (TREE_CODE (arg0))
        {
        case ADDR_EXPR:
        case TRUTH_NOT_EXPR:
          return OP_SAME (0);

        case TRUTH_ANDIF_EXPR:
        case TRUTH_ORIF_EXPR:
          return OP_SAME (0) && OP_SAME (1);

        case FMA_EXPR:
        case WIDEN_MULT_PLUS_EXPR:
        case WIDEN_MULT_MINUS_EXPR:
          if (!OP_SAME (2))
            return 0;
          /* The multiplication operands are commutative.  */
          /* FALLTHRU */

        case TRUTH_AND_EXPR:
        case TRUTH_OR_EXPR:
        case TRUTH_XOR_EXPR:
          if (OP_SAME (0) && OP_SAME (1))
            return 1;

          /* Otherwise take into account this is a commutative operation.  */
          return (operand_equal_p (TREE_OPERAND (arg0, 0),
                                   TREE_OPERAND (arg1, 1), flags)
                  && operand_equal_p (TREE_OPERAND (arg0, 1),
                                      TREE_OPERAND (arg1, 0), flags));

        case COND_EXPR:
        case VEC_COND_EXPR:
        case DOT_PROD_EXPR:
          return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

        default:
          return 0;
        }
    case tcc_vl_exp:
      switch (TREE_CODE (arg0))
        {
        case CALL_EXPR:
          /* If the CALL_EXPRs call different functions, then they
             clearly can not be equal.  */
          if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
                                 flags))
            return 0;

          {
            unsigned int cef = call_expr_flags (arg0);
            if (flags & OEP_PURE_SAME)
              cef &= ECF_CONST | ECF_PURE;
            else
              cef &= ECF_CONST;
            if (!cef)
              return 0;
          }

          /* Now see if all the arguments are the same.  */
          {
            const_call_expr_arg_iterator iter0, iter1;
            const_tree a0, a1;
            for (a0 = first_const_call_expr_arg (arg0, &iter0),
                   a1 = first_const_call_expr_arg (arg1, &iter1);
                 a0 && a1;
                 a0 = next_const_call_expr_arg (&iter0),
                   a1 = next_const_call_expr_arg (&iter1))
              if (! operand_equal_p (a0, a1, flags))
                return 0;

            /* If we get here and both argument lists are exhausted
               then the CALL_EXPRs are equal.  */
            return ! (a0 || a1);
          }
        default:
          return 0;
        }
    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
              && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
              && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
              && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}
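
/* For example, operand_equal_p treats "a + b" and "b + a" as equal
   (the commutative cases above), and an ADDR_EXPR of an object as
   equal to itself, but it rejects any operand with side effects such
   as "i++" unless the side effects come from a shared SAVE_EXPR.  */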
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
         to match the longer operand.  */
      primarg1 = fold_convert (signed_or_unsigned_type_for
                               (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
        return 1;
    }

  return 0;
}
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
               || code == COMPOUND_EXPR))
    tclass = tcc_binary;

  else if (tclass == tcc_expression && code == SAVE_EXPR
           && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
         too complex to handle.  */
      if (*cval1 || *cval2)
        return 0;

      tclass = tcc_unary;
      *save_p = 1;
    }

  switch (tclass)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
              && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
        return (twoval_comparison_p (TREE_OPERAND (arg, 0),
                                     cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                        cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 2),
                                        cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
         the second operand, we know *CVAL1 can't be zero.  It must be that
         one side of the comparison is each of the values; test for the
         case where this isn't true by failing if the two operands
         are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
                           TREE_OPERAND (arg, 1), 0))
        return 0;

      if (*cval1 == 0)
        *cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
        ;
      else
        return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
        ;
      else
        return 0;

      return 1;

    default:
      return 0;
    }
}
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (location_t loc, tree arg, tree old0, tree new0,
            tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    tclass = tcc_binary;

  switch (tclass)
    {
    case tcc_unary:
      return fold_build1_loc (loc, code, type,
                              eval_subst (loc, TREE_OPERAND (arg, 0),
                                          old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2_loc (loc, code, type,
                              eval_subst (loc, TREE_OPERAND (arg, 0),
                                          old0, new0, old1, new1),
                              eval_subst (loc, TREE_OPERAND (arg, 1),
                                          old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
        {
        case SAVE_EXPR:
          return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
                             old1, new1);

        case COMPOUND_EXPR:
          return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
                             old1, new1);

        case COND_EXPR:
          return fold_build3_loc (loc, code, type,
                                  eval_subst (loc, TREE_OPERAND (arg, 0),
                                              old0, new0, old1, new1),
                                  eval_subst (loc, TREE_OPERAND (arg, 1),
                                              old0, new0, old1, new1),
                                  eval_subst (loc, TREE_OPERAND (arg, 2),
                                              old0, new0, old1, new1));
        default:
          break;
        }
      /* Fall through - ???  */

    case tcc_comparison:
      {
        tree arg0 = TREE_OPERAND (arg, 0);
        tree arg1 = TREE_OPERAND (arg, 1);

        /* We need to check both for exact equality and tree equality.  The
           former will be true if the operand has a side-effect.  In that
           case, we know the operand occurred exactly once.  */

        if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
          arg0 = new0;
        else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
          arg0 = new1;

        if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
          arg1 = new0;
        else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
          arg1 = new1;

        return fold_build2_loc (loc, code, type, arg0, arg1);
      }

    default:
      return arg;
    }
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement casted to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1_loc (loc, NOP_EXPR, void_type_node,
                       fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2_loc (loc, COMPOUND_EXPR, type,
                       fold_ignored_result (omitted), t);

  return non_lvalue_loc (loc, t);
}
/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
                               tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement casted to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1_loc (loc, NOP_EXPR, void_type_node,
                       fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2_loc (loc, COMPOUND_EXPR, type,
                       fold_ignored_result (omitted), t);

  return pedantic_non_lvalue_loc (loc, t);
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands_loc (location_t loc, tree type, tree result,
                       tree omitted1, tree omitted2)
{
  tree t = fold_convert_loc (loc, type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
  if (TREE_SIDE_EFFECTS (omitted1))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
}
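
/* For example, folding "x * 0" goes through omit_one_operand_loc: the
   result is the constant 0, but when x is a call such as "f ()" its
   side effects must still happen, so the fold yields "(f (), 0)"
   rather than plain 0.  */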
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

static tree
fold_truth_not_expr (location_t loc, tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  location_t loc1, loc2;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
          && flag_trapping_math
          && code != ORDERED_EXPR && code != UNORDERED_EXPR
          && code != NE_EXPR && code != EQ_EXPR)
        return NULL_TREE;

      code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
      if (code == ERROR_MARK)
        return NULL_TREE;

      return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
                         TREE_OPERAND (arg, 1));
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_OR_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_AND_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
         unless the second operand is a TRUTH_NOT_EXPR in which case our
         result is the XOR of the first operand with the inside of the
         negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
        return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
                           TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
        return build2_loc (loc, TRUTH_XOR_EXPR, type,
                           invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
                           TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ORIF_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
        tree arg1 = TREE_OPERAND (arg, 1);
        tree arg2 = TREE_OPERAND (arg, 2);

        loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
        loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);

        /* A COND_EXPR may have a throw as one operand, which
           then has void type.  Just leave void operands
           as they are.  */
        return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
                           VOID_TYPE_P (TREE_TYPE (arg1))
                           ? arg1 : invert_truthvalue_loc (loc1, arg1),
                           VOID_TYPE_P (TREE_TYPE (arg2))
                           ? arg2 : invert_truthvalue_loc (loc2, arg2));
      }

    case COMPOUND_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, COMPOUND_EXPR, type,
                         TREE_OPERAND (arg, 0),
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));

    CASE_CONVERT:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
        return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

      /* ... fall through ...  */

    case FLOAT_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, TREE_CODE (arg), type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
        return NULL_TREE;
      return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));

    case SAVE_EXPR:
      return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, CLEANUP_POINT_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    default:
      return NULL_TREE;
    }
}
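
/* For example, !(a && b) is rewritten above as (!a || !b) following
   De Morgan's laws, and !(x < y) becomes x >= y, or x UNGE y when
   NaNs are honored, via invert_tree_comparison.  */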
/* Fold the truth-negation of ARG.  This never alters ARG itself.  We
   assume that ARG is an operation that returns a truth value (0 or 1
   for scalars, 0 or -1 for vectors).  Return the folded expression if
   folding is successful.  Otherwise, return NULL_TREE.  */

static tree
fold_invert_truthvalue (location_t loc, tree arg)
{
  tree type = TREE_TYPE (arg);
  return fold_unary_loc (loc, VECTOR_TYPE_P (type)
                              ? BIT_NOT_EXPR
                              : TRUTH_NOT_EXPR,
                         type, arg);
}

/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1 for scalars, 0 or -1 for vectors).  */

tree
invert_truthvalue_loc (location_t loc, tree arg)
{
  if (TREE_CODE (arg) == ERROR_MARK)
    return arg;

  tree type = TREE_TYPE (arg);
  return fold_build1_loc (loc, VECTOR_TYPE_P (type)
                               ? BIT_NOT_EXPR
                               : TRUTH_NOT_EXPR,
                          type, arg);
}
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
        (A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (location_t loc, enum tree_code code, tree type,
                     tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
          && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  common = fold_convert_loc (loc, type, common);
  left = fold_convert_loc (loc, type, left);
  right = fold_convert_loc (loc, type, right);
  return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
                          fold_build2_loc (loc, code, type, left, right));
}
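
/* With constants the saving is larger: (x | 4) & (x | 8) becomes
   x | (4 & 8), and the inner fold then reduces it to x | 0 and
   finally to plain x.  */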
/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
   with code CODE.  This optimization is unsafe.  */

static tree
distribute_real_division (location_t loc, enum tree_code code, tree type,
                          tree arg0, tree arg1)
{
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
                          TREE_OPERAND (arg1, 1), 0))
    return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
                            fold_build2_loc (loc, code, type,
                                             TREE_OPERAND (arg0, 0),
                                             TREE_OPERAND (arg1, 0)),
                            TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
                       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      if (!mul0)
        real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
        real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2_loc (loc, MULT_EXPR, type,
                              TREE_OPERAND (arg0, 0),
                              build_real (type, r0));
    }

  return NULL_TREE;
}
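
/* The transformation is unsafe because (A / C) + (B / C) and
   (A + B) / C may round differently, and 1 / C1 need not be exactly
   representable; callers are expected to guard it with the
   unsafe-math-optimization flags.  */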
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

static tree
make_bit_field_ref (location_t loc, tree inner, tree type,
                    HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
{
  tree result, bftype;

  if (bitpos == 0)
    {
      tree size = TYPE_SIZE (TREE_TYPE (inner));
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
           || POINTER_TYPE_P (TREE_TYPE (inner)))
          && tree_fits_shwi_p (size)
          && tree_to_shwi (size) == bitsize)
        return fold_convert_loc (loc, type, inner);
    }

  bftype = type;
  if (TYPE_PRECISION (bftype) != bitsize
      || TYPE_UNSIGNED (bftype) == !unsignedp)
    bftype = build_nonstandard_integer_type (bitsize, 0);

  result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
                       size_int (bitsize), bitsize_int (bitpos));

  if (bftype != type)
    result = fold_convert_loc (loc, type, result);

  return result;
}
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (location_t loc, enum tree_code code,
                            tree compare_type, tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
                                &lunsignedp, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
         sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
                                    &runsignedp, &rvolatilep, false);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
          || lunsignedp != runsignedp || offset != 0
          || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
        return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
                         const_p ? TYPE_ALIGN (TREE_TYPE (linner))
                         : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
                                TYPE_ALIGN (TREE_TYPE (rinner))),
                         word_mode, false);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_cst_type (unsigned_type, -1);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
  mask = const_binop (RSHIFT_EXPR, mask,
                      size_int (nbitsize - lbitsize - lbitpos));

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return fold_build2_loc (loc, code, compare_type,
                            fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
                                             make_bit_field_ref (loc, linner,
                                                                 unsigned_type,
                                                                 nbitsize,
                                                                 nbitpos, 1),
                                             mask),
                            fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
                                             make_bit_field_ref (loc, rinner,
                                                                 unsigned_type,
                                                                 nbitsize,
                                                                 nbitpos, 1),
                                             mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (wi::lrshift (rhs, lbitsize) != 0)
        {
          warning (0, "comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }
  else
    {
      wide_int tem = wi::arshift (rhs, lbitsize - 1);
      if (tem != 0 && tem != -1)
        {
          warning (0, "comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = build_int_cst (type, 0);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);

  rhs = const_binop (BIT_AND_EXPR,
                     const_binop (LSHIFT_EXPR,
                                  fold_convert_loc (loc, unsigned_type, rhs),
                                  size_int (lbitpos)),
                     mask);

  lhs = build2_loc (loc, code, compare_type,
                    build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
  return lhs;
}
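
/* As a sketch of the constant case: for "struct { unsigned f : 3; } s"
   the test "s.f == 5" becomes roughly
     (WORD_CONTAINING (s.f) & MASK) == ((5 << lbitpos) & MASK)
   where MASK selects the three field bits; the shift a plain bit-field
   extraction would need is avoided.  WORD_CONTAINING is only notation
   here for the mode-sized load built by make_bit_field_ref.  */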
/* Subroutine for fold_truth_andor_1: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
                        HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
                        int *punsignedp, int *pvolatilep,
                        tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;
  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (CONVERT_EXPR_P (exp)
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
        return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
                               punsignedp, pvolatilep, false);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst_type (unsigned_type, -1);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
                            fold_convert_loc (loc, unsigned_type, and_mask),
                            mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bit positions and MASK is SIGNED.  */

static bool
all_ones_mask_p (const_tree mask, unsigned int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);

  /* If this function returns true when the type of the mask is
     UNSIGNED, then there will be errors.  In particular see
     gcc.c-torture/execute/990326-1.c.  There does not appear to be
     any documentation paper trail as to why this is so.  But the pre
     wide-int worked with that restriction and it has been preserved
     here.  */
  if (size > precision || TYPE_SIGN (type) == UNSIGNED)
    return false;

  return wi::mask (size, false, precision) == mask;
}
/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, const_tree val)
{
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (wi::only_sign_bit_p (val, width))
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
/* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (const_tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  STRIP_NOPS (exp);

  return (CONSTANT_CLASS_P (exp)
          || TREE_CODE (exp) == SSA_NAME
          || (DECL_P (exp)
              && ! TREE_ADDRESSABLE (exp)
              && ! TREE_THIS_VOLATILE (exp)
              && ! DECL_NONLOCAL (exp)
              /* Don't regard global variables as simple.  They may be
                 allocated in ways unknown to the compiler (shared memory,
                 #pragma weak, etc).  */
              && ! TREE_PUBLIC (exp)
              && ! DECL_EXTERNAL (exp)
              /* Weakrefs are not safe to be read, since they can be NULL.
                 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
                 have DECL_WEAK flag set.  */
              && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
              /* Loading a static variable is unduly expensive, but global
                 registers aren't expensive.  */
              && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}
/* Subroutine for fold_truth_andor: determine if an operand is simple enough
   to be evaluated unconditionally.
   In addition to simple_operand_p, we assume that comparisons, conversions,
   and logic-not operations are simple, if their operands are simple, too.  */

static bool
simple_operand_p_2 (tree exp)
{
  enum tree_code code;

  if (TREE_SIDE_EFFECTS (exp)
      || tree_could_trap_p (exp))
    return false;

  while (CONVERT_EXPR_P (exp))
    exp = TREE_OPERAND (exp, 0);

  code = TREE_CODE (exp);

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return (simple_operand_p (TREE_OPERAND (exp, 0))
            && simple_operand_p (TREE_OPERAND (exp, 1)));

  if (code == TRUTH_NOT_EXPR)
    return simple_operand_p_2 (TREE_OPERAND (exp, 0));

  return simple_operand_p (exp);
}
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
        X == 2 || X == 3 || X == 4 || X == 5
   and
        X >= 2 && X <= 5
   are converted to
        (unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
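
/* The arithmetic behind the example above: subtracting 2 maps the set
   {2, 3, 4, 5} onto {0, 1, 2, 3}, and in unsigned arithmetic any X
   below 2 wraps around to a huge value, so the whole disjunction
   collapses into the single test (unsigned) (X - 2) <= 3.  */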
/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
             tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
                         arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);

  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}
/* Helper routine for make_range.  Perform one step for it, return
   new expression if the loop should continue or NULL_TREE if it should
   stop.  */

tree
make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
                 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
                 bool *strict_overflow_p)
{
  tree arg0_type = TREE_TYPE (arg0);
  tree n_low, n_high, low = *p_low, high = *p_high;
  int in_p = *p_in_p, n_in_p;

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      /* We can only do something if the range is testing for zero.  */
      if (low == NULL_TREE || high == NULL_TREE
          || ! integer_zerop (low) || ! integer_zerop (high))
        return NULL_TREE;
      *p_in_p = ! in_p;
      return arg0;

    case EQ_EXPR: case NE_EXPR:
    case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
      /* We can only do something if the range is testing for zero
         and if the second operand is an integer constant.  Note that
         saying something is "in" the range we make is done by
         complementing IN_P since it will set in the initial case of
         being not equal to zero; "out" is leaving it alone.  */
      if (low == NULL_TREE || high == NULL_TREE
          || ! integer_zerop (low) || ! integer_zerop (high)
          || TREE_CODE (arg1) != INTEGER_CST)
        return NULL_TREE;

      switch (code)
        {
        case NE_EXPR:  /* - [c, c]  */
          low = high = arg1;
          break;
        case EQ_EXPR:  /* + [c, c]  */
          in_p = ! in_p, low = high = arg1;
          break;
        case GT_EXPR:  /* - [-, c] */
          low = 0, high = arg1;
          break;
        case GE_EXPR:  /* + [c, -] */
          in_p = ! in_p, low = arg1, high = 0;
          break;
        case LT_EXPR:  /* - [c, -] */
          low = arg1, high = 0;
          break;
        case LE_EXPR:  /* + [-, c] */
          in_p = ! in_p, low = 0, high = arg1;
          break;
        default:
          gcc_unreachable ();
        }

      /* If this is an unsigned comparison, we also know that EXP is
         greater than or equal to zero.  We base the range tests we make
         on that fact, so we record it here so we can parse existing
         range tests.  We test arg0_type since often the return type
         of, e.g. EQ_EXPR, is boolean.  */
      if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
        {
          if (! merge_ranges (&n_in_p, &n_low, &n_high,
                              in_p, low, high, 1,
                              build_int_cst (arg0_type, 0),
                              NULL_TREE))
            return NULL_TREE;

          in_p = n_in_p, low = n_low, high = n_high;

          /* If the high bound is missing, but we have a nonzero low
             bound, reverse the range so it goes from zero to the low bound
             minus 1.  */
          if (high == 0 && low && ! integer_zerop (low))
            {
              in_p = ! in_p;
              high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
                                  build_int_cst (TREE_TYPE (low), 1), 0);
              low = build_int_cst (arg0_type, 0);
            }
        }

      *p_low = low;
      *p_high = high;
      *p_in_p = in_p;
      return arg0;

    case NEGATE_EXPR:
      /* If flag_wrapv and ARG0_TYPE is signed, make sure
         low and high are non-NULL, then normalize will DTRT.  */
      if (!TYPE_UNSIGNED (arg0_type)
          && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
        {
          if (low == NULL_TREE)
            low = TYPE_MIN_VALUE (arg0_type);
          if (high == NULL_TREE)
            high = TYPE_MAX_VALUE (arg0_type);
        }

      /* (-x) IN [a,b] -> x in [-b, -a]  */
      n_low = range_binop (MINUS_EXPR, exp_type,
                           build_int_cst (exp_type, 0),
                           0, high, 1);
      n_high = range_binop (MINUS_EXPR, exp_type,
                            build_int_cst (exp_type, 0),
                            0, low, 0);
      if (n_high != 0 && TREE_OVERFLOW (n_high))
        return NULL_TREE;
      goto normalize;

    case BIT_NOT_EXPR:
      /* ~ X -> -X - 1  */
      return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
                         build_int_cst (exp_type, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
      if (TREE_CODE (arg1) != INTEGER_CST)
        return NULL_TREE;

      /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
         move a constant to the other side.  */
      if (!TYPE_UNSIGNED (arg0_type)
          && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
        return NULL_TREE;

      /* If EXP is signed, any overflow in the computation is undefined,
         so we don't worry about it so long as our computations on
         the bounds don't overflow.  For unsigned, overflow is defined
         and this is exactly the right thing.  */
      n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                           arg0_type, low, 0, arg1, 0);
      n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                            arg0_type, high, 1, arg1, 0);
      if ((n_low != 0 && TREE_OVERFLOW (n_low))
          || (n_high != 0 && TREE_OVERFLOW (n_high)))
        return NULL_TREE;

      if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
        *strict_overflow_p = true;

      normalize:
        /* Check for an unsigned range which has wrapped around the maximum
           value thus making n_high < n_low, and normalize it.  */
        if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
          {
            low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
                               build_int_cst (TREE_TYPE (n_high), 1), 0);
            high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
                                build_int_cst (TREE_TYPE (n_low), 1), 0);

            /* If the range is of the form +/- [ x+1, x ], we won't
               be able to normalize it.  But then, it represents the
               whole range or the empty set, so make it
               +/- [ -, - ].  */
            if (tree_int_cst_equal (n_low, low)
                && tree_int_cst_equal (n_high, high))
              low = high = 0;
            else
              in_p = ! in_p;
          }
        else
          low = n_low, high = n_high;

        *p_low = low;
        *p_high = high;
        *p_in_p = in_p;
        return arg0;

    CASE_CONVERT:
    case NON_LVALUE_EXPR:
      if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
        return NULL_TREE;

      if (! INTEGRAL_TYPE_P (arg0_type)
          || (low != 0 && ! int_fits_type_p (low, arg0_type))
          || (high != 0 && ! int_fits_type_p (high, arg0_type)))
        return NULL_TREE;

      n_low = low, n_high = high;

      if (n_low != 0)
        n_low = fold_convert_loc (loc, arg0_type, n_low);

      if (n_high != 0)
        n_high = fold_convert_loc (loc, arg0_type, n_high);

      /* If we're converting arg0 from an unsigned type, to exp,
         a signed type, we will be doing the comparison as unsigned.
         The tests above have already verified that LOW and HIGH
         are both positive.

         So we have to ensure that we will handle large unsigned
         values the same way that the current signed bounds treat
         negative values.  */

      if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
        {
          tree high_positive;
          tree equiv_type;
          /* For fixed-point modes, we need to pass the saturating flag
             as the 2nd parameter.  */
          if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
            equiv_type
              = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
                                                TYPE_SATURATING (arg0_type));
          else
            equiv_type
              = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);

          /* A range without an upper bound is, naturally, unbounded.
             Since convert would have cropped a very large value, use
             the max value for the destination type.  */
          high_positive
            = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
              : TYPE_MAX_VALUE (arg0_type);

          if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
            high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
                                             fold_convert_loc (loc, arg0_type,
                                                               high_positive),
                                             build_int_cst (arg0_type, 1));

          /* If the low bound is specified, "and" the range with the
             range for which the original unsigned value will be
             positive.  */
          if (low != 0)
            {
              if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
                                  1, fold_convert_loc (loc, arg0_type,
                                                       integer_zero_node),
                                  high_positive))
                return NULL_TREE;

              in_p = (n_in_p == in_p);
            }
          else
            {
              /* Otherwise, "or" the range with the range of the input
                 that will be interpreted as negative.  */
              if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
                                  1, fold_convert_loc (loc, arg0_type,
                                                       integer_zero_node),
                                  high_positive))
                return NULL_TREE;

              in_p = (in_p != n_in_p);
            }
        }

      *p_low = n_low;
      *p_high = n_high;
      *p_in_p = in_p;
      return arg0;

    default:
      return NULL_TREE;
    }
}
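
/* For example, parsing "x + 3 == 7": the EQ_EXPR step above produces
   the range + [7, 7], and the PLUS_EXPR step then moves the constant
   into the bounds, yielding + [4, 4] for x itself.  */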
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same
   type as the returned expression.  If EXP is not a comparison, we
   will most likely not be returning a useful value and range.  Set
   *STRICT_OVERFLOW_P to true if the return value is only valid
   because signed overflow is undefined; otherwise, do not change
   *STRICT_OVERFLOW_P.  */

tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
            bool *strict_overflow_p)
{
  enum tree_code code;
  tree arg0, arg1 = NULL_TREE;
  tree exp_type, nexp;
  int in_p;
  tree low, high;
  location_t loc = EXPR_LOCATION (exp);

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = build_int_cst (TREE_TYPE (exp), 0);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);
      arg0 = NULL_TREE;

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
        {
          if (TREE_OPERAND_LENGTH (exp) > 0)
            arg0 = TREE_OPERAND (exp, 0);
          if (TREE_CODE_CLASS (code) == tcc_binary
              || TREE_CODE_CLASS (code) == tcc_comparison
              || (TREE_CODE_CLASS (code) == tcc_expression
                  && TREE_OPERAND_LENGTH (exp) > 1))
            arg1 = TREE_OPERAND (exp, 1);
        }
      if (arg0 == NULL_TREE)
        break;

      nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
                              &high, &in_p, strict_overflow_p);
      if (nexp == NULL_TREE)
        break;
      exp = nexp;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
                                                 exp, 0, low, 0))
                      && integer_onep (range_binop (LE_EXPR, integer_type_node,
                                                    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.  */

tree
build_range_check (location_t loc, tree type, tree exp, int in_p,
		   tree low, tree high)
{
  tree etype = TREE_TYPE (exp), value;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (etype) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (! in_p)
    {
      value = build_range_check (loc, type, exp, 1, low, high);
      if (value != 0)
	return invert_truthvalue_loc (loc, value);

      return 0;
    }

  if (low == 0 && high == 0)
    return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);

  if (low == 0)
    return fold_build2_loc (loc, LE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, high));

  if (high == 0)
    return fold_build2_loc (loc, GE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  if (operand_equal_p (low, high, 0))
    return fold_build2_loc (loc, EQ_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
	{
	  etype = unsigned_type_for (etype);
	  high = fold_convert_loc (loc, etype, high);
	  exp = fold_convert_loc (loc, etype, exp);
	}
      return build_range_check (loc, type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      int prec = TYPE_PRECISION (etype);

      if (wi::mask (prec - 1, false, prec) == high)
	{
	  if (TYPE_UNSIGNED (etype))
	    {
	      tree signed_etype = signed_type_for (etype);
	      if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
		etype
		  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
	      else
		etype = signed_etype;
	      exp = fold_convert_loc (loc, etype, exp);
	    }
	  return fold_build2_loc (loc, GT_EXPR, type, exp,
				  build_int_cst (etype, 0));
	}
    }

  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetic for the type of the expression.
     First make sure that arithmetic in this type is valid, then make sure
     that it wraps around.  */
  if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
    etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
					    TYPE_UNSIGNED (etype));

  if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
	 for the type in question, as we rely on this here.  */
      utype = unsigned_type_for (etype);
      maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
			  build_int_cst (TREE_TYPE (maxv), 1), 1);
      minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
				      minv, 1, maxv, 1)))
	etype = utype;
      else
	return 0;
    }

  high = fold_convert_loc (loc, etype, high);
  low = fold_convert_loc (loc, etype, low);
  exp = fold_convert_loc (loc, etype, exp);

  value = const_binop (MINUS_EXPR, high, low);

  if (POINTER_TYPE_P (etype))
    {
      if (value != 0 && !TREE_OVERFLOW (value))
	{
	  low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
	  return build_range_check (loc, type,
				    fold_build_pointer_plus_loc (loc, exp, low),
				    1, build_int_cst (etype, 0), value);
	}
      return 0;
    }

  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (loc, type,
			      fold_build2_loc (loc, MINUS_EXPR, etype,
					       exp, low),
			      1, build_int_cst (etype, 0), value);

  return 0;
}
/* Return the predecessor of VAL in its type, handling the infinite case.  */

static tree
range_predecessor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
    return 0;
  else
    return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
			build_int_cst (TREE_TYPE (val), 1), 0);
}

/* Return the successor of VAL in its type, handling the infinite case.  */

static tree
range_successor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
    return 0;
  else
    return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
			build_int_cst (TREE_TYPE (val), 1), 0);
}
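
/* For instance, range_successor on the unsigned char value 255 yields
   0 (no successor exists); callers such as merge_ranges treat that as
   "infinite" and punt on the transformation.  */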
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

bool
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
	      tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (lowequal
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the second range to the end of the first.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)
	{
	  low = range_successor (high1);
	  high = high0;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* We are in the weird situation where high0 > high1 but
		 high1 has no successor.  Punt.  */
	      return 0;
	    }
	}
      else if (! subset || highequal)
	{
	  low = low0;
	  high = range_predecessor (low1);
	  in_p = 1;
	  if (high == 0)
	    {
	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
	      return 0;
	    }
	}
      else
	return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
	in_p = 0, low = high = 0;
      else
	{
	  low = range_successor (high0);
	  high = high1;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* high1 > high0 but high0 has no successor.  Punt.  */
	      return 0;
	    }
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of the
	 second.  */
      if (no_overlap)
	{
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_successor (high0),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    {
	      /* Canonicalize - [min, x] into - [-, x].  */
	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (low0)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (low0))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (low0,
					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
		      low0 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
			&& integer_zerop (low0))
		      low0 = 0;
		    break;
		  default:
		    break;
		  }

	      /* Canonicalize - [x, max] into - [x, -].  */
	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (high1)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (high1))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (high1,
					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
		      high1 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
						       high1, 1,
						       build_int_cst (TREE_TYPE (high1), 1),
						       1)))
		      high1 = 0;
		    break;
		  default:
		    break;
		  }

	      /* The ranges might be also adjacent between the maximum and
		 minimum values of the given type.  For
		 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
		 return + [x + 1, y - 1].  */
	      if (low0 == 0 && high1 == 0)
		{
		  low = range_successor (high0);
		  high = range_predecessor (low1);
		  if (low == 0 || high == 0)
		    return 0;

		  in_p = 1;
		}
	      else
		return 0;
	    }
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
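
/* As a worked example: the two tests ch >= '0' and ch <= '9' arrive
   here as the ranges +['0', -] and +[-, '9'] with both IN0_P and IN1_P
   set.  Neither range subsumes the other and they overlap, so the
   "include both" case above yields the single range +['0', '9'].  */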
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is also used to optimize
   A op B ? C : A, by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (location_t loc, tree type,
				tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && (FLOAT_TYPE_P (TREE_TYPE (arg01))
	  ? real_zerop (arg01)
	  : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
	     /* In the case that A is of the form X-Y, '-A' (arg2) may
		have already been folded to Y-X, check for that.  */
	  || (TREE_CODE (arg1) == MINUS_EXPR
	      && TREE_CODE (arg2) == MINUS_EXPR
	      && operand_equal_p (TREE_OPERAND (arg1, 0),
				  TREE_OPERAND (arg2, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg1, 1),
				  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	tem = fold_convert_loc (loc, arg1_type, arg1);
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
							  negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert_loc (loc, signed_type_for
				   (TREE_TYPE (arg1)), arg1);
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case LE_EXPR:
      case LT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert_loc (loc, signed_type_for
				   (TREE_TYPE (arg1)), arg1);
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return negate_expr (fold_convert_loc (loc, type, tem));
      default:
	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      else if (comp_code == EQ_EXPR)
	return build_zero_cst (type);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
	 as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
	  || VECTOR_TYPE_P (type)
	  || (strcmp (lang_hooks.name, "GNU C++") != 0
	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
	  || ! maybe_lvalue_p (arg1)
	  || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
	{
	  comp_type = type;
	  comp_op0 = arg1;
	  comp_op1 = arg2;
	}

      switch (comp_code)
	{
	case EQ_EXPR:
	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
	case NE_EXPR:
	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
	case LE_EXPR:
	case LT_EXPR:
	case UNLE_EXPR:
	case UNLT_EXPR:
	  /* In C++ a ?: expression can be an lvalue, so put the
	     operand which will be used if they are equal first
	     so that we can convert this back to the
	     corresponding COND_EXPR.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
		    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2_loc (loc, MIN_EXPR, comp_type,
				       comp_op1, comp_op0);
	      return pedantic_non_lvalue_loc (loc,
					      fold_convert_loc (loc, type, tem));
	    }
	  break;
	case GE_EXPR:
	case GT_EXPR:
	case UNGE_EXPR:
	case UNGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
		    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2_loc (loc, MAX_EXPR, comp_type,
				       comp_op1, comp_op0);
	      return pedantic_non_lvalue_loc (loc,
					      fold_convert_loc (loc, type, tem));
	    }
	  break;
	case UNEQ_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, arg2));
	  break;
	case LTGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, arg1));
	  break;
	default:
	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	  break;
	}
    }

  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
	if (TREE_CODE (arg1) == INTEGER_CST)
	  break;
	/* We can replace A with C1 in this case.  */
	arg1 = fold_convert_loc (loc, type, arg01);
	return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);

      case LT_EXPR:
	/* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
	   MIN_EXPR, to preserve the signedness of the comparison.  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, tem));
	  }
	break;

      case LE_EXPR:
	/* If C1 is C2 - 1, this is min(A, C2), with the same care
	   as above.  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, tem));
	  }
	break;

      case GT_EXPR:
	/* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
	   MAX_EXPR, to preserve the signedness of the comparison.  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	  }
	break;

      case GE_EXPR:
	/* If C1 is C2 + 1, this is max(A, C2), with the same care as above.  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	  }
	break;

      case NE_EXPR:
	break;

      default:
	gcc_unreachable ();
      }

  return NULL_TREE;
}
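
/* As worked examples of the transformations above: when signed zeros
   need not be honored, (x >= 0.0 ? x : -x) becomes ABS_EXPR <x>, and
   (a < b ? a : b) becomes MIN_EXPR <a, b>, subject to the NaN and
   lvalue restrictions spelled out in the comments.  */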
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
		false) >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
	       || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  bool strict_overflow_p = false;
  tree tem, lhs, rhs;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when simplifying range test");

  if (!INTEGRAL_TYPE_P (type))
    return 0;

  lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
  rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (loc, type,
					 lhs != 0 ? lhs
					 : rhs != 0 ? rhs : integer_zero_node,
					 in_p, low, high))))
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
      return or_op ? invert_truthvalue_loc (loc, tem) : tem;
    }

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
	   && lhs != 0 && rhs != 0
	   && (code == TRUTH_ANDIF_EXPR
	       || code == TRUTH_ORIF_EXPR)
	   && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build2_loc (loc, code == TRUTH_ANDIF_EXPR
			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			   type, op0, op1);

      else if (!lang_hooks.decls.global_bindings_p ()
	       && !CONTAINS_PLACEHOLDER_P (lhs))
	{
	  tree common = save_expr (lhs);

	  if (0 != (lhs = build_range_check (loc, type, common,
					     or_op ? ! in0_p : in0_p,
					     low0, high0))
	      && (0 != (rhs = build_range_check (loc, type, common,
						 or_op ? ! in1_p : in1_p,
						 low1, high1))))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (warnmsg,
				       WARN_STRICT_OVERFLOW_COMPARISON);
	      return build2_loc (loc, code == TRUTH_ANDIF_EXPR
				 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
				 type, lhs, rhs);
	    }
	}
    }

  return 0;
}
/* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is signed-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (signed_type_for (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
			fold_convert (TREE_TYPE (c), mask));
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
}
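
/* As a worked example: with P = 3, UNSIGNEDP = 0, MASK = 0 and a
   32-bit mode, the properly sign-extended constant 0xfffffffd (-3 in
   3 bits) comes back as 5 (binary 101 with the extra bits zero), so
   the masked compare built by fold_truth_andor_1 sees clean high
   bits.  */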
/* For an expression that has the form
     (A && B) || ~B
   or
     (A || B) && ~B,
   we can drop one of the inner expressions and simplify to
     A || ~B
   or
     A && ~B

   LOC is the location of the resulting expression.  OP is the inner
   logical operation; the left-hand side in the examples above, while CMPOP
   is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
   removing a condition that guards another, as in
     (A != NULL && A->...) || A == NULL
   which we must not transform.  If RHS_ONLY is true, only eliminate the
   right-most operand of the inner logical operation.  */

static tree
merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
				 bool rhs_only)
{
  tree type = TREE_TYPE (cmpop);
  enum tree_code code = TREE_CODE (cmpop);
  enum tree_code truthop_code = TREE_CODE (op);
  tree lhs = TREE_OPERAND (op, 0);
  tree rhs = TREE_OPERAND (op, 1);
  tree orig_lhs = lhs, orig_rhs = rhs;
  enum tree_code rhs_code = TREE_CODE (rhs);
  enum tree_code lhs_code = TREE_CODE (lhs);
  enum tree_code inv_code;

  if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
    return NULL_TREE;

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return NULL_TREE;

  if (rhs_code == truthop_code)
    {
      tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
      if (newrhs != NULL_TREE)
	{
	  rhs = newrhs;
	  rhs_code = TREE_CODE (rhs);
	}
    }
  if (lhs_code == truthop_code && !rhs_only)
    {
      tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
      if (newlhs != NULL_TREE)
	{
	  lhs = newlhs;
	  lhs_code = TREE_CODE (lhs);
	}
    }

  inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
  if (inv_code == rhs_code
      && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return lhs;
  if (!rhs_only && inv_code == lhs_code
      && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return rhs;
  if (rhs != orig_rhs || lhs != orig_lhs)
    return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
			    lhs, rhs);
  return NULL_TREE;
}
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with the a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
		    tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  HOST_WIDE_INT first_bit, end_bit;
  int volatilep;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
		    build_int_cst (TREE_TYPE (lhs), 0));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
		    build_int_cst (TREE_TYPE (rhs), 0));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      if (operand_equal_p (ll_arg, rl_arg, 0)
	  && operand_equal_p (lr_arg, rr_arg, 0))
	{
	  result = combine_comparisons (loc, code, lcode, rcode,
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
      else if (operand_equal_p (ll_arg, rr_arg, 0)
	       && operand_equal_p (lr_arg, rl_arg, 0))
	{
	  result = combine_comparisons (loc, code, lcode,
					swap_tree_comparison (rcode),
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  */

  if (BRANCH_COST (optimize_function_for_speed_p (cfun),
		   false) >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
	  && lcode == NE_EXPR && integer_zerop (lr_arg)
	  && rcode == NE_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2_loc (loc, NE_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2_loc (loc, EQ_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (loc, ll_arg,
				     &ll_bitsize, &ll_bitpos, &ll_mode,
				     &ll_unsignedp, &volatilep, &ll_mask,
				     &ll_and_mask);
  lr_inner = decode_field_reference (loc, lr_arg,
				     &lr_bitsize, &lr_bitpos, &lr_mode,
				     &lr_unsignedp, &volatilep, &lr_mask,
				     &lr_and_mask);
  rl_inner = decode_field_reference (loc, rl_arg,
				     &rl_bitsize, &rl_bitpos, &rl_mode,
				     &rl_unsignedp, &volatilep, &rl_mask,
				     &rl_and_mask);
  rr_inner = decode_field_reference (loc, rr_arg,
				     &rr_bitsize, &rr_bitpos, &rr_mode,
				     &rr_unsignedp, &volatilep, &rr_mask,
				     &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
	   || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
	{
	  /* Make the left operand unsigned, since we are only interested
	     in the value of one bit.  Otherwise we are doing the wrong
	     thing below.  */
	  ll_unsignedp = 1;
	  l_const = ll_mask;
	}
      else
	return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
	{
	  rl_unsignedp = 1;
	  r_const = rl_mask;
	}
      else
	return 0;
    }

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
			  volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
			 size_int (xll_bitpos));
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
			 size_int (xrl_bitpos));

  if (l_const)
    {
      l_const = fold_convert_loc (loc, lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,
							 lntype, ll_mask))))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }
  if (r_const)
    {
      r_const = fold_convert_loc (loc, lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,
							 lntype, rl_mask))))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
	  /* Make sure the two fields on the right
	     correspond to the left without being swapped.  */
	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
	return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
			      volatilep);
      if (rnmode == VOIDmode)
	return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
	{
	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
	}

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
							    rntype, lr_mask),
			     size_int (xlr_bitpos));
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
							    rntype, rr_mask),
			     size_int (xrr_bitpos));

      /* Make a mask that corresponds to both fields being compared.
	 Do this for both items being compared.  If the operands are the
	 same size and the bits being compared are in the same position
	 then we can do this by masking both and comparing the masked
	 results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
	{
	  lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
				    ll_unsignedp || rl_unsignedp);
	  if (! all_ones_mask_p (ll_mask, lnbitsize))
	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

	  rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
				    lr_unsignedp || rr_unsignedp);
	  if (! all_ones_mask_p (lr_mask, rnbitsize))
	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
	}

      /* There is still another way we can do something:  If both pairs of
	 fields being compared are adjacent, we may be able to make a wider
	 field containing them both.

	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
	 the mask must be shifted to account for the shift done by
	 make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
	   && lr_bitsize + lr_bitpos == rr_bitpos)
	  || (ll_bitpos == rl_bitpos + rl_bitsize
	      && lr_bitpos == rr_bitpos + rr_bitsize))
	{
	  tree type;

	  lhs = make_bit_field_ref (loc, ll_inner, lntype,
				    ll_bitsize + rl_bitsize,
				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
	  rhs = make_bit_field_ref (loc, lr_inner, rntype,
				    lr_bitsize + rr_bitsize,
				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
				 size_int (MIN (xll_bitpos, xrl_bitpos)));
	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
				 size_int (MIN (xlr_bitpos, xrr_bitpos)));

	  /* Convert to the smaller type before masking out unwanted bits.  */
	  type = lntype;
	  if (lntype != rntype)
	    {
	      if (lnbitsize > rnbitsize)
		{
		  lhs = fold_convert_loc (loc, rntype, lhs);
		  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
		  type = rntype;
		}
	      else if (lnbitsize < rnbitsize)
		{
		  rhs = fold_convert_loc (loc, lntype, rhs);
		  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
		  type = lntype;
		}
	    }

	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
	}

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
			   const_binop (BIT_AND_EXPR, result, r_const)) != 1)
    {
      if (wanted_code == NE_EXPR)
	{
	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
	  return constant_boolean_node (true, truth_type);
	}
      else
	{
	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
	  return constant_boolean_node (false, truth_type);
	}
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
			       ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);

  return build2_loc (loc, wanted_code, truth_type, result,
		     const_binop (BIT_IOR_EXPR, l_const, r_const));
}
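
/* As a worked example of the constant case above: if P->A and P->B are
   adjacent bit-fields, p->a == 2 && p->b == 4 is rewritten as a single
   load of the word containing both fields, masked with the union of
   the two field masks and compared against the combined constant built
   by the final BIT_IOR_EXPR.  */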
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
			    tree op0, tree op1)
{
  tree arg0 = op0;
  enum tree_code op_code;
  tree comp_const;
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_OVERFLOW (minmax_const))
    return NULL_TREE;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (code)
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      {
	tree tem
	  = optimize_minmax_comparison (loc,
					invert_tree_comparison (code, false),
					type, op0, op1);
	if (tem)
	  return invert_truthvalue_loc (loc, tem);
	return NULL_TREE;
      }

    case GE_EXPR:
      return
	fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
			 optimize_minmax_comparison
			 (loc, EQ_EXPR, type, arg0, comp_const),
			 optimize_minmax_comparison
			 (loc, GT_EXPR, type, arg0, comp_const));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
	/* MAX (X, 0) == 0  ->  X <= 0  */
	return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR && consts_lt)
	/* MAX (X, 0) == 5  ->  X == 5   */
	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) == -1  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else if (consts_equal)
	/* MIN (X, 0) == 0  ->  X >= 0  */
	return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);

      else if (consts_lt)
	/* MIN (X, 0) == 5  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
	/* MIN (X, 0) == -1  ->  X == -1  */
	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
	/* MAX (X, 0) > 0  ->  X > 0
	   MAX (X, 0) > 5  ->  X > 5  */
	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) > -1  ->  true  */
	return omit_one_operand_loc (loc, type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
	/* MIN (X, 0) > 0  ->  false
	   MIN (X, 0) > 5  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
	/* MIN (X, 0) > -1  ->  X > -1  */
	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

    default:
      return NULL_TREE;
    }
}
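
/* As a worked example: MAX (x, 4) > 4 takes the GT_EXPR case with
   CONSTS_EQUAL set and folds to x > 4, while MAX (x, 4) >= 4 is first
   decomposed into the EQ_EXPR and GT_EXPR subproblems joined by
   TRUTH_ORIF_EXPR.  */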
/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.

   We set *STRICT_OVERFLOW_P to true if the return values depends on
   signed overflow being undefined.  Otherwise we do not change
   *STRICT_OVERFLOW_P.  */

static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
		bool *strict_overflow_p)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  static int depth;
  tree ret;

  if (depth > 3)
    return NULL;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
  depth--;

  return ret;
}

static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
		  bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
				   > GET_MODE_SIZE (TYPE_MODE (type)))
		? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;
  bool sub_strict_overflow_p;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
	 or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
	  || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
	return const_binop (code, fold_convert (ctype, t),
			    fold_convert (ctype, c));
      break;

    CASE_CONVERT: case NON_LVALUE_EXPR:
      /* If op0 is an expression ... */
      if ((COMPARISON_CLASS_P (op0)
	   || UNARY_CLASS_P (op0)
	   || BINARY_CLASS_P (op0)
	   || VL_EXP_CLASS_P (op0)
	   || EXPRESSION_CLASS_P (op0))
	  /* ... and has wrapping overflow, and its type is smaller
	     than ctype, then we cannot pass through as widening.  */
	  && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
	       && (TYPE_PRECISION (ctype)
		   > TYPE_PRECISION (TREE_TYPE (op0))))
	      /* ... or this is a truncation (t is narrower than op0),
		 then we cannot pass through this narrowing.  */
	      || (TYPE_PRECISION (type)
		  < TYPE_PRECISION (TREE_TYPE (op0)))
	      /* ... or signedness changes for division or modulus,
		 then we cannot pass through this conversion.  */
	      || (code != MULT_EXPR
		  && (TYPE_UNSIGNED (ctype)
		      != TYPE_UNSIGNED (TREE_TYPE (op0))))
	      /* ... or has undefined overflow while the converted to
		 type has not, we cannot do the operation in the inner type
		 as that would introduce undefined overflow.  */
	      || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
		  && !TYPE_OVERFLOW_UNDEFINED (type))))
	break;

      /* Pass the constant down and see if we can make a simplification.  If
	 we can, replace this expression with the inner simplification for
	 possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
	  && TREE_CODE (t2) == INTEGER_CST
	  && !TREE_OVERFLOW (t2)
	  && (0 != (t1 = extract_muldiv (op0, t2, code,
					 code == MULT_EXPR
					 ? ctype : NULL_TREE,
					 strict_overflow_p))))
	return t1;
      break;

    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
	 must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
	{
	  tree cstype = (*signed_type_for) (ctype);
	  if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
	      != 0)
	    {
	      t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
	      return fold_convert (ctype, t1);
	    }
	  break;
	}
      /* If the constant is negative, we cannot simplify this.  */
      if (tree_int_cst_sgn (c) == -1)
	break;
      /* FALLTHROUGH */
    case NEGATE_EXPR:
      /* For division and modulus, type can't be unsigned, as e.g.
	 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
	 For signed types, even with wrapping overflow, this is fine.  */
      if (code != MULT_EXPR && TYPE_UNSIGNED (type))
	break;
      if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
	  != 0)
	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
	 this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
	break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      sub_strict_overflow_p = false;
      if ((t1 = extract_muldiv (op0, c, code, wide_type,
				&sub_strict_overflow_p)) != 0
	  && (t2 = extract_muldiv (op1, c, code, wide_type,
				   &sub_strict_overflow_p)) != 0)
	{
	  if (tree_int_cst_sgn (c) < 0)
	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
	 or floor division, by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  Signed
	 left-shift overflow is implementation-defined rather than
	 undefined in C90, so do not convert signed left shift into
	 multiplication.  */
      if (TREE_CODE (op1) == INTEGER_CST
	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
	  && 0 != (t1 = fold_convert (ctype,
				      const_binop (LSHIFT_EXPR,
						   size_one_node,
						   op1)))
	  && !TREE_OVERFLOW (t1))
	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
				       ? MULT_EXPR : FLOOR_DIV_EXPR,
				       ctype,
				       fold_convert (ctype, op0),
				       t1),
			       c, code, wide_type, strict_overflow_p);
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
      sub_strict_overflow_p = false;
      t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
      t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
      if (t1 != 0 && t2 != 0
	  && (code == MULT_EXPR
	      /* If not multiplication, we can only do this if both operands
		 are divisible by c.  */
	      || (multiple_of_p (ctype, op0, c)
		  && multiple_of_p (ctype, op1, c))))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}

      /* If this was a subtraction, negate OP1 and set it to be an addition.
	 This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
	{
	  tcode = PLUS_EXPR, op1 = negate_expr (op1);
	  /* If OP1 was not easily negatable, the constant may be OP0.  */
	  if (TREE_CODE (op0) == INTEGER_CST)
	    {
	      tree tem = op0;
	      op0 = op1;
	      op1 = tem;
	      tem = t1;
	      t1 = t2;
	      t2 = tem;
	    }
	}

      if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If either OP1 or C are negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
	    break;
	}

      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
	{
	  op1 = const_binop (code, fold_convert (ctype, op1),
			     fold_convert (ctype, c));
	  /* We allow the constant to overflow with wrapping semantics.  */
	  if (op1 == 0
	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
	    break;
	}
      else
	break;

      /* If we have an unsigned type, we cannot widen the operation since it
	 will change the result if the original computation overflowed.  */
      if (TYPE_UNSIGNED (ctype) && ctype != type)
	break;

      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow
	 and overflow is defined.  With undefined overflow
	 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.  */
      if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
	return fold_build2 (tcode, ctype,
			    fold_build2 (code, ctype,
					 fold_convert (ctype, op0),
					 fold_convert (ctype, c)),
			    op1);

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  /* If the multiplication can overflow we cannot optimize this.  */
	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
	{
	  *strict_overflow_p = true;
	  return omit_one_operand (type, integer_zero_node, op0);
	}

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && (t1 = extract_muldiv (op0, c, code, wide_type,
				   strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && (t1 = extract_muldiv (op1, c, code, wide_type,
					strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
			    fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
	return 0;

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code)
	{
	  bool overflow_p = false;
	  bool overflow_mul_p;
	  signop sign = TYPE_SIGN (ctype);
	  wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
	  overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
	  if (overflow_mul_p
	      && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
	    overflow_p = true;
	  if (!overflow_p)
	    return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				wide_int_to_tree (ctype, mul));
	}

      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with either an
	 operation or CODE or TCODE.

	 If we have an unsigned type, we cannot do this since it will change
	 the result if the original computation overflowed.  */
      if (TYPE_OVERFLOW_UNDEFINED (ctype)
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
		  && code != MULT_EXPR)))
	{
	  if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     op1, c)));
	    }
	  else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (code, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     c, op1)));
	    }
	}
      break;

    default:
      break;
    }

  return 0;
}
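
/* As a worked example: extract_muldiv on T = x * 8 with C = 4 and
   CODE = TRUNC_DIV_EXPR takes the MULT_EXPR "cancel" case above and
   returns x * 2 for signed x, setting *STRICT_OVERFLOW_P because the
   rewrite is only valid when signed overflow is undefined.  */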
/* Return a node which has the indicated constant VALUE (either 0 or
   1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
   and is of the indicated TYPE.  */

tree
constant_boolean_node (bool value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else if (TREE_CODE (type) == VECTOR_TYPE)
    return build_vector_from_val (type,
				  build_int_cst (TREE_TYPE (type),
						 value ? -1 : 0));
  else
    return fold_convert (type, value ? integer_one_node : integer_zero_node);
}
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (location_t loc,
				     enum tree_code code,
				     tree type, tree op0, tree op1,
				     tree cond, tree arg, int cond_first_p)
{
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;
  enum tree_code cond_code = COND_EXPR;

  if (TREE_CODE (cond) == COND_EXPR
      || TREE_CODE (cond) == VEC_COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an expression, then it does not make
	 sense to try to perform a logical or arithmetic operation
	 involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	rhs = false_value;
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
    cond_code = VEC_COND_EXPR;

  /* This transformation is only worthwhile if we don't have to wrap ARG
     in a SAVE_EXPR and the operation can be simplified without recursing
     on at least one of the branches once its pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg)
      && (TREE_SIDE_EFFECTS (arg)
	  || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
	  || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
    return NULL_TREE;

  arg = fold_convert_loc (loc, arg_type, arg);
  if (lhs == 0)
    {
      true_value = fold_convert_loc (loc, cond_type, true_value);
      if (cond_first_p)
	lhs = fold_build2_loc (loc, code, type, true_value, arg);
      else
	lhs = fold_build2_loc (loc, code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert_loc (loc, cond_type, false_value);
      if (cond_first_p)
	rhs = fold_build2_loc (loc, code, type, false_value, arg);
      else
	rhs = fold_build2_loc (loc, code, type, arg, false_value);
    }

  /* Check that we have simplified at least one of the branches.  */
  if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
    return NULL_TREE;

  return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
}
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

bool
fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (TYPE_MODE (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;

  /* In a vector or complex, we would need to check the sign of all zeros.  */
  if (TREE_CODE (addend) != REAL_CST)
    return false;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}
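/* Worked example (editorial sketch, not from the original source): under
   default flags, where signed zeros are honored and rounding is to
   nearest,

     double f (double x) { return x - 0.0; }   ==> folds to x
     double g (double x) { return x + 0.0; }   ==> not folded

   g is kept because (-0.0) + 0.0 is +0.0, which differs from x when
   x is -0.0.  With -fno-signed-zeros both forms fold.  */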
/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_mathfn_compare (location_t loc,
		     enum built_in_function fcode, enum tree_code code,
		     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      tree arg = CALL_EXPR_ARG (arg0, 0);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
	{
	  /* sqrt(x) < y is always false, if y is negative.  */
	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
	    return omit_one_operand_loc (loc, type, integer_zero_node, arg);

	  /* sqrt(x) > y is always true, if y is negative and we
	     don't care about NaNs, i.e. negative values of x.  */
	  if (code == NE_EXPR || !HONOR_NANS (mode))
	    return omit_one_operand_loc (loc, type, integer_one_node, arg);

	  /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
	  return fold_build2_loc (loc, GE_EXPR, type, arg,
				  build_real (TREE_TYPE (arg), dconst0));
	}
      else if (code == GT_EXPR || code == GE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) > y is x == +Inf, when y is very large.  */
	      if (HONOR_INFINITIES (mode))
		return fold_build2_loc (loc, EQ_EXPR, type, arg,
					build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) > y is always false, when y is very large
		 and we don't care about infinities.  */
	      return omit_one_operand_loc (loc, type, integer_zero_node, arg);
	    }

	  /* sqrt(x) > c is the same as x > c*c.  */
	  return fold_build2_loc (loc, code, type, arg,
				  build_real (TREE_TYPE (arg), c2));
	}
      else if (code == LT_EXPR || code == LE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) < y is always true, when y is a very large
		 value and we don't care about NaNs or Infinities.  */
	      if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
		return omit_one_operand_loc (loc, type, integer_one_node, arg);

	      /* sqrt(x) < y is x != +Inf when y is very large and we
		 don't care about NaNs.  */
	      if (! HONOR_NANS (mode))
		return fold_build2_loc (loc, NE_EXPR, type, arg,
					build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) < y is x >= 0 when y is very large and we
		 don't care about Infinities.  */
	      if (! HONOR_INFINITIES (mode))
		return fold_build2_loc (loc, GE_EXPR, type, arg,
					build_real (TREE_TYPE (arg), dconst0));

	      /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
	      arg = save_expr (arg);
	      return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				      fold_build2_loc (loc, GE_EXPR, type, arg,
						       build_real (TREE_TYPE (arg),
								   dconst0)),
				      fold_build2_loc (loc, NE_EXPR, type, arg,
						       build_real (TREE_TYPE (arg),
								   c2)));
	    }

	  /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
	  if (! HONOR_NANS (mode))
	    return fold_build2_loc (loc, code, type, arg,
				    build_real (TREE_TYPE (arg), c2));

	  /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
	  arg = save_expr (arg);
	  return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				  fold_build2_loc (loc, GE_EXPR, type, arg,
						   build_real (TREE_TYPE (arg),
							       dconst0)),
				  fold_build2_loc (loc, code, type, arg,
						   build_real (TREE_TYPE (arg),
							       c2)));
	}
    }

  return NULL_TREE;
}
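/* Worked example (editorial sketch, not from the original source):

     __builtin_sqrt (x) > 2.0    ==>  x > 4.0
     __builtin_sqrt (x) < -1.0   ==>  0

   The first form needs no NaN guard: if x is negative or NaN, both
   sides of the equivalence are false.  The second is always false
   because sqrt never returns a negative value.  */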
/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_inf_compare (location_t loc, enum tree_code code, tree type,
		  tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
	return NULL_TREE;
      return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
      arg0 = save_expr (arg0);
      return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
	return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
				arg0, build_real (TREE_TYPE (arg0), max));

      temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));
      return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);

    default:
      break;
    }

  return NULL_TREE;
}
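/* Worked example (editorial sketch, not from the original source): with
   NaNs honored (the default),

     x <= __builtin_inf ()   ==>  x == x     (i.e. !isnan (x))
     x >  __builtin_inf ()   ==>  0
     x <  -__builtin_inf ()  ==>  0

   For -Inf the comparison sense is swapped first, so the third line
   is handled by the same GT_EXPR arm as the second.  */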
/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an integer constant.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_div_compare (location_t loc,
		  enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  signop sign = TYPE_SIGN (TREE_TYPE (arg0));
  bool neg_overflow = false;
  bool overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1);  */
  wide_int val = wi::mul (arg01, arg1, sign, &overflow);
  prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
  neg_overflow = false;

  if (sign == UNSIGNED)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp).  */
      val = wi::add (prod, tmp, sign, &overflow);
      hi = force_fit_type (TREE_TYPE (arg00), val,
			   -1, overflow | TREE_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  neg_overflow = true;
	  lo = int_const_binop (MINUS_EXPR, prod, tmp);
	  hi = prod;
	  break;

	case 0:
	  lo = fold_negate_const (tmp, TREE_TYPE (arg0));
	  hi = tmp;
	  break;

	case 1:
	  hi = int_const_binop (PLUS_EXPR, prod, tmp);
	  lo = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  hi = int_const_binop (MINUS_EXPR, prod, tmp);
	  lo = prod;
	  break;

	case 0:
	  hi = fold_negate_const (tmp, TREE_TYPE (arg0));
	  lo = tmp;
	  break;

	case 1:
	  neg_overflow = true;
	  lo = int_const_binop (PLUS_EXPR, prod, tmp);
	  hi = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand_loc (loc, type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}
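/* Worked example (editorial sketch, not from the original source): for

     int f (int x) { return x / 4 == 2; }

   arg01 == 4 and arg1 == 2, so prod == 8, tmp == 3, lo == 8, hi == 11,
   and the comparison becomes the range check 8 <= x && x <= 11 via
   build_range_check, leaving no division in the folded tree.  */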
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of the test
   using a sign test.  Otherwise return NULL.  TYPE is the desired
   result type.  */

static tree
fold_single_bit_test_into_sign_test (location_t loc,
				     enum tree_code code, tree arg0, tree arg1,
				     tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      /* If we have (A & C) != 0 where C is the sign bit of A, convert
	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      if (arg00 != NULL_TREE
	  /* This is only a win if casting to a signed type is cheap,
	     i.e. when arg00's type is not a partial mode.  */
	  && TYPE_PRECISION (TREE_TYPE (arg00))
	     == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
	{
	  tree stype = signed_type_for (TREE_TYPE (arg00));
	  return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
				  result_type,
				  fold_convert_loc (loc, stype, arg00),
				  build_int_cst (stype, 0));
	}
    }

  return NULL_TREE;
}
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */

tree
fold_single_bit_test (location_t loc, enum tree_code code,
		      tree arg0, tree arg1, tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem, one;

      /* First, see if we can fold the single bit test into a sign-bit
	 test.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
						 result_type);
      if (tem)
	return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
	 convert that into ((A >> C2) & 1), where C2 = log2 (C).
	 Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && bitnum < TYPE_PRECISION (type)
	  && wi::ltu_p (TREE_OPERAND (inner, 1),
			TYPE_PRECISION (type) - bitnum))
	{
	  bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
		      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert_loc (loc, intermediate_type, inner);

      if (bitnum != 0)
	inner = build2 (RSHIFT_EXPR, intermediate_type,
			inner, size_int (bitnum));

      one = build_int_cst (intermediate_type, 1);

      if (code == EQ_EXPR)
	inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type,
				 inner, one);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);

      /* Make sure to return the proper type.  */
      inner = fold_convert_loc (loc, result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
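/* Worked example (editorial sketch, not from the original source):

     (x & 8) != 0    ==>  (x >> 3) & 1
     (x & 8) == 0    ==>  ((x >> 3) ^ 1) & 1
     (x & 0x80000000U) != 0, x a 32-bit unsigned
		     ==>  (int) x < 0   (via the sign-test subroutine)

   The shift/AND form keeps the AND outermost so later passes can
   combine it with surrounding arithmetic.  */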
/* Check whether we are allowed to reorder operands arg0 and arg1,
   such that the evaluation of arg1 occurs before arg0.  */

static bool
reorder_operands_p (const_tree arg0, const_tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
	 && ! TREE_SIDE_EFFECTS (arg1);
}
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */

bool
tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
{
  if (CONSTANT_CLASS_P (arg1))
    return 0;
  if (CONSTANT_CLASS_P (arg0))
    return 1;

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return 1;

  /* Put SSA_NAMEs last.  */
  if (TREE_CODE (arg1) == SSA_NAME)
    return 0;
  if (TREE_CODE (arg0) == SSA_NAME)
    return 1;

  /* Put variables last.  */
  if (DECL_P (arg1))
    return 0;
  if (DECL_P (arg0))
    return 1;

  return 0;
}
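/* Illustrative effect (editorial sketch, not from the original source):
   callers use this predicate to commute operands into canonical order,
   e.g.

     1 + x        ==>  x + 1       (constants last)
     b_7 + a_3    ==>  a_3 + b_7   (SSA names by ascending version)

   so that later passes only need to recognize one ordering.  */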
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
   ARG0 is extended to a wider type.  */

static tree
fold_widened_comparison (location_t loc, enum tree_code code,
			 tree type, tree arg0, tree arg1)
{
  tree arg0_unw = get_unwidened (arg0, NULL_TREE);
  tree arg1_unw;
  tree shorter_type, outer_type;
  tree min, max;
  bool above, below;

  if (arg0_unw == arg0)
    return NULL_TREE;
  shorter_type = TREE_TYPE (arg0_unw);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (shorter_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
    return NULL_TREE;

  arg1_unw = get_unwidened (arg1, NULL_TREE);

  /* If possible, express the comparison in the shorter mode.  */
  if ((code == EQ_EXPR || code == NE_EXPR
       || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
      && (TREE_TYPE (arg1_unw) == shorter_type
	  || ((TYPE_PRECISION (shorter_type)
	       >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
	      && (TYPE_UNSIGNED (shorter_type)
		  == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
	  || (TREE_CODE (arg1_unw) == INTEGER_CST
	      && (TREE_CODE (shorter_type) == INTEGER_TYPE
		  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
	      && int_fits_type_p (arg1_unw, shorter_type))))
    return fold_build2_loc (loc, code, type, arg0_unw,
			    fold_convert_loc (loc, shorter_type, arg1_unw));

  if (TREE_CODE (arg1_unw) != INTEGER_CST
      || TREE_CODE (shorter_type) != INTEGER_TYPE
      || !int_fits_type_p (arg1_unw, shorter_type))
    return NULL_TREE;

  /* If we are comparing with the integer that does not fit into the range
     of the shorter type, the result is known.  */
  outer_type = TREE_TYPE (arg1_unw);
  min = lower_bound_in_type (outer_type, shorter_type);
  max = upper_bound_in_type (outer_type, shorter_type);

  above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   max, arg1_unw));
  below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   arg1_unw, min));

  switch (code)
    {
    case EQ_EXPR:
      if (above || below)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;

    case NE_EXPR:
      if (above || below)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;

    case LT_EXPR:
    case LE_EXPR:
      if (above)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      else if (below)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;

    case GT_EXPR:
    case GE_EXPR:
      if (above)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      else if (below)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
   ARG0 just the signedness is changed.  */

static tree
fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
			      tree arg0, tree arg1)
{
  tree arg0_inner;
  tree inner_type, outer_type;

  if (!CONVERT_EXPR_P (arg0))
    return NULL_TREE;

  outer_type = TREE_TYPE (arg0);
  arg0_inner = TREE_OPERAND (arg0, 0);
  inner_type = TREE_TYPE (arg0_inner);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (inner_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) != INTEGER_CST
      && !(CONVERT_EXPR_P (arg1)
	   && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
    return NULL_TREE;

  if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
      && code != NE_EXPR
      && code != EQ_EXPR)
    return NULL_TREE;

  if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) == INTEGER_CST)
    arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
			   TREE_OVERFLOW (arg1));
  else
    arg1 = fold_convert_loc (loc, inner_type, arg1);

  return fold_build2_loc (loc, code, type, arg0_inner, arg1);
}
/* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
   the step of the array.  Reconstructs s and delta in the case of s *
   delta being an integer constant (and thus already folded).  ADDR is
   the address.  MULT is the multiplicative expression.  If the
   function succeeds, the new address expression is returned.
   Otherwise NULL_TREE is returned.  LOC is the location of the
   resulting expression.  */

static tree
try_move_mult_to_index (location_t loc, tree addr, tree op1)
{
  tree s, delta, step;
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;
  bool mdim = false;

  /* Strip the nops that might be added when converting op1 to sizetype.  */
  STRIP_NOPS (op1);

  /* Canonicalize op1 into a possibly non-constant delta
     and an INTEGER_CST s.  */
  if (TREE_CODE (op1) == MULT_EXPR)
    {
      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  s = arg0;
	  delta = arg1;
	}
      else if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  s = arg1;
	  delta = arg0;
	}
      else
	return NULL_TREE;
    }
  else if (TREE_CODE (op1) == INTEGER_CST)
    {
      delta = op1;
      s = NULL_TREE;
    }
  else
    {
      /* Simulate we are delta * 1.  */
      delta = op1;
      s = integer_one_node;
    }

  /* Handle &x.array the same as we would handle &x.array[0].  */
  if (TREE_CODE (ref) == COMPONENT_REF
      && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
    {
      tree domain;

      /* Remember if this was a multi-dimensional array.  */
      if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
	mdim = true;

      domain = TYPE_DOMAIN (TREE_TYPE (ref));
      if (! domain)
	goto cont;
      itype = TREE_TYPE (domain);

      step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
      if (TREE_CODE (step) != INTEGER_CST)
	goto cont;

      if (s)
	{
	  if (! tree_int_cst_equal (step, s))
	    goto cont;
	}
      else
	{
	  /* Try if delta is a multiple of step.  */
	  tree tmp = div_if_zero_remainder (op1, step);
	  if (! tmp)
	    goto cont;
	  delta = tmp;
	}

      /* Only fold here if we can verify we do not overflow one
	 dimension of a multi-dimensional array.  */
      if (mdim)
	{
	  tree tmp;

	  if (!TYPE_MIN_VALUE (domain)
	      || !TYPE_MAX_VALUE (domain)
	      || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
	    goto cont;

	  tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
				 fold_convert_loc (loc, itype,
						   TYPE_MIN_VALUE (domain)),
				 fold_convert_loc (loc, itype, delta));
	  if (TREE_CODE (tmp) != INTEGER_CST
	      || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
	    goto cont;
	}

      /* We found a suitable component reference.  */

      pref = TREE_OPERAND (addr, 0);
      ret = copy_node (pref);
      SET_EXPR_LOCATION (ret, loc);

      ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
			fold_build2_loc
			  (loc, PLUS_EXPR, itype,
			   fold_convert_loc (loc, itype,
					     TYPE_MIN_VALUE
					       (TYPE_DOMAIN (TREE_TYPE (ref)))),
			   fold_convert_loc (loc, itype, delta)),
			NULL_TREE, NULL_TREE);
      return build_fold_addr_expr_loc (loc, ret);
    }

cont:

  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
	{
	  tree domain;

	  /* Remember if this was a multi-dimensional array.  */
	  if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
	    mdim = true;

	  domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
	  if (! domain)
	    continue;
	  itype = TREE_TYPE (domain);

	  step = array_ref_element_size (ref);
	  if (TREE_CODE (step) != INTEGER_CST)
	    continue;

	  if (s)
	    {
	      if (! tree_int_cst_equal (step, s))
		continue;
	    }
	  else
	    {
	      /* Try if delta is a multiple of step.  */
	      tree tmp = div_if_zero_remainder (op1, step);
	      if (! tmp)
		continue;
	      delta = tmp;
	    }

	  /* Only fold here if we can verify we do not overflow one
	     dimension of a multi-dimensional array.  */
	  if (mdim)
	    {
	      tree tmp;

	      if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
		  || !TYPE_MAX_VALUE (domain)
		  || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
		continue;

	      tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
				     fold_convert_loc (loc, itype,
						       TREE_OPERAND (ref, 1)),
				     fold_convert_loc (loc, itype, delta));
	      if (!tmp
		  || TREE_CODE (tmp) != INTEGER_CST
		  || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
		continue;
	    }

	  break;
	}

      if (!handled_component_p (ref))
	return NULL_TREE;
    }

  /* We found the suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  SET_EXPR_LOCATION (ret, loc);
  pos = ret;

  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  TREE_OPERAND (pos, 1)
    = fold_build2_loc (loc, PLUS_EXPR, itype,
		       fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
		       fold_convert_loc (loc, itype, delta));
  return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
}
/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */

static tree
fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
{
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;

  if (TREE_CODE (bound) == LT_EXPR)
    a = TREE_OPERAND (bound, 0);
  else if (TREE_CODE (bound) == GT_EXPR)
    a = TREE_OPERAND (bound, 1);
  else
    return NULL_TREE;

  typea = TREE_TYPE (a);
  if (!INTEGRAL_TYPE_P (typea)
      && !POINTER_TYPE_P (typea))
    return NULL_TREE;

  if (TREE_CODE (ineq) == LT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 1);
      y = TREE_OPERAND (ineq, 0);
    }
  else if (TREE_CODE (ineq) == GT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 0);
      y = TREE_OPERAND (ineq, 1);
    }
  else
    return NULL_TREE;

  if (TREE_TYPE (a1) != typea)
    return NULL_TREE;

  if (POINTER_TYPE_P (typea))
    {
      /* Convert the pointer types into integer before taking the difference.  */
      tree ta = fold_convert_loc (loc, ssizetype, a);
      tree ta1 = fold_convert_loc (loc, ssizetype, a1);
      diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
    }
  else
    diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);

  if (!diff || !integer_onep (diff))
    return NULL_TREE;

  return fold_build2_loc (loc, GE_EXPR, type, a, y);
}
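/* Worked example (editorial sketch, not from the original source):

     a < x && a + 1 > y   ==>   a < x && a >= y

   A + 1 > Y differs from A >= Y only when A + 1 wraps, i.e. when
   A == MAX; the first conjunct A < X rules that case out.  */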
/* Fold a sum or difference of at least one multiplication.
   Returns the folded tree or NULL if no simplification could be made.  */

static tree
fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
			  tree arg0, tree arg1)
{
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else if (TREE_CODE (arg0) == INTEGER_CST)
    {
      arg00 = build_one_cst (type);
      arg01 = arg0;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      arg00 = arg0;
      arg01 = build_one_cst (type);
    }
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else if (TREE_CODE (arg1) == INTEGER_CST)
    {
      arg10 = build_one_cst (type);
      /* Since we canonicalize A - 2 to A + -2, get rid of that sign for
	 the purpose of this canonicalization.  */
      if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
	  && negate_expr_p (arg1)
	  && code == PLUS_EXPR)
	{
	  arg11 = negate_expr (arg1);
	  code = MINUS_EXPR;
	}
      else
	arg11 = arg1;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      arg10 = arg1;
      arg11 = build_one_cst (type);
    }
  same = NULL_TREE;

  if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
  else if (tree_fits_shwi_p (arg01)
	   && tree_fits_shwi_p (arg11))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = tree_to_shwi (arg01);
      int11 = tree_to_shwi (arg11);

      /* Move min of absolute values to int11.  */
      if (absu_hwi (int01) < absu_hwi (int11))
	{
	  tmp = int01, int01 = int11, int11 = tmp;
	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
	  maybe_same = alt0;
	  swap = true;
	}
      else
	maybe_same = arg11;

      if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
	  /* The remainder should not be a constant, otherwise we
	     end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
	     increased the number of multiplications necessary.  */
	  && TREE_CODE (arg10) != INTEGER_CST)
	{
	  alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
				  build_int_cst (TREE_TYPE (arg00),
						 int01 / int11));
	  alt1 = arg10;
	  same = maybe_same;
	  if (swap)
	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
	}
    }

  if (same)
    return fold_build2_loc (loc, MULT_EXPR, type,
			    fold_build2_loc (loc, code, type,
					     fold_convert_loc (loc, type, alt0),
					     fold_convert_loc (loc, type, alt1)),
			    fold_convert_loc (loc, type, same));

  return NULL_TREE;
}
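/* Worked examples (editorial sketch, not from the original source):

     a * c + b * c   ==>  (a + b) * c      (identical multiplicands)
     x * 4 + x       ==>  x * 5            (X treated as X * 1)
     i * 12 + j * 4  ==>  (i * 3 + j) * 4  (common power-of-two factor)

   The last form helps multi-dimensional array indexing, where inner
   and outer strides share a power-of-two factor.  */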
/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  if ((off == -1 && total_bytes > len)
      || off >= total_bytes)
    return 0;
  if (off == -1)
    off = 0;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
	 number of bytes.  */
      value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);

      if (total_bytes > UNITS_PER_WORD)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      if (offset >= off
	  && offset - off < len)
	ptr[offset - off] = value;
    }
  return MIN (len, total_bytes - off);
}
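/* Worked example (editorial sketch, not from the original source):
   encoding the 32-bit INTEGER_CST 0x01020304 on a little-endian target
   with 32-bit words stores

     ptr[0] = 0x04, ptr[1] = 0x03, ptr[2] = 0x02, ptr[3] = 0x01

   and returns 4; on a big-endian target the bytes are reversed.  With
   OFF == 2 and LEN == 1 only the single byte at target offset 2 is
   written, to ptr[0], and 1 is returned.  */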
/* Subroutine of native_encode_expr.  Encode the FIXED_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
{
  tree type = TREE_TYPE (expr);
  enum machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  FIXED_VALUE_TYPE value;
  tree i_value, i_type;

  if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return 0;

  i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);

  if (NULL_TREE == i_type
      || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
    return 0;

  value = TREE_FIXED_CST (expr);
  i_value = double_int_to_tree (i_type, value.data);

  return native_encode_int (i_value, ptr, len, off);
}
/* Subroutine of native_encode_expr.  Encode the REAL_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words, bitpos;
  unsigned char value;

  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  long tmp[6];

  if ((off == -1 && total_bytes > len)
      || off >= total_bytes)
    return 0;
  if (off == -1)
    off = 0;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));

  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));

      if (UNITS_PER_WORD < 4)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
      if (offset >= off
	  && offset - off < len)
	ptr[offset - off] = value;
    }
  return MIN (len, total_bytes - off);
}
/* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
{
  int rsize, isize;
  tree part;

  part = TREE_REALPART (expr);
  rsize = native_encode_expr (part, ptr, len, off);
  if (off == -1
      && rsize == 0)
    return 0;
  part = TREE_IMAGPART (expr);
  if (off != -1)
    off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
  isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
  if (off == -1
      && isize != rsize)
    return 0;
  return rsize + isize;
}
/* Subroutine of native_encode_expr.  Encode the VECTOR_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
{
  unsigned i, count;
  int size, offset;
  tree itype, elem;

  offset = 0;
  count = VECTOR_CST_NELTS (expr);
  itype = TREE_TYPE (TREE_TYPE (expr));
  size = GET_MODE_SIZE (TYPE_MODE (itype));
  for (i = 0; i < count; i++)
    {
      if (off >= size)
	{
	  off -= size;
	  continue;
	}
      elem = VECTOR_CST_ELT (expr, i);
      int res = native_encode_expr (elem, ptr+offset, len-offset, off);
      if ((off == -1 && res != size)
	  || res == 0)
	return 0;
      offset += res;
      if (offset >= len)
	return offset;
      if (off != -1)
	off = 0;
    }
  return offset;
}
/* Subroutine of native_encode_expr.  Encode the STRING_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
{
  tree type = TREE_TYPE (expr);
  HOST_WIDE_INT total_bytes;

  if (TREE_CODE (type) != ARRAY_TYPE
      || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
      || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
      || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
    return 0;
  total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
  if ((off == -1 && total_bytes > len)
      || off >= total_bytes)
    return 0;
  if (off == -1)
    off = 0;
  if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
    {
      int written = 0;
      if (off < TREE_STRING_LENGTH (expr))
	{
	  written = MIN (len, TREE_STRING_LENGTH (expr) - off);
	  memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
	}
      memset (ptr + written, 0,
	      MIN (total_bytes - written, len - written));
    }
  else
    memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
  return MIN (total_bytes - off, len);
}
/* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
   REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
   buffer PTR of length LEN bytes.  If OFF is not -1 then start
   the encoding at byte offset OFF and encode at most LEN bytes.
   Return the number of bytes placed in the buffer, or zero upon failure.  */

int
native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
{
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return native_encode_int (expr, ptr, len, off);

    case REAL_CST:
      return native_encode_real (expr, ptr, len, off);

    case FIXED_CST:
      return native_encode_fixed (expr, ptr, len, off);

    case COMPLEX_CST:
      return native_encode_complex (expr, ptr, len, off);

    case VECTOR_CST:
      return native_encode_vector (expr, ptr, len, off);

    case STRING_CST:
      return native_encode_string (expr, ptr, len, off);

    default:
      return 0;
    }
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_int (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));

  if (total_bytes > len
      || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return NULL_TREE;

  wide_int result = wi::from_buffer (ptr, total_bytes);

  return wide_int_to_tree (type, result);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a FIXED_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_fixed (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  double_int result;
  FIXED_VALUE_TYPE fixed_value;

  if (total_bytes > len
      || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return NULL_TREE;

  result = double_int::from_buffer (ptr, total_bytes);
  fixed_value = fixed_from_double_int (result, TYPE_MODE (type));

  return build_fixed (type, fixed_value);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a REAL_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_real (tree type, const unsigned char *ptr, int len)
{
  enum machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  int byte, offset, word, words, bitpos;
  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  REAL_VALUE_TYPE r;
  long tmp[6];

  total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  if (total_bytes > len || total_bytes > 24)
    return NULL_TREE;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  memset (tmp, 0, sizeof (tmp));
  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      if (UNITS_PER_WORD < 4)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];

      tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
    }

  real_from_target (&r, tmp, mode);
  return build_real (type, r);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_complex (tree type, const unsigned char *ptr, int len)
{
  tree etype, rpart, ipart;
  int size;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  if (size * 2 > len)
    return NULL_TREE;
  rpart = native_interpret_expr (etype, ptr, size);
  if (!rpart)
    return NULL_TREE;
  ipart = native_interpret_expr (etype, ptr+size, size);
  if (!ipart)
    return NULL_TREE;
  return build_complex (type, rpart, ipart);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_vector (tree type, const unsigned char *ptr, int len)
{
  tree etype, elem;
  int i, size, count;
  tree *elements;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  count = TYPE_VECTOR_SUBPARTS (type);
  if (size * count > len)
    return NULL_TREE;

  elements = XALLOCAVEC (tree, count);
  for (i = count - 1; i >= 0; i--)
    {
      elem = native_interpret_expr (etype, ptr+(i*size), size);
      if (!elem)
	return NULL_TREE;
      elements[i] = elem;
    }
  return build_vector (type, elements);
}
/* Subroutine of fold_view_convert_expr.  Interpret the contents of
   the buffer PTR of length LEN as a constant of type TYPE.  For
   INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
   we return a REAL_CST, etc...  If the buffer cannot be interpreted,
   return NULL_TREE.  */

tree
native_interpret_expr (tree type, const unsigned char *ptr, int len)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      return native_interpret_int (type, ptr, len);

    case REAL_TYPE:
      return native_interpret_real (type, ptr, len);

    case FIXED_POINT_TYPE:
      return native_interpret_fixed (type, ptr, len);

    case COMPLEX_TYPE:
      return native_interpret_complex (type, ptr, len);

    case VECTOR_TYPE:
      return native_interpret_vector (type, ptr, len);

    default:
      return NULL_TREE;
    }
}
/* Returns true if we can interpret the contents of a native encoding
   as TYPE.  */

static bool
can_native_interpret_type_p (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case FIXED_POINT_TYPE:
    case REAL_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
      return true;
    default:
      return false;
    }
}
/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
   TYPE at compile-time.  If we're unable to perform the conversion
   return NULL_TREE.  */

static tree
fold_view_convert_expr (tree type, tree expr)
{
  /* We support up to 512-bit values (for V8DFmode).  */
  unsigned char buffer[64];
  int len;

  /* Check that the host and target are sane.  */
  if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    return NULL_TREE;

  len = native_encode_expr (expr, buffer, sizeof (buffer));
  if (len == 0)
    return NULL_TREE;

  return native_interpret_expr (type, buffer, len);
}
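/* Worked example (editorial sketch, not from the original source):
   folding VIEW_CONVERT_EXPR<int>(1.0f) first encodes the REAL_CST
   1.0f into the buffer (bytes 00 00 80 3f on little-endian) and then
   reinterprets them, yielding the INTEGER_CST 0x3f800000.  The same
   round-trip handles vector and complex constants element-wise.  */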
/* Build an expression for the address of T.  Folds away INDIRECT_REF
   to avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  if (TREE_CODE (t) == INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);

      if (TREE_TYPE (t) != ptrtype)
	t = build1_loc (loc, NOP_EXPR, ptrtype, t);
    }
  else if (TREE_CODE (t) == MEM_REF
	   && integer_zerop (TREE_OPERAND (t, 1)))
    return TREE_OPERAND (t, 0);
  else if (TREE_CODE (t) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
    return fold_binary (POINTER_PLUS_EXPR, ptrtype,
			TREE_OPERAND (t, 0),
			convert_to_ptrofftype (TREE_OPERAND (t, 1)));
  else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
    {
      t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));

      if (TREE_TYPE (t) != ptrtype)
	t = fold_convert_loc (loc, ptrtype, t);
    }
  else
    t = build1_loc (loc, ADDR_EXPR, ptrtype, t);

  return t;
}
/* Build an expression for the address of T.  */

tree
build_fold_addr_expr_loc (location_t loc, tree t)
{
  tree ptrtype = build_pointer_type (TREE_TYPE (t));

  return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
}
static bool vec_cst_ctor_to_array (tree, tree *);
/* Fold a unary expression of code CODE and type TYPE with operand
   OP0.  Return the folded expression if folding is successful.
   Otherwise, return NULL_TREE.  */

tree
fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
{
  tree tem;
  tree arg0;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 1);

  arg0 = op0;
  if (arg0)
    {
      if (CONVERT_EXPR_CODE_P (code)
	  || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
	{
	  /* Don't use STRIP_NOPS, because signedness of argument type
	     matters.  */
	  STRIP_SIGN_NOPS (arg0);
	}
      else
	{
	  /* Strip any conversions that don't change the mode.  This
	     is safe for every expression, except for a comparison
	     expression because its signedness is derived from its
	     operands.

	     Note that this is done as an internal manipulation within
	     the constant folder, in order to find the simplest
	     representation of the arguments so that their form can be
	     studied.  In any case, the appropriate type conversions
	     should be put back in the tree that will get out of the
	     constant folder.  */
	  STRIP_NOPS (arg0);
	}
    }

  if (TREE_CODE_CLASS (code) == tcc_unary)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
		       fold_build1_loc (loc, code, type,
					fold_convert_loc (loc, TREE_TYPE (op0),
							  TREE_OPERAND (arg0, 1))));
      else if (TREE_CODE (arg0) == COND_EXPR)
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg02 = TREE_OPERAND (arg0, 2);
	  if (! VOID_TYPE_P (TREE_TYPE (arg01)))
	    arg01 = fold_build1_loc (loc, code, type,
				     fold_convert_loc (loc,
						       TREE_TYPE (op0), arg01));
	  if (! VOID_TYPE_P (TREE_TYPE (arg02)))
	    arg02 = fold_build1_loc (loc, code, type,
				     fold_convert_loc (loc,
						       TREE_TYPE (op0), arg02));
	  tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
				 arg01, arg02);

	  /* If this was a conversion, and all we did was to move it
	     inside the COND_EXPR, bring it back out.  But leave it if
	     it is a conversion from integer to integer and the
	     result precision is no wider than a word since such a
	     conversion is cheap and may be optimized away by combine,
	     while it couldn't if it were outside the COND_EXPR.  Then return
	     so we don't get into an infinite recursion loop taking the
	     conversion out and then back in.  */

	  if ((CONVERT_EXPR_CODE_P (code)
	       || code == NON_LVALUE_EXPR)
	      && TREE_CODE (tem) == COND_EXPR
	      && TREE_CODE (TREE_OPERAND (tem, 1)) == code
	      && TREE_CODE (TREE_OPERAND (tem, 2)) == code
	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
	      && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
	      && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
		     && (INTEGRAL_TYPE_P
			 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
		     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
		  || flag_syntax_only))
	    tem = build1_loc (loc, code, type,
			      build3 (COND_EXPR,
				      TREE_TYPE (TREE_OPERAND
						 (TREE_OPERAND (tem, 1), 0)),
				      TREE_OPERAND (tem, 0),
				      TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
				      TREE_OPERAND (TREE_OPERAND (tem, 2),
						    0)));
	  return tem;
	}
    }

  switch (code)
    {
    case PAREN_EXPR:
7866 barriers can be removed. */
7867 if (CONSTANT_CLASS_P (op0
)
7868 || TREE_CODE (op0
) == PAREN_EXPR
)
7869 return fold_convert_loc (loc
, type
, op0
);
7872 case NON_LVALUE_EXPR
:
7873 if (!maybe_lvalue_p (op0
))
7874 return fold_convert_loc (loc
, type
, op0
);
7879 case FIX_TRUNC_EXPR
:
7880 if (TREE_TYPE (op0
) == type
)
7883 if (COMPARISON_CLASS_P (op0
))
7885 /* If we have (type) (a CMP b) and type is an integral type, return
7886 new expression involving the new type. Canonicalize
7887 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7889 Do not fold the result as that would not simplify further, also
7890 folding again results in recursions. */
7891 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
7892 return build2_loc (loc
, TREE_CODE (op0
), type
,
7893 TREE_OPERAND (op0
, 0),
7894 TREE_OPERAND (op0
, 1));
7895 else if (!INTEGRAL_TYPE_P (type
) && !VOID_TYPE_P (type
)
7896 && TREE_CODE (type
) != VECTOR_TYPE
)
7897 return build3_loc (loc
, COND_EXPR
, type
, op0
,
7898 constant_boolean_node (true, type
),
7899 constant_boolean_node (false, type
));
      /* Handle cases of two conversions in a row.  */
      if (CONVERT_EXPR_P (op0))
	{
	  tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
	  tree inter_type = TREE_TYPE (op0);
	  int inside_int = INTEGRAL_TYPE_P (inside_type);
	  int inside_ptr = POINTER_TYPE_P (inside_type);
	  int inside_float = FLOAT_TYPE_P (inside_type);
	  int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
	  unsigned int inside_prec = TYPE_PRECISION (inside_type);
	  int inside_unsignedp = TYPE_UNSIGNED (inside_type);
	  int inter_int = INTEGRAL_TYPE_P (inter_type);
	  int inter_ptr = POINTER_TYPE_P (inter_type);
	  int inter_float = FLOAT_TYPE_P (inter_type);
	  int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
	  unsigned int inter_prec = TYPE_PRECISION (inter_type);
	  int inter_unsignedp = TYPE_UNSIGNED (inter_type);
	  int final_int = INTEGRAL_TYPE_P (type);
	  int final_ptr = POINTER_TYPE_P (type);
	  int final_float = FLOAT_TYPE_P (type);
	  int final_vec = TREE_CODE (type) == VECTOR_TYPE;
	  unsigned int final_prec = TYPE_PRECISION (type);
	  int final_unsignedp = TYPE_UNSIGNED (type);

	  /* In addition to the cases of two conversions in a row
	     handled below, if we are converting something to its own
	     type via an object of identical or wider precision, neither
	     conversion is needed.  */
	  if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
	      && (((inter_int || inter_ptr) && final_int)
		  || (inter_float && final_float))
	      && inter_prec >= final_prec)
	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));

	  /* Likewise, if the intermediate and initial types are either both
	     float or both integer, we don't need the middle conversion if the
	     former is wider than the latter and doesn't change the signedness
	     (for integers).  Avoid this if the final type is a pointer since
	     then we sometimes need the middle conversion.  Likewise if the
	     final type has a precision not equal to the size of its mode.  */
	  if (((inter_int && inside_int)
	       || (inter_float && inside_float)
	       || (inter_vec && inside_vec))
	      && inter_prec >= inside_prec
	      && (inter_float || inter_vec
		  || inter_unsignedp == inside_unsignedp)
	      && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
		    && TYPE_MODE (type) == TYPE_MODE (inter_type))
	      && ! final_ptr
	      && (! final_vec || inter_prec == inside_prec))
	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));

	  /* If we have a sign-extension of a zero-extended value, we can
	     replace that by a single zero-extension.  Likewise if the
	     final conversion does not change precision we can drop the
	     intermediate conversion.  */
	  if (inside_int && inter_int && final_int
	      && ((inside_prec < inter_prec && inter_prec < final_prec
		   && inside_unsignedp && !inter_unsignedp)
		  || final_prec == inter_prec))
	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));

	  /* Two conversions in a row are not needed unless:
	     - some conversion is floating-point (overstrict for now), or
	     - some conversion is a vector (overstrict for now), or
	     - the intermediate type is narrower than both initial and
	       final, or
	     - the intermediate type and innermost type differ in signedness,
	       and the outermost type is wider than the intermediate, or
	     - the initial type is a pointer type and the precisions of the
	       intermediate and final types differ, or
	     - the final type is a pointer type and the precisions of the
	       initial and intermediate types differ.  */
	  if (! inside_float && ! inter_float && ! final_float
	      && ! inside_vec && ! inter_vec && ! final_vec
	      && (inter_prec >= inside_prec || inter_prec >= final_prec)
	      && ! (inside_int && inter_int
		    && inter_unsignedp != inside_unsignedp
		    && inter_prec < final_prec)
	      && ((inter_unsignedp && inter_prec > inside_prec)
		  == (final_unsignedp && final_prec > inter_prec))
	      && ! (inside_ptr && inter_prec != final_prec)
	      && ! (final_ptr && inside_prec != inter_prec)
	      && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
		    && TYPE_MODE (type) == TYPE_MODE (inter_type)))
	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
	}
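      /* Illustrative cases for the rules above (editorial sketch, not
	 from the original source), assuming 32-bit int and 64-bit long:

	   (int) (long) i,  i an int   ==>  i         (own type via wider)
	   (int) (unsigned int) c      ==>  (int) c   (same precision,
						       intermediate dropped)
	   (int) (short) i,  i an int  ==>  kept; the intermediate
					    truncation is observable.  */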
7990 /* Handle (T *)&A.B.C for A being of type T and B and C
7991 living at offset zero. This occurs frequently in
7992 C++ upcasting and then accessing the base. */
7993 if (TREE_CODE (op0
) == ADDR_EXPR
7994 && POINTER_TYPE_P (type
)
7995 && handled_component_p (TREE_OPERAND (op0
, 0)))
7997 HOST_WIDE_INT bitsize
, bitpos
;
7999 enum machine_mode mode
;
8000 int unsignedp
, volatilep
;
8001 tree base
= TREE_OPERAND (op0
, 0);
8002 base
= get_inner_reference (base
, &bitsize
, &bitpos
, &offset
,
8003 &mode
, &unsignedp
, &volatilep
, false);
8004 /* If the reference was to a (constant) zero offset, we can use
8005 the address of the base if it has the same base type
8006 as the result type and the pointer type is unqualified. */
8007 if (! offset
&& bitpos
== 0
8008 && (TYPE_MAIN_VARIANT (TREE_TYPE (type
))
8009 == TYPE_MAIN_VARIANT (TREE_TYPE (base
)))
8010 && TYPE_QUALS (type
) == TYPE_UNQUALIFIED
)
8011 return fold_convert_loc (loc
, type
,
8012 build_fold_addr_expr_loc (loc
, base
));
8015 if (TREE_CODE (op0
) == MODIFY_EXPR
8016 && TREE_CONSTANT (TREE_OPERAND (op0
, 1))
8017 /* Detect assigning a bitfield. */
8018 && !(TREE_CODE (TREE_OPERAND (op0
, 0)) == COMPONENT_REF
8020 (TREE_OPERAND (TREE_OPERAND (op0
, 0), 1))))
8022 /* Don't leave an assignment inside a conversion
8023 unless assigning a bitfield. */
8024 tem
= fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 1));
8025 /* First do the assignment, then return converted constant. */
8026 tem
= build2_loc (loc
, COMPOUND_EXPR
, TREE_TYPE (tem
), op0
, tem
);
8027 TREE_NO_WARNING (tem
) = 1;
8028 TREE_USED (tem
) = 1;
      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
	 constant (if x has signed type, the sign bit cannot be set
	 in c).  This folds extension into the BIT_AND_EXPR.
	 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
	 very likely don't have maximal range for their precision and this
	 transformation effectively doesn't preserve non-maximal ranges.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TREE_CODE (op0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
	{
	  tree and_expr = op0;
	  tree and0 = TREE_OPERAND (and_expr, 0);
	  tree and1 = TREE_OPERAND (and_expr, 1);
	  int change = 0;

	  if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
	      || (TYPE_PRECISION (type)
		  <= TYPE_PRECISION (TREE_TYPE (and_expr))))
	    change = 1;
	  else if (TYPE_PRECISION (TREE_TYPE (and1))
		   <= HOST_BITS_PER_WIDE_INT
		   && tree_fits_uhwi_p (and1))
	    {
	      unsigned HOST_WIDE_INT cst;

	      cst = tree_to_uhwi (and1);
	      cst &= HOST_WIDE_INT_M1U
		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
	      change = (cst == 0);
#ifdef LOAD_EXTEND_OP
	      if (change
		  && !flag_syntax_only
		  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
		      == ZERO_EXTEND))
		{
		  tree uns = unsigned_type_for (TREE_TYPE (and0));
		  and0 = fold_convert_loc (loc, uns, and0);
		  and1 = fold_convert_loc (loc, uns, and1);
		}
#endif
	    }
	  if (change)
	    {
	      tem = force_fit_type (type, wi::to_widest (and1), 0,
				    TREE_OVERFLOW (and1));
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_convert_loc (loc, type, and0), tem);
	    }
	}
      /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
	 when one of the new casts will fold away.  Conservatively we assume
	 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST.  */
      if (POINTER_TYPE_P (type)
	  && TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
	  && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
	      || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);

	  return fold_build_pointer_plus_loc
		   (loc, fold_convert_loc (loc, type, arg00), arg01);
	}
      /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
	 of the same precision, and X is an integer type not narrower than
	 types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (op0) == BIT_NOT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
	  && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
	{
	  tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
	      && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
	    return fold_build1_loc (loc, BIT_NOT_EXPR, type,
				    fold_convert_loc (loc, type, tem));
	}
      /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
	 type of X and Y (integer types only).  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (op0) == MULT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
	{
	  /* Be careful not to introduce new overflows.  */
	  tree mult_type;
	  if (TYPE_OVERFLOW_WRAPS (type))
	    mult_type = type;
	  else
	    mult_type = unsigned_type_for (type);

	  if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
	    {
	      tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
				     fold_convert_loc (loc, mult_type,
						       TREE_OPERAND (op0, 0)),
				     fold_convert_loc (loc, mult_type,
						       TREE_OPERAND (op0, 1)));
	      return fold_convert_loc (loc, type, tem);
	    }
	}
      tem = fold_convert_const (code, type, arg0);
      return tem ? tem : NULL_TREE;

    case ADDR_SPACE_CONVERT_EXPR:
      if (integer_zerop (arg0))
	return fold_convert_const (code, type, arg0);
      return NULL_TREE;

    case FIXED_CONVERT_EXPR:
      tem = fold_convert_const (code, type, arg0);
      return tem ? tem : NULL_TREE;
    case VIEW_CONVERT_EXPR:
      if (TREE_TYPE (op0) == type)
	return op0;
      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
	return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
				type, TREE_OPERAND (op0, 0));
      if (TREE_CODE (op0) == MEM_REF)
	return fold_build2_loc (loc, MEM_REF, type,
				TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));

      /* For integral conversions with the same precision or pointer
	 conversions use a NOP_EXPR instead.  */
      if ((INTEGRAL_TYPE_P (type)
	   || POINTER_TYPE_P (type))
	  && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
	      || POINTER_TYPE_P (TREE_TYPE (op0)))
	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
	return fold_convert_loc (loc, type, op0);

      /* Strip inner integral conversions that do not change the precision.  */
      if (CONVERT_EXPR_P (op0)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
	      || POINTER_TYPE_P (TREE_TYPE (op0)))
	  && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
	      || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
	  && (TYPE_PRECISION (TREE_TYPE (op0))
	      == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
	return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
				type, TREE_OPERAND (op0, 0));

      return fold_view_convert_expr (type, op0);
    case NEGATE_EXPR:
      tem = fold_negate_expr (loc, arg0);
      if (tem)
	return fold_convert_loc (loc, type, tem);
      return NULL_TREE;
    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
	       && TREE_CODE (type) == REAL_TYPE)
	{
	  tree targ0 = strip_float_extensions (arg0);
	  if (targ0 != arg0)
	    return fold_convert_loc (loc, type,
				     fold_build1_loc (loc, ABS_EXPR,
						      TREE_TYPE (targ0),
						      targ0));
	}
      /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
      else if (TREE_CODE (arg0) == ABS_EXPR)
	return arg0;
      else if (tree_expr_nonnegative_p (arg0))
	return arg0;

      /* Strip sign ops from argument.  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = fold_strip_sign_ops (arg0);
	  if (tem)
	    return fold_build1_loc (loc, ABS_EXPR, type,
				    fold_convert_loc (loc, type, tem));
	}
      return NULL_TREE;
    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	{
	  tree itype = TREE_TYPE (type);
	  tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
	  tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
				  negate_expr (ipart));
	}
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree itype = TREE_TYPE (type);
	  tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
	  tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
	  return build_complex (type, rpart, negate_expr (ipart));
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      return NULL_TREE;
    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      /* Convert ~ (-A) to A - 1.  */
      else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build2_loc (loc, MINUS_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)),
				build_int_cst (type, 1));
      /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
      else if (INTEGRAL_TYPE_P (type)
	       && ((TREE_CODE (arg0) == MINUS_EXPR
		    && integer_onep (TREE_OPERAND (arg0, 1)))
		   || (TREE_CODE (arg0) == PLUS_EXPR
		       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
	return fold_build1_loc (loc, NEGATE_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));
      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
					 fold_convert_loc (loc, type,
							   TREE_OPERAND (arg0, 0)))))
	return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
					 fold_convert_loc (loc, type,
							   TREE_OPERAND (arg0, 1)))))
	return fold_build2_loc (loc, BIT_XOR_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)), tem);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree *elements;
	  tree elem;
	  unsigned count = VECTOR_CST_NELTS (arg0), i;

	  elements = XALLOCAVEC (tree, count);
	  for (i = 0; i < count; i++)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
		break;
	      elements[i] = elem;
	    }
	  if (i == count)
	    return build_vector (type, elements);
	}
      else if (COMPARISON_CLASS_P (arg0)
	       && (VECTOR_TYPE_P (type)
		   || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
	{
	  tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
	  enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
				     HONOR_NANS (TYPE_MODE (op_type)));
	  if (subcode != ERROR_MARK)
	    return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
			       TREE_OPERAND (arg0, 1));
	}

      return NULL_TREE;
    case TRUTH_NOT_EXPR:
      /* Note that the operand of this must be an int
	 and its values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language,
	 but we don't handle values other than 1 correctly yet.)  */
      tem = fold_truth_not_expr (loc, arg0);
      if (!tem)
	return NULL_TREE;
      return fold_convert_loc (loc, type, tem);
    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
				     TREE_OPERAND (arg0, 1));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert_loc (loc, type, TREE_REALPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
				 fold_build1_loc (loc, REALPART_EXPR, itype,
						  TREE_OPERAND (arg0, 0)),
				 fold_build1_loc (loc, REALPART_EXPR, itype,
						  TREE_OPERAND (arg0, 1)));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1_loc (loc, REALPART_EXPR, itype,
				 TREE_OPERAND (arg0, 0));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
		fn = mathfn_built_in (type, BUILT_IN_COS);
		if (fn)
		  return build_call_expr_loc (loc, fn, 1,
					      CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      return NULL_TREE;
    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return build_zero_cst (type);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
				     TREE_OPERAND (arg0, 0));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
				 fold_build1_loc (loc, IMAGPART_EXPR, itype,
						  TREE_OPERAND (arg0, 0)),
				 fold_build1_loc (loc, IMAGPART_EXPR, itype,
						  TREE_OPERAND (arg0, 1)));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1_loc (loc, IMAGPART_EXPR, itype,
				 TREE_OPERAND (arg0, 0));
	  return fold_convert_loc (loc, type, negate_expr (tem));
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
		fn = mathfn_built_in (type, BUILT_IN_SIN);
		if (fn)
		  return build_call_expr_loc (loc, fn, 1,
					      CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      return NULL_TREE;
    case INDIRECT_REF:
      /* Fold *&X to X if X is an lvalue.  */
      if (TREE_CODE (op0) == ADDR_EXPR)
	{
	  tree op00 = TREE_OPERAND (op0, 0);
	  if ((TREE_CODE (op00) == VAR_DECL
	       || TREE_CODE (op00) == PARM_DECL
	       || TREE_CODE (op00) == RESULT_DECL)
	      && !TREE_READONLY (op00))
	    return op00;
	}
      return NULL_TREE;
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;
	enum tree_code subcode;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 2);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR))
	  elts += nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else
	  subcode = FLOAT_EXPR;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }
    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;
	enum tree_code subcode;

	if (TREE_CODE (op0) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (op0, elts))
	  return NULL_TREE;

	switch (code)
	  {
	  case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
	  case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
	  case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
	  default: gcc_unreachable ();
	  }

	for (i = 1; i < nelts; i++)
	  {
	    elts[0] = const_binop (subcode, elts[0], elts[i]);
	    if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
	      return NULL_TREE;
	    elts[i] = build_zero_cst (TREE_TYPE (type));
	  }

	return build_vector (type, elts);
      }

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* If the operation was a conversion do _not_ mark a resulting constant
   with TREE_OVERFLOW if the original constant was not.  These conversions
   have implementation defined behavior and retaining the TREE_OVERFLOW
   flag here would confuse later passes such as VRP.  */

tree
fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
				tree type, tree op0)
{
  tree res = fold_unary_loc (loc, code, type, op0);
  if (res
      && TREE_CODE (res) == INTEGER_CST
      && TREE_CODE (op0) == INTEGER_CST
      && CONVERT_EXPR_CODE_P (code))
    TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);

  return res;
}
/* Fold a binary bitwise/truth expression of code CODE and type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting expression.
   ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_truth_andor (location_t loc, enum tree_code code, tree type,
		  tree arg0, tree arg1, tree op0, tree op1)
{
  tree tem;

  /* We only do these simplifications if we are optimizing.  */
  if (!optimize)
    return NULL_TREE;

  /* Check for things like (A || B) && (A || C).  We can convert this
     to A || (B && C).  Note that either operator can be any of the four
     truth and/or operations and the transformation will still be
     valid.  Also note that we only care about order for the
     ANDIF and ORIF operators.  If B contains side effects, this
     might change the truth-value of A.  */
  if (TREE_CODE (arg0) == TREE_CODE (arg1)
      && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
	  || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
	  || TREE_CODE (arg0) == TRUTH_AND_EXPR
	  || TREE_CODE (arg0) == TRUTH_OR_EXPR)
      && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
    {
      tree a00 = TREE_OPERAND (arg0, 0);
      tree a01 = TREE_OPERAND (arg0, 1);
      tree a10 = TREE_OPERAND (arg1, 0);
      tree a11 = TREE_OPERAND (arg1, 1);
      int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
			  || TREE_CODE (arg0) == TRUTH_AND_EXPR)
			 && (code == TRUTH_AND_EXPR
			     || code == TRUTH_OR_EXPR));

      if (operand_equal_p (a00, a10, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
				fold_build2_loc (loc, code, type, a01, a11));
      else if (commutative && operand_equal_p (a00, a11, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
				fold_build2_loc (loc, code, type, a01, a10));
      else if (commutative && operand_equal_p (a01, a10, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
				fold_build2_loc (loc, code, type, a00, a11));

      /* This case is tricky because we must either have commutative
	 operators or else A10 must not have side-effects.  */

      else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
	       && operand_equal_p (a01, a11, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type,
				fold_build2_loc (loc, code, type, a00, a10),
				a01);
    }

  /* See if we can build a range comparison.  */
  if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
    return tem;

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
      if (tem)
	return fold_build2_loc (loc, code, type, tem, arg1);
    }

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
      if (tem)
	return fold_build2_loc (loc, code, type, arg0, tem);
    }

  /* Check for the possibility of merging component references.  If our
     lhs is another similar operation, try to merge its rhs with our
     rhs.  Then try to merge our lhs and rhs.  */
  if (TREE_CODE (arg0) == code
      && 0 != (tem = fold_truth_andor_1 (loc, code, type,
					 TREE_OPERAND (arg0, 1), arg1)))
    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

  if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
    return tem;

  if (LOGICAL_OP_NON_SHORT_CIRCUIT
      && (code == TRUTH_AND_EXPR
	  || code == TRUTH_ANDIF_EXPR
	  || code == TRUTH_OR_EXPR
	  || code == TRUTH_ORIF_EXPR))
    {
      enum tree_code ncode, icode;

      ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
	      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
      icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;

      /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
	 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
	 We don't want to pack more than two leafs to a non-IF AND/OR
	 expression.
	 If tree-code of left-hand operand isn't an AND/OR-IF code and not
	 equal to IF-CODE, then we don't want to add right-hand operand.
	 If the inner right-hand side of left-hand operand has
	 side-effects, or isn't simple, then we can't add to it,
	 as otherwise we might destroy if-sequence.  */
      if (TREE_CODE (arg0) == icode
	  && simple_operand_p_2 (arg1)
	  /* Needed for sequence points to handle trappings, and
	     side-effects.  */
	  && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
	{
	  tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
				 arg1);
	  return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
				  tem);
	}
      /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
	 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
      else if (TREE_CODE (arg1) == icode
	       && simple_operand_p_2 (arg0)
	       /* Needed for sequence points to handle trappings, and
		  side-effects.  */
	       && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_build2_loc (loc, ncode, type,
				 arg0, TREE_OPERAND (arg1, 0));
	  return fold_build2_loc (loc, icode, type, tem,
				  TREE_OPERAND (arg1, 1));
	}
      /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
	 into (A OR B).
	 For sequence point consistency, we need to check for trapping,
	 and side-effects.  */
      else if (code == icode && simple_operand_p_2 (arg0)
	       && simple_operand_p_2 (arg1))
	return fold_build2_loc (loc, ncode, type, arg0, arg1);
    }

  return NULL_TREE;
}
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_minmax (location_t loc, enum tree_code code, tree type,
	     tree op0, tree op1)
{
  enum tree_code compl_code;

  if (code == MIN_EXPR)
    compl_code = MAX_EXPR;
  else if (code == MAX_EXPR)
    compl_code = MIN_EXPR;
  else
    gcc_unreachable ();

  /* MIN (MAX (a, b), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));

  /* MIN (MAX (b, a), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
      && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));

  /* MIN (a, MAX (a, b)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));

  /* MIN (a, MAX (b, a)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));

  return NULL_TREE;
}
/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
   valid if signed overflow is undefined.  */

static tree
maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
				 tree arg0, tree arg1,
				 bool *strict_overflow_p)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
  int sgn0;
  bool swap = false;

  /* Match A +- CST code arg1 and CST code arg1.  We can change the
     first form only if overflow is undefined.  */
  if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
	 /* In principle pointers also have undefined overflow behavior,
	    but that causes problems elsewhere.  */
	 && !POINTER_TYPE_P (TREE_TYPE (arg0))
	 && (code0 == MINUS_EXPR
	     || code0 == PLUS_EXPR)
	 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	|| code0 == INTEGER_CST))
    return NULL_TREE;

  /* Identify the constant in arg0 and its sign.  */
  if (code0 == INTEGER_CST)
    cst0 = arg0;
  else
    cst0 = TREE_OPERAND (arg0, 1);
  sgn0 = tree_int_cst_sgn (cst0);

  /* Overflowed constants and zero will cause problems.  */
  if (integer_zerop (cst0)
      || TREE_OVERFLOW (cst0))
    return NULL_TREE;

  /* See if we can reduce the magnitude of the constant in
     arg0 by changing the comparison code.  */
  if (code0 == INTEGER_CST)
    {
      /* CST <= arg1  ->  CST-1 < arg1.  */
      if (code == LE_EXPR && sgn0 == 1)
	code = LT_EXPR;
      /* -CST < arg1  ->  -CST-1 <= arg1.  */
      else if (code == LT_EXPR && sgn0 == -1)
	code = LE_EXPR;
      /* CST > arg1  ->  CST-1 >= arg1.  */
      else if (code == GT_EXPR && sgn0 == 1)
	code = GE_EXPR;
      /* -CST >= arg1  ->  -CST-1 > arg1.  */
      else if (code == GE_EXPR && sgn0 == -1)
	code = GT_EXPR;
      else
	return NULL_TREE;
      /* arg1 code' CST' might be more canonical.  */
      swap = true;
    }
  else
    {
      /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
      if (code == LT_EXPR
	  && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = LE_EXPR;
      /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
      else if (code == GT_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = GE_EXPR;
      /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
      else if (code == LE_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = LT_EXPR;
      /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
      else if (code == GE_EXPR
	       && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = GT_EXPR;
      else
	return NULL_TREE;
      *strict_overflow_p = true;
    }

  /* Now build the constant reduced in magnitude.  But not if that
     would produce one outside of its types range.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
      && ((sgn0 == 1
	   && TYPE_MIN_VALUE (TREE_TYPE (cst0))
	   && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
	  || (sgn0 == -1
	      && TYPE_MAX_VALUE (TREE_TYPE (cst0))
	      && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
    /* We cannot swap the comparison here as that would cause us to
       endlessly recurse.  */
    return NULL_TREE;

  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
		       cst0, build_int_cst (TREE_TYPE (cst0), 1));
  if (code0 != INTEGER_CST)
    t = fold_build2_loc (loc, code0, TREE_TYPE (arg0),
			 TREE_OPERAND (arg0, 0), t);
  t = fold_convert (TREE_TYPE (arg1), t);

  /* If swapping might yield a more canonical form, do so.  */
  if (swap)
    return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
  return fold_build2_loc (loc, code, type, t, arg1);
}
/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
   overflow further.  Try to decrease the magnitude of constants involved
   by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
   and put sole constants at the second argument position.
   Returns the canonicalized tree if changed, otherwise NULL_TREE.  */

static tree
maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
			       tree arg0, tree arg1)
{
  tree t;
  bool strict_overflow_p;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when reducing constant in comparison");

  /* Try canonicalization by simplifying arg0.  */
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
				       &strict_overflow_p);
  if (t)
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
      return t;
    }

  /* Try canonicalization by simplifying arg1 using the swapped
     comparison.  */
  code = swap_tree_comparison (code);
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
				       &strict_overflow_p);
  if (t && strict_overflow_p)
    fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
  return t;
}
/* Return whether BASE + OFFSET + BITPOS may wrap around the address
   space.  This is used to avoid issuing overflow warnings for
   expressions like &p->x which can not wrap.  */

static bool
pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
{
  if (!POINTER_TYPE_P (TREE_TYPE (base)))
    return true;

  if (bitpos < 0)
    return true;

  wide_int wi_offset;
  int precision = TYPE_PRECISION (TREE_TYPE (base));
  if (offset == NULL_TREE)
    wi_offset = wi::zero (precision);
  else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
    return true;
  else
    wi_offset = offset;

  bool overflow;
  wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
  wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
  if (overflow)
    return true;

  if (!wi::fits_uhwi_p (total))
    return true;

  HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
  if (size <= 0)
    return true;

  /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
     array.  */
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      HOST_WIDE_INT base_size;

      base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
      if (base_size > 0 && size < base_size)
	size = base_size;
    }

  return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
}
/* Return the HOST_WIDE_INT least significant bits of T, a sizetype
   kind INTEGER_CST.  This makes sure to properly sign-extend the
   constant.  */

static HOST_WIDE_INT
size_low_cst (const_tree t)
{
  HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
  int prec = TYPE_PRECISION (TREE_TYPE (t));
  if (prec < HOST_BITS_PER_WIDE_INT)
    return sext_hwi (w, prec);
  return w;
}
/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary should call fold_binary.  Fold a comparison with
   tree code CODE and type TYPE with operands OP0 and OP1.  Return
   the folded comparison or NULL_TREE.  */

static tree
fold_comparison (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
  tree arg0, arg1, tem;

  arg0 = op0;
  arg1 = op1;

  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  tem = fold_relational_const (code, type, arg0, arg1);
  if (tem != NULL_TREE)
    return tem;

  /* If one arg is a real or integer constant, put it last.  */
  if (tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);

  /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1.  */
  if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (equality_code || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && TREE_CODE (arg1) == INTEGER_CST
      && !TREE_OVERFLOW (arg1))
    {
      const enum tree_code
	reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
      tree variable = TREE_OPERAND (arg0, 0);
      tree new_const = int_const_binop (reverse_op, const2, const1);

      /* If the constant operation overflowed this can be
	 simplified as a comparison against INT_MAX/INT_MIN.  */
      if (TREE_OVERFLOW (new_const))
	{
	  int const1_sgn = tree_int_cst_sgn (const1);
	  enum tree_code code2 = code;

	  /* Get the sign of the constant on the lhs if the
	     operation were VARIABLE + CONST1.  */
	  if (TREE_CODE (arg0) == MINUS_EXPR)
	    const1_sgn = -const1_sgn;

	  /* The sign of the constant determines if we overflowed
	     INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
	     Canonicalize to the INT_MIN overflow by swapping the comparison
	     if necessary.  */
	  if (const1_sgn == -1)
	    code2 = swap_tree_comparison (code);

	  /* We now can look at the canonicalized case
	       VARIABLE + 1  CODE2  INT_MIN
	     and decide on the result.  */
	  switch (code2)
	    {
	    case EQ_EXPR:
	    case LT_EXPR:
	    case LE_EXPR:
	      return
		omit_one_operand_loc (loc, type, boolean_false_node, variable);

	    case NE_EXPR:
	    case GE_EXPR:
	    case GT_EXPR:
	      return
		omit_one_operand_loc (loc, type, boolean_true_node, variable);

	    default:
	      gcc_unreachable ();
	    }
	}
      else
	{
	  if (!equality_code)
	    fold_overflow_warning ("assuming signed overflow does not occur "
				   "when changing X +- C1 cmp C2 to "
				   "X cmp C2 -+ C1",
				   WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type, variable, new_const);
	}
    }
  /* Transform comparisons of the form X - Y CMP 0 to X CMP Y.  */
  if (TREE_CODE (arg0) == MINUS_EXPR
      && (equality_code || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
      && integer_zerop (arg1))
    {
      /* ??? The transformation is valid for the other operators if overflow
	 is undefined for the type, but performing it here badly interacts
	 with the transformation in fold_cond_expr_with_comparison which
	 attempts to synthesize ABS_EXPR.  */
      if (!equality_code)
	fold_overflow_warning ("assuming signed overflow does not occur "
			       "when changing X - Y cmp 0 to X cmp Y",
			       WARN_STRICT_OVERFLOW_COMPARISON);
      return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
			      TREE_OPERAND (arg0, 1));
    }
  /* For comparisons of pointers we can decompose it to a compile time
     comparison of the base objects and the offsets into the object.
     This requires at least one operand being an ADDR_EXPR or a
     POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == ADDR_EXPR
	  || TREE_CODE (arg1) == ADDR_EXPR
	  || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
    {
      tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
      HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
      enum machine_mode mode;
      int volatilep, unsignedp;
      bool indirect_base0 = false, indirect_base1 = false;

      /* Get base and offset for the access.  Strip ADDR_EXPR for
	 get_inner_reference, but put it back by stripping INDIRECT_REF
	 off the base object if possible.  indirect_baseN will be true
	 if baseN is not an address but refers to the object itself.  */
      base0 = arg0;
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
				       &bitsize, &bitpos0, &offset0, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base0) == INDIRECT_REF)
	    base0 = TREE_OPERAND (base0, 0);
	  else
	    indirect_base0 = true;
	}
      else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	{
	  base0 = TREE_OPERAND (arg0, 0);
	  STRIP_SIGN_NOPS (base0);
	  if (TREE_CODE (base0) == ADDR_EXPR)
	    {
	      base0 = TREE_OPERAND (base0, 0);
	      indirect_base0 = true;
	    }
	  offset0 = TREE_OPERAND (arg0, 1);
	  if (tree_fits_shwi_p (offset0))
	    {
	      HOST_WIDE_INT off = size_low_cst (offset0);
	      if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
				   * BITS_PER_UNIT)
		  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
		{
		  bitpos0 = off * BITS_PER_UNIT;
		  offset0 = NULL_TREE;
		}
	    }
	}

      base1 = arg1;
      if (TREE_CODE (arg1) == ADDR_EXPR)
	{
	  base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
				       &bitsize, &bitpos1, &offset1, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base1) == INDIRECT_REF)
	    base1 = TREE_OPERAND (base1, 0);
	  else
	    indirect_base1 = true;
	}
      else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	{
	  base1 = TREE_OPERAND (arg1, 0);
	  STRIP_SIGN_NOPS (base1);
	  if (TREE_CODE (base1) == ADDR_EXPR)
	    {
	      base1 = TREE_OPERAND (base1, 0);
	      indirect_base1 = true;
	    }
	  offset1 = TREE_OPERAND (arg1, 1);
	  if (tree_fits_shwi_p (offset1))
	    {
	      HOST_WIDE_INT off = size_low_cst (offset1);
	      if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
				   * BITS_PER_UNIT)
		  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
		{
		  bitpos1 = off * BITS_PER_UNIT;
		  offset1 = NULL_TREE;
		}
	    }
	}

      /* A local variable can never be pointed to by
	 the default SSA name of an incoming parameter.  */
      if ((TREE_CODE (arg0) == ADDR_EXPR
	   && indirect_base0
	   && TREE_CODE (base0) == VAR_DECL
	   && auto_var_in_fn_p (base0, current_function_decl)
	   && !indirect_base1
	   && TREE_CODE (base1) == SSA_NAME
	   && SSA_NAME_IS_DEFAULT_DEF (base1)
	   && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
	  || (TREE_CODE (arg1) == ADDR_EXPR
	      && indirect_base1
	      && TREE_CODE (base1) == VAR_DECL
	      && auto_var_in_fn_p (base1, current_function_decl)
	      && !indirect_base0
	      && TREE_CODE (base0) == SSA_NAME
	      && SSA_NAME_IS_DEFAULT_DEF (base0)
	      && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
	{
	  if (code == NE_EXPR)
	    return constant_boolean_node (1, type);
	  else if (code == EQ_EXPR)
	    return constant_boolean_node (0, type);
	}
      /* If we have equivalent bases we might be able to simplify.  */
      else if (indirect_base0 == indirect_base1
	       && operand_equal_p (base0, base1, 0))
	{
	  /* We can fold this expression to a constant if the non-constant
	     offset parts are equal.  */
	  if ((offset0 == offset1
	       || (offset0 && offset1
		   && operand_equal_p (offset0, offset1, 0)))
	      && (equality_code
		  || (indirect_base0 && DECL_P (base0))
		  || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      if (!equality_code
		  && bitpos0 != bitpos1
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_CONDITIONAL);

	      switch (code)
		{
		case EQ_EXPR:
		  return constant_boolean_node (bitpos0 == bitpos1, type);
		case NE_EXPR:
		  return constant_boolean_node (bitpos0 != bitpos1, type);
		case LT_EXPR:
		  return constant_boolean_node (bitpos0 < bitpos1, type);
		case LE_EXPR:
		  return constant_boolean_node (bitpos0 <= bitpos1, type);
		case GE_EXPR:
		  return constant_boolean_node (bitpos0 >= bitpos1, type);
		case GT_EXPR:
		  return constant_boolean_node (bitpos0 > bitpos1, type);
		default:;
		}
	    }
	  /* We can simplify the comparison to a comparison of the variable
	     offset parts if the constant offset parts are equal.
	     Be careful to use signed sizetype here because otherwise we
	     mess with array offsets in the wrong way.  This is possible
	     because pointer arithmetic is restricted to retain within an
	     object and overflow on pointer differences is undefined as of
	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
	  else if (bitpos0 == bitpos1
		   && (equality_code
		       || (indirect_base0 && DECL_P (base0))
		       || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      /* By converting to signed sizetype we cover middle-end pointer
		 arithmetic which operates on unsigned pointer types of size
		 type size and ARRAY_REF offsets which are properly sign or
		 zero extended from their type in case it is narrower than
		 sizetype.  */
	      if (offset0 == NULL_TREE)
		offset0 = build_int_cst (ssizetype, 0);
	      else
		offset0 = fold_convert_loc (loc, ssizetype, offset0);
	      if (offset1 == NULL_TREE)
		offset1 = build_int_cst (ssizetype, 0);
	      else
		offset1 = fold_convert_loc (loc, ssizetype, offset1);

	      if (!equality_code
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_COMPARISON);

	      return fold_build2_loc (loc, code, type, offset0, offset1);
	    }
	}
      /* For non-equal bases we can simplify if they are addresses
	 of local binding decls or constants.  */
      else if (indirect_base0 && indirect_base1
	       /* We know that !operand_equal_p (base0, base1, 0)
		  because the if condition was false.  But make
		  sure two decls are not the same.  */
	       && base0 != base1
	       && TREE_CODE (arg0) == ADDR_EXPR
	       && TREE_CODE (arg1) == ADDR_EXPR
	       && (((TREE_CODE (base0) == VAR_DECL
		     || TREE_CODE (base0) == PARM_DECL)
		    && (targetm.binds_local_p (base0)
			|| CONSTANT_CLASS_P (base1)))
		   || CONSTANT_CLASS_P (base0))
	       && (((TREE_CODE (base1) == VAR_DECL
		     || TREE_CODE (base1) == PARM_DECL)
		    && (targetm.binds_local_p (base1)
			|| CONSTANT_CLASS_P (base0)))
		   || CONSTANT_CLASS_P (base1)))
	{
	  if (code == EQ_EXPR)
	    return omit_two_operands_loc (loc, type, boolean_false_node,
					  arg0, arg1);
	  else if (code == NE_EXPR)
	    return omit_two_operands_loc (loc, type, boolean_true_node,
					  arg0, arg1);
	}
      /* For equal offsets we can simplify to a comparison of the
	 base addresses.  */
      else if (bitpos0 == bitpos1
	       && (indirect_base0
		   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
	       && (indirect_base1
		   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
	       && ((offset0 == offset1)
		   || (offset0 && offset1
		       && operand_equal_p (offset0, offset1, 0))))
	{
	  if (indirect_base0)
	    base0 = build_fold_addr_expr_loc (loc, base0);
	  if (indirect_base1)
	    base1 = build_fold_addr_expr_loc (loc, base1);
	  return fold_build2_loc (loc, code, type, base0, base1);
	}
    }
  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one and has the same sign.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = TREE_OPERAND (arg1, 1);
      tree variable1 = TREE_OPERAND (arg0, 0);
      tree variable2 = TREE_OPERAND (arg1, 0);
      tree cst;
      const char * const warnmsg = G_("assuming signed overflow does not "
				      "occur when combining constants around "
				      "a comparison");

      /* Put the constant on the side where it doesn't overflow and is
	 of lower absolute value and of same sign than before.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const2, const1);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
	  && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  variable1,
				  fold_build2_loc (loc, TREE_CODE (arg1),
						   TREE_TYPE (arg1),
						   variable2, cst));
	}

      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const1, const2);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
	  && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  fold_build2_loc (loc, TREE_CODE (arg0),
						   TREE_TYPE (arg0),
						   variable1, cst),
				  variable2);
	}
    }
  /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
     signed arithmetic case.  That form is created by the compiler
     often enough for folding it to be of value.  One example is in
     computing loop trip counts after Operator Strength Reduction.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && TREE_CODE (arg0) == MULT_EXPR
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && integer_zerop (arg1))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;			/* zero */
      tree variable1 = TREE_OPERAND (arg0, 0);
      enum tree_code cmp_code = code;

      /* Handle unfolded multiplication by zero.  */
      if (integer_zerop (const1))
	return fold_build2_loc (loc, cmp_code, type, const1, const2);

      fold_overflow_warning (("assuming signed overflow does not occur when "
			      "eliminating multiplication in comparison "
			      "with zero"),
			     WARN_STRICT_OVERFLOW_COMPARISON);

      /* If const1 is negative we swap the sense of the comparison.  */
      if (tree_int_cst_sgn (const1) < 0)
	cmp_code = swap_tree_comparison (cmp_code);

      return fold_build2_loc (loc, cmp_code, type, variable1, const2);
    }
  tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
  if (tem)
    return tem;
  if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
    {
      tree targ0 = strip_float_extensions (arg0);
      tree targ1 = strip_float_extensions (arg1);
      tree newtype = TREE_TYPE (targ0);

      if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	newtype = TREE_TYPE (targ1);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, newtype, targ0),
				fold_convert_loc (loc, newtype, targ1));

      /* (-a) CMP (-b) -> b CMP a  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
				TREE_OPERAND (arg0, 0));

      if (TREE_CODE (arg1) == REAL_CST)
	{
	  REAL_VALUE_TYPE cst;
	  cst = TREE_REAL_CST (arg1);

	  /* (-a) CMP CST -> a swap(CMP) (-CST)  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR)
	    return fold_build2_loc (loc, swap_tree_comparison (code), type,
				    TREE_OPERAND (arg0, 0),
				    build_real (TREE_TYPE (arg1),
						real_value_negate (&cst)));

	  /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
	  /* a CMP (-0) -> a CMP 0  */
	  if (REAL_VALUE_MINUS_ZERO (cst))
	    return fold_build2_loc (loc, code, type, arg0,
				    build_real (TREE_TYPE (arg1), dconst0));

	  /* x != NaN is always true, other ops are always false.  */
	  if (REAL_VALUE_ISNAN (cst)
	      && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
	      return omit_one_operand_loc (loc, type, tem, arg0);
	    }

	  /* Fold comparisons against infinity.  */
	  if (REAL_VALUE_ISINF (cst)
	      && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = fold_inf_compare (loc, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}

      /* If this is a comparison of a real constant with a PLUS_EXPR
	 or a MINUS_EXPR of a real constant, we can convert it into a
	 comparison with a revised real constant as long as no overflow
	 occurs when unsafe_math_optimizations are enabled.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == REAL_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      arg1, TREE_OPERAND (arg0, 1)))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Likewise, we can simplify a comparison of a real constant with
	 a MINUS_EXPR whose first operand is also a real constant, i.e.
	 (c1 - x) < c2 becomes x > c1-c2.  Reordering is allowed on
	 floating-point types only if -fassociative-math is set.  */
      if (flag_associative_math
	  && TREE_CODE (arg1) == REAL_CST
	  && TREE_CODE (arg0) == MINUS_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
	  && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
				      arg1))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, swap_tree_comparison (code), type,
				TREE_OPERAND (arg0, 1), tem);

      /* Fold comparisons against built-in math functions.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && flag_unsafe_math_optimizations
	  && ! flag_errno_math)
	{
	  enum built_in_function fcode = builtin_mathfn_code (arg0);

	  if (fcode != END_BUILTINS)
	    {
	      tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}
    }
  if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
      && CONVERT_EXPR_P (arg0))
    {
      /* If we are widening one operand of an integer comparison,
	 see if the other operand is similarly being widened.  Perhaps we
	 can do the comparison in the narrower type.  */
      tem = fold_widened_comparison (loc, code, type, arg0, arg1);
      if (tem)
	return tem;

      /* Or if we are changing signedness.  */
      tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
      if (tem)
	return tem;
    }
  /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
     constant, we can simplify it.  */
  if (TREE_CODE (arg1) == INTEGER_CST
      && (TREE_CODE (arg0) == MIN_EXPR
	  || TREE_CODE (arg0) == MAX_EXPR)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
    {
      tem = optimize_minmax_comparison (loc, code, type, op0, op1);
      if (tem)
	return tem;
    }
  /* Simplify comparison of something with itself.  (For IEEE
     floating-point, we can only do some of these simplifications.)  */
  if (operand_equal_p (arg0, arg1, 0))
    {
      switch (code)
	{
	case EQ_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  break;

	case GE_EXPR:
	case LE_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);

	case NE_EXPR:
	  /* For NE, we can only do this simplification if integer
	     or we don't honor IEEE floating point NaNs.  */
	  if (FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    break;
	  /* ... fall through ...  */
	case GT_EXPR:
	case LT_EXPR:
	  return constant_boolean_node (0, type);
	default:
	  break;
	}
    }
  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like  ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;
      int save_p = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
	  /* Don't handle degenerate cases here; they should already
	     have been handled anyway.  */
	  && cval1 != 0 && cval2 != 0
	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
	{
	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

	  /* We can't just pass T to eval_subst in case cval1 or cval2
	     was the same as ARG1.  */

	  tree high_result
	    = fold_build2_loc (loc, code, type,
			       eval_subst (loc, arg0, cval1, maxval,
					   cval2, minval),
			       arg1);
	  tree equal_result
	    = fold_build2_loc (loc, code, type,
			       eval_subst (loc, arg0, cval1, maxval,
					   cval2, maxval),
			       arg1);
	  tree low_result
	    = fold_build2_loc (loc, code, type,
			       eval_subst (loc, arg0, cval1, minval,
					   cval2, maxval),
			       arg1);

	  /* All three of these results should be 0 or 1.  Confirm they are.
	     Then use those values to select the proper code to use.  */

	  if (TREE_CODE (high_result) == INTEGER_CST
	      && TREE_CODE (equal_result) == INTEGER_CST
	      && TREE_CODE (low_result) == INTEGER_CST)
	    {
	      /* Make a 3-bit mask with the high-order bit being the
		 value for `>', the next for '=', and the low for '<'.  */
	      switch ((integer_onep (high_result) * 4)
		      + (integer_onep (equal_result) * 2)
		      + integer_onep (low_result))
		{
		case 0:
		  /* Always false.  */
		  return omit_one_operand_loc (loc, type,
					       integer_zero_node, arg0);
		case 1:
		  code = LT_EXPR;
		  break;
		case 2:
		  code = EQ_EXPR;
		  break;
		case 3:
		  code = LE_EXPR;
		  break;
		case 4:
		  code = GT_EXPR;
		  break;
		case 5:
		  code = NE_EXPR;
		  break;
		case 6:
		  code = GE_EXPR;
		  break;
		case 7:
		  /* Always true.  */
		  return omit_one_operand_loc (loc, type,
					       integer_one_node, arg0);
		}

	      if (save_p)
		{
		  tem = save_expr (build2 (code, type, cval1, cval2));
		  SET_EXPR_LOCATION (tem, loc);
		  return tem;
		}
	      return fold_build2_loc (loc, code, type, cval1, cval2);
	    }
	}
    }
  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
     into a single range test.  */
  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
      && TREE_CODE (arg1) == INTEGER_CST
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !integer_zerop (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (arg1))
    {
      tem = fold_div_compare (loc, code, type, arg0, arg1);
      if (tem != NULL_TREE)
	return tem;
    }
  /* Fold ~X op ~Y as Y op X.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == BIT_NOT_EXPR)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, code, type,
			      fold_convert_loc (loc, cmp_type,
						TREE_OPERAND (arg1, 0)),
			      TREE_OPERAND (arg0, 0));
    }

  /* Fold ~X op C as X op' ~C, where op' is the swapped comparison.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, swap_tree_comparison (code), type,
			      TREE_OPERAND (arg0, 0),
			      fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
					       fold_convert_loc (loc, cmp_type,
								 arg1)));
    }

  return NULL_TREE;
}
/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */

static tree
fold_mult_zconjz (location_t loc, tree type, tree expr)
{
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      expr = save_expr (expr);
      rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
      ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
    }

  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2_loc (loc, PLUS_EXPR, itype,
			 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
			 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
  return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
			  build_zero_cst (itype));
}
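/* This is the textbook identity: for z = a + b*i,
   z * conj(z) = (a + b*i) * (a - b*i) = a*a + b*b, a real value, so the
   imaginary part of the result is known to be zero.  */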
/* Subroutine of fold_binary.  If P is the value of EXPR, computes
   power-of-two M and (arbitrary) N such that M divides (P-N).  This condition
   guarantees that P and N have the same least significant log2(M) bits.
   N is not otherwise constrained.  In particular, N is not normalized to
   0 <= N < M as is common.  In general, the precise value of P is unknown.
   M is chosen as large as possible such that constant N can be determined.

   Returns M and sets *RESIDUE to N.

   If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
   account.  This is not always possible due to PR 35705.  */

static unsigned HOST_WIDE_INT
get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
				 bool allow_func_align)
{
  enum tree_code code;

  *residue = 0;

  code = TREE_CODE (expr);
  if (code == ADDR_EXPR)
    {
      unsigned int bitalign;
      get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
      *residue /= BITS_PER_UNIT;
      return bitalign / BITS_PER_UNIT;
    }
  else if (code == POINTER_PLUS_EXPR)
    {
      tree op0, op1;
      unsigned HOST_WIDE_INT modulus;
      enum tree_code inner_code;

      op0 = TREE_OPERAND (expr, 0);
      STRIP_NOPS (op0);
      modulus = get_pointer_modulus_and_residue (op0, residue,
						 allow_func_align);

      op1 = TREE_OPERAND (expr, 1);
      STRIP_NOPS (op1);
      inner_code = TREE_CODE (op1);
      if (inner_code == INTEGER_CST)
	{
	  *residue += TREE_INT_CST_LOW (op1);
	  return modulus;
	}
      else if (inner_code == MULT_EXPR)
	{
	  op1 = TREE_OPERAND (op1, 1);
	  if (TREE_CODE (op1) == INTEGER_CST)
	    {
	      unsigned HOST_WIDE_INT align;

	      /* Compute the greatest power-of-2 divisor of op1.  */
	      align = TREE_INT_CST_LOW (op1);
	      align &= -align;

	      /* If align is non-zero and less than *modulus, replace
		 *modulus with align.  If align is 0, then either op1 is 0
		 or the greatest power-of-2 divisor of op1 doesn't fit in an
		 unsigned HOST_WIDE_INT.  In either case, no additional
		 constraint is imposed.  */
	      if (align)
		modulus = MIN (modulus, align);

	      return modulus;
	    }
	}
    }

  /* If we get here, we were unable to determine anything useful about the
     expression.  */
  return 1;
}
/* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
   CONSTRUCTOR ARG into array ELTS and return true if successful.  */

static bool
vec_cst_ctor_to_array (tree arg, tree *elts)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;

  if (TREE_CODE (arg) == VECTOR_CST)
    {
      for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
	elts[i] = VECTOR_CST_ELT (arg, i);
    }
  else if (TREE_CODE (arg) == CONSTRUCTOR)
    {
      constructor_elt *elt;

      FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
	if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
	  return false;
	else
	  elts[i] = elt->value;
    }
  else
    return false;

  for (; i < nelts; i++)
    elts[i]
      = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);

  return true;
}
/* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
   selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
   NULL_TREE otherwise.  */

static tree
fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
  tree *elts;
  bool need_ctor = false;

  gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
	      && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
  if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
      || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
    return NULL_TREE;

  elts = XALLOCAVEC (tree, nelts * 3);
  if (!vec_cst_ctor_to_array (arg0, elts)
      || !vec_cst_ctor_to_array (arg1, elts + nelts))
    return NULL_TREE;

  for (i = 0; i < nelts; i++)
    {
      if (!CONSTANT_CLASS_P (elts[sel[i]]))
	need_ctor = true;
      elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
    }

  if (need_ctor)
    {
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, nelts);
      for (i = 0; i < nelts; i++)
	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
      return build_constructor (type, v);
    }

  return build_vector (type, &elts[2 * nelts]);
}
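/* E.g. permuting the constant vectors { 0, 1, 2, 3 } and { 4, 5, 6, 7 }
   with the selector { 0, 4, 1, 5 } yields { 0, 4, 1, 5 }: selector
   values below NELTS pick elements from ARG0, the rest from ARG1.  */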
/* Try to fold a pointer difference of type TYPE between two address
   expressions of array references AREF0 and AREF1 using location LOC.
   Return a simplified expression for the difference or NULL_TREE.  */

static tree
fold_addr_of_array_ref_difference (location_t loc, tree type,
				   tree aref0, tree aref1)
{
  tree base0 = TREE_OPERAND (aref0, 0);
  tree base1 = TREE_OPERAND (aref1, 0);
  tree base_offset = build_int_cst (type, 0);

  /* If the bases are array references as well, recurse.  If the bases
     are pointer indirections compute the difference of the pointers.
     If the bases are equal, we are set.  */
  if ((TREE_CODE (base0) == ARRAY_REF
       && TREE_CODE (base1) == ARRAY_REF
       && (base_offset
	   = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
      || (INDIRECT_REF_P (base0)
	  && INDIRECT_REF_P (base1)
	  && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
					     TREE_OPERAND (base0, 0),
					     TREE_OPERAND (base1, 0))))
      || operand_equal_p (base0, base1, 0))
    {
      tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
      tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
      tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
      tree diff = build2 (MINUS_EXPR, type, op0, op1);
      return fold_build2_loc (loc, PLUS_EXPR, type,
			      base_offset,
			      fold_build2_loc (loc, MULT_EXPR, type,
					       diff, esz));
    }
  return NULL_TREE;
}
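/* Illustrative example (not part of the original code): for int a[10],
   the difference &a[4] - &a[1] reaches this function with AREF0 = a[4]
   and AREF1 = a[1] and folds to (4 - 1) * sizeof (int), the byte offset
   between the two addresses, plus any recursively-computed base offset.  */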
/* If the real or vector real constant CST of type TYPE has an exact
   inverse, return it, else return NULL.  */

static tree
exact_inverse (tree type, tree cst)
{
  REAL_VALUE_TYPE r;
  tree unit_type, *elts;
  enum machine_mode mode;
  unsigned vec_nelts, i;

  switch (TREE_CODE (cst))
    {
    case REAL_CST:
      r = TREE_REAL_CST (cst);

      if (exact_real_inverse (TYPE_MODE (type), &r))
	return build_real (type, r);

      return NULL_TREE;

    case VECTOR_CST:
      vec_nelts = VECTOR_CST_NELTS (cst);
      elts = XALLOCAVEC (tree, vec_nelts);
      unit_type = TREE_TYPE (type);
      mode = TYPE_MODE (unit_type);

      for (i = 0; i < vec_nelts; i++)
	{
	  r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
	  if (!exact_real_inverse (mode, &r))
	    return NULL_TREE;
	  elts[i] = build_real (unit_type, r);
	}

      return build_vector (type, elts);

    default:
      return NULL_TREE;
    }
}
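/* Illustrative example (not part of the original code): for CST == 4.0
   this returns 0.25, which is exactly representable in binary floating
   point; for CST == 3.0 it returns NULL, since 1.0/3.0 has no exact
   binary representation.  */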
/* Mask out the tz least significant bits of X of type TYPE where
   tz is the number of trailing zeroes in Y.  */

static wide_int
mask_with_tz (tree type, const wide_int &x, const wide_int &y)
{
  int tz = wi::ctz (y);
  if (tz > 0)
    return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
  return x;
}
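/* Illustrative example (not part of the original code): for Y == 8,
   which has three trailing zero bits, and X == 0b10111, the result is
   0b10000: the three least significant bits of X are cleared.  */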
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					strict_overflow_p);

    case CALL_EXPR:
      {
	tree fndecl = get_callee_fndecl (t);
	if (!fndecl)
	  return false;
	if (flag_delete_null_pointer_checks && !flag_check_new
	    && DECL_IS_OPERATOR_NEW (fndecl)
	    && !TREE_NOTHROW (fndecl))
	  return true;
	if (flag_delete_null_pointer_checks
	    && lookup_attribute ("returns_nonnull",
				 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
	  return true;
	return alloca_call_p (t);
      }

    default:
      break;
    }

  return false;
}
/* Return true when T is an address and is known to be nonzero.
   Handle warnings about undefined signed overflow.  */

bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-zero"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  LOC is the location of the resulting expression.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

tree
fold_binary_loc (location_t loc,
		 enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree arg0, arg1, tem;
  tree t1 = NULL_TREE;
  bool strict_overflow_p;

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 2
	      && op0 != NULL_TREE
	      && op1 != NULL_TREE);

  arg0 = op0;
  arg1 = op1;

  /* Strip any conversions that don't change the mode.  This is
     safe for every expression, except for a comparison expression
     because its signedness is derived from its operands.  So, in
     the latter case, only strip conversions that don't change the
     signedness.  MIN_EXPR/MAX_EXPR also need signedness of arguments
     preserved.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation
     of the arguments so that their form can be studied.  In any
     case, the appropriate type conversions should be put back in
     the tree that will get out of the constant folder.  */

  if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
    {
      STRIP_SIGN_NOPS (arg0);
      STRIP_SIGN_NOPS (arg1);
    }
  else
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
    }
  /* Note that TREE_CONSTANT isn't enough: static var addresses are
     constant but we can't do arithmetic on them.  */
  if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
    {
      if (kind == tcc_binary)
	{
	  /* Make sure type and arg0 have the same saturating flag.  */
	  gcc_assert (TYPE_SATURATING (type)
		      == TYPE_SATURATING (TREE_TYPE (arg0)));
	  tem = const_binop (code, arg0, arg1);
	}
      else if (kind == tcc_comparison)
	tem = fold_relational_const (code, type, arg0, arg1);
      else
	tem = NULL_TREE;

      if (tem != NULL_TREE)
	{
	  if (TREE_TYPE (tem) != type)
	    tem = fold_convert_loc (loc, type, tem);
	  return tem;
	}
    }

  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, code, type, op1, op0);
  /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     one of the operands is a comparison and the other is a comparison, a
     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
     code below would make the expression more complex.  Change it to a
     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && TREE_CODE (type) != VECTOR_TYPE
      && ((truth_value_p (TREE_CODE (arg0))
	   && (truth_value_p (TREE_CODE (arg1))
	       || (TREE_CODE (arg1) == BIT_AND_EXPR
		   && integer_onep (TREE_OPERAND (arg1, 1)))))
	  || (truth_value_p (TREE_CODE (arg1))
	      && (truth_value_p (TREE_CODE (arg0))
		  || (TREE_CODE (arg0) == BIT_AND_EXPR
		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
			     : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
			     : TRUTH_XOR_EXPR,
			     boolean_type_node,
			     fold_convert_loc (loc, boolean_type_node, arg0),
			     fold_convert_loc (loc, boolean_type_node, arg1));

      if (code == EQ_EXPR)
	tem = invert_truthvalue_loc (loc, tem);

      return fold_convert_loc (loc, type, tem);
    }
  if (TREE_CODE_CLASS (code) == tcc_binary
      || TREE_CODE_CLASS (code) == tcc_comparison)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
	{
	  tem = fold_build2_loc (loc, code, type,
				 fold_convert_loc (loc, TREE_TYPE (op0),
						   TREE_OPERAND (arg0, 1)), op1);
	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
			     tem);
	}
      if (TREE_CODE (arg1) == COMPOUND_EXPR
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_build2_loc (loc, code, type, op0,
				 fold_convert_loc (loc, TREE_TYPE (op1),
						   TREE_OPERAND (arg1, 1)));
	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
			     tem);
	}

      if (TREE_CODE (arg0) == COND_EXPR
	  || TREE_CODE (arg0) == VEC_COND_EXPR
	  || COMPARISON_CLASS_P (arg0))
	{
	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
						     arg0, arg1,
						     /*cond_first_p=*/1);
	  if (tem != NULL_TREE)
	    return tem;
	}

      if (TREE_CODE (arg1) == COND_EXPR
	  || TREE_CODE (arg1) == VEC_COND_EXPR
	  || COMPARISON_CLASS_P (arg1))
	{
	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
						     arg1, arg0,
						     /*cond_first_p=*/0);
	  if (tem != NULL_TREE)
	    return tem;
	}
    }

  switch (code)
    {
    case MEM_REF:
      /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2].  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
	{
	  tree iref = TREE_OPERAND (arg0, 0);
	  return fold_build2 (MEM_REF, type,
			      TREE_OPERAND (iref, 0),
			      int_const_binop (PLUS_EXPR, arg1,
					       TREE_OPERAND (iref, 1)));
	}

      /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2].  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && handled_component_p (TREE_OPERAND (arg0, 0)))
	{
	  tree base;
	  HOST_WIDE_INT coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
						&coffset);
	  if (!base)
	    return NULL_TREE;
	  return fold_build2 (MEM_REF, type,
			      build_fold_addr_expr (base),
			      int_const_binop (PLUS_EXPR, arg1,
					       size_int (coffset)));
	}

      return NULL_TREE;
    case POINTER_PLUS_EXPR:
      /* 0 +p index -> (type)index */
      if (integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

      /* PTR +p 0 -> PTR */
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
	  && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
	return fold_convert_loc (loc, type,
				 fold_build2_loc (loc, PLUS_EXPR, sizetype,
						  fold_convert_loc (loc, sizetype,
								    arg1),
						  fold_convert_loc (loc, sizetype,
								    arg0)));

      /* (PTR +p B) +p A -> PTR +p (B + A) */
      if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	{
	  tree inner;
	  tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
				   arg01, fold_convert_loc (loc, sizetype, arg1));
	  return fold_convert_loc (loc, type,
				   fold_build_pointer_plus_loc (loc,
								arg00, inner));
	}

      /* PTR_CST +p CST -> CST1 */
      if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
				fold_convert_loc (loc, type, arg1));

      /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
	 of the array.  The loop optimizer sometimes produces this type
	 of expression.  */
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  tem = try_move_mult_to_index (loc, arg0,
					fold_convert_loc (loc,
							  ssizetype, arg1));
	  if (tem)
	    return fold_convert_loc (loc, type, tem);
	}

      return NULL_TREE;

    case PLUS_EXPR:
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR
	  && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return fold_build2_loc (loc, MINUS_EXPR, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1)
	  && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return fold_build2_loc (loc, MINUS_EXPR, type,
				fold_convert_loc (loc, type, arg1),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));

      if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
	{
	  /* Convert ~A + 1 to -A.  */
	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
	      && integer_onep (arg1))
	    return fold_build1_loc (loc, NEGATE_EXPR, type,
				    fold_convert_loc (loc, type,
						      TREE_OPERAND (arg0, 0)));

	  /* ~X + X is -1.  */
	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
	      && !TYPE_OVERFLOW_TRAPS (type))
	    {
	      tree tem = TREE_OPERAND (arg0, 0);

	      STRIP_NOPS (tem);
	      if (operand_equal_p (tem, arg1, 0))
		{
		  t1 = build_all_ones_cst (type);
		  return omit_one_operand_loc (loc, type, t1, arg1);
		}
	    }

	  /* X + ~X is -1.  */
	  if (TREE_CODE (arg1) == BIT_NOT_EXPR
	      && !TYPE_OVERFLOW_TRAPS (type))
	    {
	      tree tem = TREE_OPERAND (arg1, 0);

	      STRIP_NOPS (tem);
	      if (operand_equal_p (arg0, tem, 0))
		{
		  t1 = build_all_ones_cst (type);
		  return omit_one_operand_loc (loc, type, t1, arg0);
		}
	    }

	  /* X + (X / CST) * -CST is X % CST.  */
	  if (TREE_CODE (arg1) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
	      && operand_equal_p (arg0,
				  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
	    {
	      tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
	      tree cst1 = TREE_OPERAND (arg1, 1);
	      tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
					  cst1, cst0);
	      if (sum && integer_zerop (sum))
		return fold_convert_loc (loc, type,
					 fold_build2_loc (loc, TRUNC_MOD_EXPR,
							  TREE_TYPE (arg0), arg0,
							  cst0));
	    }
	}
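      /* Illustrative note (not part of the original code): the three
	 transformations above are the usual two's complement identities,
	 ~A + 1 == -A and ~X + X == -1, plus the truncating-division
	 remainder identity X + (X / C) * -C == X % C, e.g.
	 7 + (7 / 3) * -3 == 1 == 7 % 3.  */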
      /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
	 one.  Make sure the type is not saturating and has the signedness of
	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
	   || TREE_CODE (arg1) == MULT_EXPR)
	  && !TYPE_SATURATING (type)
	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
	{
	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
	  if (tem)
	    return tem;
	}
      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
	     with a constant, and the two constants have no bits in common,
	     we should treat this as a BIT_IOR_EXPR since this may produce more
	     simplifications.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	      && wi::bit_and (TREE_OPERAND (arg0, 1),
			      TREE_OPERAND (arg1, 1)) == 0)
	    {
	      code = BIT_IOR_EXPR;
	      goto bit_ior;
	    }

	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
	     (plus (plus (mult) (mult)) (foo)) so that we can
	     take advantage of the factoring cases below.  */
	  if (TYPE_OVERFLOW_WRAPS (type)
	      && (((TREE_CODE (arg0) == PLUS_EXPR
		    || TREE_CODE (arg0) == MINUS_EXPR)
		   && TREE_CODE (arg1) == MULT_EXPR)
		  || ((TREE_CODE (arg1) == PLUS_EXPR
		       || TREE_CODE (arg1) == MINUS_EXPR)
		      && TREE_CODE (arg0) == MULT_EXPR)))
	    {
	      tree parg0, parg1, parg, marg;
	      enum tree_code pcode;

	      if (TREE_CODE (arg1) == MULT_EXPR)
		parg = arg0, marg = arg1;
	      else
		parg = arg1, marg = arg0;
	      pcode = TREE_CODE (parg);
	      parg0 = TREE_OPERAND (parg, 0);
	      parg1 = TREE_OPERAND (parg, 1);
	      STRIP_NOPS (parg0);
	      STRIP_NOPS (parg1);

	      if (TREE_CODE (parg0) == MULT_EXPR
		  && TREE_CODE (parg1) != MULT_EXPR)
		return fold_build2_loc (loc, pcode, type,
					fold_build2_loc (loc, PLUS_EXPR, type,
							 fold_convert_loc (loc, type,
									   parg0),
							 fold_convert_loc (loc, type,
									   marg)),
					fold_convert_loc (loc, type, parg1));
	      if (TREE_CODE (parg0) != MULT_EXPR
		  && TREE_CODE (parg1) == MULT_EXPR)
		return
		  fold_build2_loc (loc, PLUS_EXPR, type,
				   fold_convert_loc (loc, type, parg0),
				   fold_build2_loc (loc, pcode, type,
						    fold_convert_loc (loc, type, marg),
						    fold_convert_loc (loc, type,
								      parg1)));
	    }
	}
      else
	{
	  /* See if ARG1 is zero and X + ARG1 reduces to X.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* Likewise if the operands are reversed.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

	  /* Convert X + -C into X - C.  */
	  if (TREE_CODE (arg1) == REAL_CST
	      && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
	    {
	      tem = fold_negate_const (arg1, type);
	      if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
		return fold_build2_loc (loc, MINUS_EXPR, type,
					fold_convert_loc (loc, type, arg0),
					fold_convert_loc (loc, type, tem));
	    }

	  /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
	     to __complex__ ( x, y ).  This is not the same for SNaNs or
	     if signed zeros are involved.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	    {
	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	      tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
	      tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
	      bool arg0rz = false, arg0iz = false;
	      if ((arg0r && (arg0rz = real_zerop (arg0r)))
		  || (arg0i && (arg0iz = real_zerop (arg0i))))
		{
		  tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
		  tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
		  if (arg0rz && arg1i && real_zerop (arg1i))
		    {
		      tree rp = arg1r ? arg1r
				: build1 (REALPART_EXPR, rtype, arg1);
		      tree ip = arg0i ? arg0i
				: build1 (IMAGPART_EXPR, rtype, arg0);
		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		    }
		  else if (arg0iz && arg1r && real_zerop (arg1r))
		    {
		      tree rp = arg0r ? arg0r
				: build1 (REALPART_EXPR, rtype, arg0);
		      tree ip = arg1i ? arg1i
				: build1 (IMAGPART_EXPR, rtype, arg1);
		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		    }
		}
	    }
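	  /* Illustrative note (not part of the original code): concretely,
	     __complex__ (x, 0.0) + __complex__ (0.0, y) becomes
	     __complex__ (x, y) here; the HONOR_SNANS and
	     HONOR_SIGNED_ZEROS guards keep the rewrite from firing when
	     it would be observable.  */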
	  if (flag_unsafe_math_optimizations
	      && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
	      && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
	      && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
	    return tem;

	  /* Convert x+x into x*2.0.  */
	  if (operand_equal_p (arg0, arg1, 0)
	      && SCALAR_FLOAT_TYPE_P (type))
	    return fold_build2_loc (loc, MULT_EXPR, type, arg0,
				    build_real (type, dconst2));

	  /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
	     We associate floats only if the user has specified
	     -fassociative-math.  */
	  if (flag_associative_math
	      && TREE_CODE (arg1) == PLUS_EXPR
	      && TREE_CODE (arg0) != MULT_EXPR)
	    {
	      tree tree10 = TREE_OPERAND (arg1, 0);
	      tree tree11 = TREE_OPERAND (arg1, 1);
	      if (TREE_CODE (tree11) == MULT_EXPR
		  && TREE_CODE (tree10) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
		  return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
		}
	    }
	  /* Convert (b*c + d*e) + a into b*c + (d*e + a).
	     We associate floats only if the user has specified
	     -fassociative-math.  */
	  if (flag_associative_math
	      && TREE_CODE (arg0) == PLUS_EXPR
	      && TREE_CODE (arg1) != MULT_EXPR)
	    {
	      tree tree00 = TREE_OPERAND (arg0, 0);
	      tree tree01 = TREE_OPERAND (arg0, 1);
	      if (TREE_CODE (tree01) == MULT_EXPR
		  && TREE_CODE (tree00) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
		  return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
		}
	    }
	}

    bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
	 is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
	 is a rotate of A by B bits.  */
      {
	enum tree_code code0, code1;
	tree rtype;
	code0 = TREE_CODE (arg0);
	code1 = TREE_CODE (arg1);
	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
	    && operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 0), 0)
	    && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
		TYPE_UNSIGNED (rtype))
	    /* Only create rotates in complete modes.  Other cases are not
	       expanded properly.  */
	    && (element_precision (rtype)
		== element_precision (TYPE_MODE (rtype))))
	  {
	    tree tree01, tree11;
	    enum tree_code code01, code11;

	    tree01 = TREE_OPERAND (arg0, 1);
	    tree11 = TREE_OPERAND (arg1, 1);
	    STRIP_NOPS (tree01);
	    STRIP_NOPS (tree11);
	    code01 = TREE_CODE (tree01);
	    code11 = TREE_CODE (tree11);
	    if (code01 == INTEGER_CST
		&& code11 == INTEGER_CST
		&& (wi::to_widest (tree01) + wi::to_widest (tree11)
		    == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
	      {
		tem = build2_loc (loc, LROTATE_EXPR,
				  TREE_TYPE (TREE_OPERAND (arg0, 0)),
				  TREE_OPERAND (arg0, 0),
				  code0 == LSHIFT_EXPR ? tree01 : tree11);
		return fold_convert_loc (loc, type, tem);
	      }
	    else if (code11 == MINUS_EXPR)
	      {
		tree tree110, tree111;
		tree110 = TREE_OPERAND (tree11, 0);
		tree111 = TREE_OPERAND (tree11, 1);
		STRIP_NOPS (tree110);
		STRIP_NOPS (tree111);
		if (TREE_CODE (tree110) == INTEGER_CST
		    && 0 == compare_tree_int (tree110,
					      element_precision
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree01, tree111, 0))
		  return
		    fold_convert_loc (loc, type,
				      build2 ((code0 == LSHIFT_EXPR
					       ? LROTATE_EXPR
					       : RROTATE_EXPR),
					      TREE_TYPE (TREE_OPERAND (arg0, 0)),
					      TREE_OPERAND (arg0, 0), tree01));
	      }
	    else if (code01 == MINUS_EXPR)
	      {
		tree tree010, tree011;
		tree010 = TREE_OPERAND (tree01, 0);
		tree011 = TREE_OPERAND (tree01, 1);
		STRIP_NOPS (tree010);
		STRIP_NOPS (tree011);
		if (TREE_CODE (tree010) == INTEGER_CST
		    && 0 == compare_tree_int (tree010,
					      element_precision
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree11, tree011, 0))
		  return fold_convert_loc
		    (loc, type,
		     build2 ((code0 != LSHIFT_EXPR
			      ? LROTATE_EXPR
			      : RROTATE_EXPR),
			     TREE_TYPE (TREE_OPERAND (arg0, 0)),
			     TREE_OPERAND (arg0, 0), tree11));
	      }
	  }
      }
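      /* Illustrative note (not part of the original code): for a 32-bit
	 unsigned A, both (A << 3) + (A >> 29) and
	 (A << B) + (A >> (32 - B)) are recognized above and collapsed
	 into a single rotate of A.  */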
    associate:
      /* In most languages, can't associate operations on floats through
	 parentheses.  Rather than remember where the parentheses were, we
	 don't associate floats at all, unless the user has specified
	 -fassociative-math.
	 And, we need to make sure type is not saturating.  */

      if ((! FLOAT_TYPE_P (type) || flag_associative_math)
	  && !TYPE_SATURATING (type))
	{
	  tree var0, con0, lit0, minus_lit0;
	  tree var1, con1, lit1, minus_lit1;
	  tree atype = type;
	  bool ok = true;

	  /* Split both trees into variables, constants, and literals.  Then
	     associate each group together, the constants with literals,
	     then the result with variables.  This increases the chances of
	     literals being recombined later and of generating relocatable
	     expressions for the sum of a constant and literal.  */
	  var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
	  var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
			     code == MINUS_EXPR);

	  /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
	  if (code == MINUS_EXPR)
	    code = PLUS_EXPR;

	  /* With undefined overflow prefer doing association in a type
	     which wraps on overflow, if that is one of the operand types.  */
	  if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
	      || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
	    {
	      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
		  && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
		atype = TREE_TYPE (arg0);
	      else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
		       && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
		atype = TREE_TYPE (arg1);
	      gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
	    }

	  /* With undefined overflow we can only associate constants with one
	     variable, and constants whose association doesn't overflow.  */
	  if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
	      || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
	    {
	      if (var0 && var1)
		{
		  tree tmp0 = var0;
		  tree tmp1 = var1;

		  if (TREE_CODE (tmp0) == NEGATE_EXPR)
		    tmp0 = TREE_OPERAND (tmp0, 0);
		  if (CONVERT_EXPR_P (tmp0)
		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
			  <= TYPE_PRECISION (atype)))
		    tmp0 = TREE_OPERAND (tmp0, 0);
		  if (TREE_CODE (tmp1) == NEGATE_EXPR)
		    tmp1 = TREE_OPERAND (tmp1, 0);
		  if (CONVERT_EXPR_P (tmp1)
		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
			  <= TYPE_PRECISION (atype)))
		    tmp1 = TREE_OPERAND (tmp1, 0);
		  /* The only case we can still associate with two variables
		     is if they are the same, modulo negation and bit-pattern
		     preserving conversions.  */
		  if (!operand_equal_p (tmp0, tmp1, 0))
		    ok = false;
		}
	    }

	  /* Only do something if we found more than two objects.  Otherwise,
	     nothing has changed and we risk infinite recursion.  */
	  if (ok
	      && (2 < ((var0 != 0) + (var1 != 0)
		       + (con0 != 0) + (con1 != 0)
		       + (lit0 != 0) + (lit1 != 0)
		       + (minus_lit0 != 0) + (minus_lit1 != 0))))
	    {
	      bool any_overflows = false;
	      if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
	      if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
	      if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
	      if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
	      var0 = associate_trees (loc, var0, var1, code, atype);
	      con0 = associate_trees (loc, con0, con1, code, atype);
	      lit0 = associate_trees (loc, lit0, lit1, code, atype);
	      minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
					    code, atype);

	      /* Preserve the MINUS_EXPR if the negative part of the literal is
		 greater than the positive part.  Otherwise, the multiplicative
		 folding code (i.e. extract_muldiv) may be fooled in case
		 unsigned constants are subtracted, like in the following
		 example: ((X*2 + 4) - 8U)/2.  */
	      if (minus_lit0 && lit0)
		{
		  if (TREE_CODE (lit0) == INTEGER_CST
		      && TREE_CODE (minus_lit0) == INTEGER_CST
		      && tree_int_cst_lt (lit0, minus_lit0))
		    {
		      minus_lit0 = associate_trees (loc, minus_lit0, lit0,
						    MINUS_EXPR, atype);
		      lit0 = 0;
		    }
		  else
		    {
		      lit0 = associate_trees (loc, lit0, minus_lit0,
					      MINUS_EXPR, atype);
		      minus_lit0 = 0;
		    }
		}

	      /* Don't introduce overflows through reassociation.  */
	      if (!any_overflows
		  && ((lit0 && TREE_OVERFLOW (lit0))
		      || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
		return NULL_TREE;

	      if (minus_lit0)
		{
		  if (con0 == 0)
		    return
		      fold_convert_loc (loc, type,
					associate_trees (loc, var0, minus_lit0,
							 MINUS_EXPR, atype));
		  else
		    {
		      con0 = associate_trees (loc, con0, minus_lit0,
					      MINUS_EXPR, atype);
		      return
			fold_convert_loc (loc, type,
					  associate_trees (loc, var0, con0,
							   PLUS_EXPR, atype));
		    }
		}

	      con0 = associate_trees (loc, con0, lit0, code, atype);
	      return
		fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
							      code, atype));
	    }
	}

      return NULL_TREE;
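      /* Illustrative note (not part of the original code): as an example
	 of the association above, (x + 3) + (y + 4) splits into the
	 variables {x, y} and literals {3, 4} and recombines as
	 (x + y) + 7, subject to the overflow checks just performed.  */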
    case MINUS_EXPR:
      /* Pointer simplifications for subtraction, simple reassociations.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
	{
	  /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
	  if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
	      && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	    {
	      tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	      tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	      tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	      tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
	      return fold_build2_loc (loc, PLUS_EXPR, type,
				      fold_build2_loc (loc, MINUS_EXPR, type,
						       arg00, arg10),
				      fold_build2_loc (loc, MINUS_EXPR, type,
						       arg01, arg11));
	    }
	  /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1
	     simplifies.  */
	  else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	    {
	      tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	      tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	      tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
					  fold_convert_loc (loc, type, arg1));
	      if (tmp)
		return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
	    }
	  /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
	     simplifies.  */
	  else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	    {
	      tree arg10 = fold_convert_loc (loc, type,
					     TREE_OPERAND (arg1, 0));
	      tree arg11 = fold_convert_loc (loc, type,
					     TREE_OPERAND (arg1, 1));
	      tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg0,
					  fold_convert_loc (loc, type, arg10));
	      if (tmp)
		return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
	    }
	}
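      /* Illustrative note (not part of the original code): e.g.
	 (p p+ 4) - (p p+ 12) is rewritten by the first case above as
	 (p - p) + (4 - 12) and thus folds to -8.  */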
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, PLUS_EXPR, type, op0,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && negate_expr_p (arg1)
	  && reorder_operands_p (arg0, arg1))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				fold_convert_loc (loc, type,
						  negate_expr (arg1)),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));
      /* Convert -A - 1 to ~A.  */
      if (TREE_CODE (type) != COMPLEX_TYPE
	  && TREE_CODE (arg0) == NEGATE_EXPR
	  && integer_onep (arg1)
	  && !TYPE_OVERFLOW_TRAPS (type))
	return fold_build1_loc (loc, BIT_NOT_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));

      /* Convert -1 - A to ~A.  */
      if (TREE_CODE (type) != COMPLEX_TYPE
	  && integer_all_onesp (arg0))
	return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);

      /* X - (X / Y) * Y is X % Y.  */
      if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
	  && TREE_CODE (arg1) == MULT_EXPR
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
	  && operand_equal_p (arg0,
			      TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
			      TREE_OPERAND (arg1, 1), 0))
	return
	  fold_convert_loc (loc, type,
			    fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
					     arg0, TREE_OPERAND (arg1, 1)));
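      /* Illustrative note (not part of the original code): this is the
	 standard truncating-division remainder identity, e.g.
	 7 - (7 / 3) * 3 folds to 7 % 3 == 1.  */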
      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg0))
	    return negate_expr (fold_convert_loc (loc, type, arg1));
	  if (integer_zerop (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* Fold A - (A & B) into ~B & A.  */
	  if (!TREE_SIDE_EFFECTS (arg0)
	      && TREE_CODE (arg1) == BIT_AND_EXPR)
	    {
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
		{
		  tree arg10 = fold_convert_loc (loc, type,
						 TREE_OPERAND (arg1, 0));
		  return fold_build2_loc (loc, BIT_AND_EXPR, type,
					  fold_build1_loc (loc, BIT_NOT_EXPR,
							   type, arg10),
					  fold_convert_loc (loc, type, arg0));
		}
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
		{
		  tree arg11 = fold_convert_loc (loc,
						 type, TREE_OPERAND (arg1, 1));
		  return fold_build2_loc (loc, BIT_AND_EXPR, type,
					  fold_build1_loc (loc, BIT_NOT_EXPR,
							   type, arg11),
					  fold_convert_loc (loc, type, arg0));
		}
	    }

	  /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
	     any power of 2 minus 1.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0))
	    {
	      tree mask0 = TREE_OPERAND (arg0, 1);
	      tree mask1 = TREE_OPERAND (arg1, 1);
	      tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);

	      if (operand_equal_p (tem, mask1, 0))
		{
		  tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
					 TREE_OPERAND (arg0, 0), mask1);
		  return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
		}
	    }
	}

      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
	 ARG0 is zero and X + ARG0 reduces to X, since that would mean
	 (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	return negate_expr (fold_convert_loc (loc, type, arg1));
      /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
	 __complex__ ( x, -y ).  This is not the same for SNaNs or if
	 signed zeros are involved.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	{
	  tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	  tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
	  tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
	  bool arg0rz = false, arg0iz = false;
	  if ((arg0r && (arg0rz = real_zerop (arg0r)))
	      || (arg0i && (arg0iz = real_zerop (arg0i))))
	    {
	      tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
	      tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
	      if (arg0rz && arg1i && real_zerop (arg1i))
		{
		  tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
					     arg1r ? arg1r
					     : build1 (REALPART_EXPR, rtype, arg1));
		  tree ip = arg0i ? arg0i
			    : build1 (IMAGPART_EXPR, rtype, arg0);
		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		}
	      else if (arg0iz && arg1r && real_zerop (arg1r))
		{
		  tree rp = arg0r ? arg0r
			    : build1 (REALPART_EXPR, rtype, arg0);
		  tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
					     arg1i ? arg1i
					     : build1 (IMAGPART_EXPR, rtype, arg1));
		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		}
	    }
	}
      /* Fold &x - &x.  This can happen from &x.foo - &x.
	 This is unsafe for certain floats even in non-IEEE formats.
	 In IEEE, it is unsafe because it does wrong for NaNs.
	 Also note that operand_equal_p is always false if an operand
	 is volatile.  */

      if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
	  && operand_equal_p (arg0, arg1, 0))
	return build_zero_cst (type);

      /* A - B -> A + (-B) if B is easily negatable.  */
      if (negate_expr_p (arg1)
	  && ((FLOAT_TYPE_P (type)
	       /* Avoid this transformation if B is a positive REAL_CST.  */
	       && (TREE_CODE (arg1) != REAL_CST
		   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
	      || INTEGRAL_TYPE_P (type)))
	return fold_build2_loc (loc, PLUS_EXPR, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type,
						  negate_expr (arg1)));

      /* Try folding difference of addresses.  */
      {
	HOST_WIDE_INT diff;

	if ((TREE_CODE (arg0) == ADDR_EXPR
	     || TREE_CODE (arg1) == ADDR_EXPR)
	    && ptr_difference_const (arg0, arg1, &diff))
	  return build_int_cst_type (type, diff);
      }

      /* Fold &a[i] - &a[j] to i-j.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
	{
	  tree tem = fold_addr_of_array_ref_difference (loc, type,
							TREE_OPERAND (arg0, 0),
							TREE_OPERAND (arg1, 0));
	  if (tem)
	    return tem;
	}

      if (FLOAT_TYPE_P (type)
	  && flag_unsafe_math_optimizations
	  && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
	  && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
	  && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
	return tem;

      /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
	 one.  Make sure the type is not saturating and has the signedness of
	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
	   || TREE_CODE (arg1) == MULT_EXPR)
	  && !TYPE_SATURATING (type)
	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
	{
	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
	  if (tem)
	    return tem;
	}

      goto associate;

    case MULT_EXPR:
      /* (-A) * (-B) -> A * B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold_build2_loc (loc, MULT_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)),
				fold_convert_loc (loc, type,
						  negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold_build2_loc (loc, MULT_EXPR, type,
				fold_convert_loc (loc, type,
						  negate_expr (arg0)),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));

      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return omit_one_operand_loc (loc, type, arg1, arg0);
	  if (integer_onep (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
	  /* Transform x * -1 into -x.  Make sure to do the negation
	     on the original operand with conversions not stripped
	     because we can only strip non-sign-changing conversions.  */
	  if (integer_minus_onep (arg1))
	    return fold_convert_loc (loc, type, negate_expr (op0));
	  /* Transform x * -C into -x * C if x is easily negatable.  */
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && tree_int_cst_sgn (arg1) == -1
	      && negate_expr_p (arg0)
	      && (tem = negate_expr (arg1)) != arg1
	      && !TREE_OVERFLOW (tem))
	    return fold_build2_loc (loc, MULT_EXPR, type,
				    fold_convert_loc (loc, type,
						      negate_expr (arg0)),
				    tem);

	  /* (a * (1 << b)) is (a << b)  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg1, 0)))
	    return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
				    TREE_OPERAND (arg1, 1));
	  if (TREE_CODE (arg0) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg0, 0)))
	    return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
				    TREE_OPERAND (arg0, 1));

	  /* (A + A) * C -> A * 2 * C  */
	  if (TREE_CODE (arg0) == PLUS_EXPR
	      && TREE_CODE (arg1) == INTEGER_CST
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg0, 1), 0))
	    return fold_build2_loc (loc, MULT_EXPR, type,
				    omit_one_operand_loc (loc, type,
							  TREE_OPERAND (arg0, 0),
							  TREE_OPERAND (arg0, 1)),
				    fold_build2_loc (loc, MULT_EXPR, type,
						     build_int_cst (type, 2), arg1));

	  /* ((T) (X /[ex] C)) * C cancels out if the conversion is
	     sign-changing only.  */
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg0) == EXACT_DIV_EXPR
	      && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
	    return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));

	  strict_overflow_p = false;
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					     &strict_overflow_p)))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying "
					"multiplication"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_convert_loc (loc, type, tem);
	    }

	  /* Optimize z * conj(z) for integer complex numbers.  */
	  if (TREE_CODE (arg0) == CONJ_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    return fold_mult_zconjz (loc, type, arg1);
	  if (TREE_CODE (arg1) == CONJ_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    return fold_mult_zconjz (loc, type, arg0);
	}
      else
	{
	  /* Maybe fold x * 0 to 0.  The expressions aren't the same
	     when x is NaN, since x * 0 is also NaN.  Nor are they the
	     same in modes with signed zeros, since multiplying a
	     negative value by 0 gives -0, not +0.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_zerop (arg1))
	    return omit_one_operand_loc (loc, type, arg1, arg0);
	  /* In IEEE floating point, x*1 is not equivalent to x for snans.
	     Likewise for complex arithmetic with signed zeros.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
		  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	      && real_onep (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* Transform x * -1.0 into -x.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
		  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	      && real_minus_onep (arg1))
	    return fold_convert_loc (loc, type, negate_expr (arg0));

	  /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may change
	     the result for floating point types due to rounding so it is applied
	     only if -fassociative-math was specified.  */
	  if (flag_associative_math
	      && TREE_CODE (arg0) == RDIV_EXPR
	      && TREE_CODE (arg1) == REAL_CST
	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
	    {
	      tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
				      arg1);
	      if (tem)
		return fold_build2_loc (loc, RDIV_EXPR, type, tem,
					TREE_OPERAND (arg0, 1));
	    }

	  /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
	  if (operand_equal_p (arg0, arg1, 0))
	    {
	      tree tem = fold_strip_sign_ops (arg0);
	      if (tem != NULL_TREE)
		{
		  tem = fold_convert_loc (loc, type, tem);
		  return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
		}
	    }

	  /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
	     This is not the same for NaNs or if signed zeros are
	     involved.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && TREE_CODE (arg1) == COMPLEX_CST
	      && real_zerop (TREE_REALPART (arg1)))
	    {
	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	      if (real_onep (TREE_IMAGPART (arg1)))
		return
		  fold_build2_loc (loc, COMPLEX_EXPR, type,
				   negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
								 rtype, arg0)),
				   fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
	      else if (real_minus_onep (TREE_IMAGPART (arg1)))
		return
		  fold_build2_loc (loc, COMPLEX_EXPR, type,
				   fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
				   negate_expr (fold_build1_loc (loc, REALPART_EXPR,
								 rtype, arg0)));
	    }

	  /* Optimize z * conj(z) for floating point complex numbers.
	     Guarded by flag_unsafe_math_optimizations as non-finite
	     imaginary components don't produce scalar results.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == CONJ_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    return fold_mult_zconjz (loc, type, arg1);
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == CONJ_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    return fold_mult_zconjz (loc, type, arg0);
	  if (flag_unsafe_math_optimizations)
	    {
	      enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	      enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	      /* Optimizations of root(...)*root(...).  */
	      if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
		{
		  tree rootfn, arg;
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg10 = CALL_EXPR_ARG (arg1, 0);

		  /* Optimize sqrt(x)*sqrt(x) as x.  */
		  if (BUILTIN_SQRT_P (fcode0)
		      && operand_equal_p (arg00, arg10, 0)
		      && ! HONOR_SNANS (TYPE_MODE (type)))
		    return arg00;

		  /* Optimize root(x)*root(y) as root(x*y).  */
		  rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		  arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
		  return build_call_expr_loc (loc, rootfn, 1, arg);
		}

	      /* Optimize expN(x)*expN(y) as expN(x+y).  */
	      if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
		{
		  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		  tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
					      CALL_EXPR_ARG (arg0, 0),
					      CALL_EXPR_ARG (arg1, 0));
		  return build_call_expr_loc (loc, expfn, 1, arg);
		}

	      /* Optimizations of pow(...)*pow(...).  */
	      if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
		  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
		  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
		{
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg01 = CALL_EXPR_ARG (arg0, 1);
		  tree arg10 = CALL_EXPR_ARG (arg1, 0);
		  tree arg11 = CALL_EXPR_ARG (arg1, 1);

		  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
		  if (operand_equal_p (arg01, arg11, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		      tree arg = fold_build2_loc (loc, MULT_EXPR, type,
						  arg00, arg10);
		      return build_call_expr_loc (loc, powfn, 2, arg, arg01);
		    }

		  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
		  if (operand_equal_p (arg00, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		      tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
						  arg01, arg11);
		      return build_call_expr_loc (loc, powfn, 2, arg00, arg);
		    }
		}
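	      /* Illustrative note (not part of the original code): both
		 rewrites above, pow(x,y)*pow(z,y) -> pow(x*z,y) and
		 pow(x,y)*pow(x,z) -> pow(x,y+z), are exact only for
		 well-behaved operands, hence the
		 flag_unsafe_math_optimizations guard.  */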
	      /* Optimize tan(x)*cos(x) as sin(x).  */
	      if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
		   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
		   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
		   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
		   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
		   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
		  && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				      CALL_EXPR_ARG (arg1, 0), 0))
		{
		  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);

		  if (sinfn != NULL_TREE)
		    return build_call_expr_loc (loc, sinfn, 1,
						CALL_EXPR_ARG (arg0, 0));
		}

	      /* Optimize x*pow(x,c) as pow(x,c+1).  */
	      if (fcode1 == BUILT_IN_POW
		  || fcode1 == BUILT_IN_POWF
		  || fcode1 == BUILT_IN_POWL)
		{
		  tree arg10 = CALL_EXPR_ARG (arg1, 0);
		  tree arg11 = CALL_EXPR_ARG (arg1, 1);
		  if (TREE_CODE (arg11) == REAL_CST
		      && !TREE_OVERFLOW (arg11)
		      && operand_equal_p (arg0, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
		      REAL_VALUE_TYPE c;
		      tree arg;

		      c = TREE_REAL_CST (arg11);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
		    }
		}

	      /* Optimize pow(x,c)*x as pow(x,c+1).  */
	      if (fcode0 == BUILT_IN_POW
		  || fcode0 == BUILT_IN_POWF
		  || fcode0 == BUILT_IN_POWL)
		{
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg01 = CALL_EXPR_ARG (arg0, 1);
		  if (TREE_CODE (arg01) == REAL_CST
		      && !TREE_OVERFLOW (arg01)
		      && operand_equal_p (arg1, arg00, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		      REAL_VALUE_TYPE c;
		      tree arg;

		      c = TREE_REAL_CST (arg01);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      return build_call_expr_loc (loc, powfn, 2, arg1, arg);
		    }
		}

	      /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x.  */
	      if (!in_gimple_form
		  && optimize
		  && operand_equal_p (arg0, arg1, 0))
		{
		  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

		  if (powfn)
		    {
		      tree arg = build_real (type, dconst2);
		      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
		    }
		}
	    }
	}
      goto associate;

    case BIT_IOR_EXPR:
    bit_ior:
      if (integer_all_onesp (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* ~X | X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  t1 = build_zero_cst (type);
	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
	  return omit_one_operand_loc (loc, type, t1, arg1);
	}

      /* X | ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  t1 = build_zero_cst (type);
	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
	  return omit_one_operand_loc (loc, type, t1, arg0);
	}

      /* Canonicalize (X & C1) | C2.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  int width = TYPE_PRECISION (type), w;
	  wide_int c1 = TREE_OPERAND (arg0, 1);
	  wide_int c2 = arg1;

	  /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
	  if ((c1 & c2) == c1)
	    return omit_one_operand_loc (loc, type, arg1,
					 TREE_OPERAND (arg0, 0));

	  wide_int msk = wi::mask (width, false,
				   TYPE_PRECISION (TREE_TYPE (arg1)));

	  /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
	  if (msk.and_not (c1 | c2) == 0)
	    return fold_build2_loc (loc, BIT_IOR_EXPR, type,
				    TREE_OPERAND (arg0, 0), arg1);

	  /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
	     unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
	     mode which allows further optimizations.  */
	  c1 &= msk;
	  c2 &= msk;
	  wide_int c3 = c1.and_not (c2);
	  for (w = BITS_PER_UNIT; w <= width; w <<= 1)
	    {
	      wide_int mask = wi::mask (w, false,
					TYPE_PRECISION (type));
	      if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
		{
		  c3 = mask;
		  break;
		}
	    }

	  if (c3 != c1)
	    return fold_build2_loc (loc, BIT_IOR_EXPR, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, type,
						     TREE_OPERAND (arg0, 0),
						     wide_int_to_tree (type,
								       c3)),
				    arg1);
	}
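      /* Illustrative note (not part of the original code): e.g.
	 (X & 0x3f) | 0x0f becomes (X & 0x30) | 0x0f here, while
	 (X & 0xff) | 0x0f is left alone because 0xff is already a
	 single-byte mask that may be cheaper to materialize.  */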
      /* (X & Y) | Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
      /* (X & Y) | X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
      /* X | (X & Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
      /* X | (Y & X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));

      /* (X & ~Y) | (~X & Y) is X ^ Y */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR)
	{
	  tree a0, a1, l0, l1, n0, n1;

	  a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));

	  l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));

	  n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
	  n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);

	  if ((operand_equal_p (n0, a0, 0)
	       && operand_equal_p (n1, a1, 0))
	      || (operand_equal_p (n0, a1, 0)
		  && operand_equal_p (n1, a0, 0)))
	    return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
	}

      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;

      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

	 This results in more efficient code for machines without a NAND
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NAND instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return
	    fold_build1_loc (loc, BIT_NOT_EXPR, type,
			     build2 (BIT_AND_EXPR, type,
				     fold_convert_loc (loc, type,
						       TREE_OPERAND (arg0, 0)),
				     fold_convert_loc (loc, type,
						       TREE_OPERAND (arg1, 0))));
	}

      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_all_onesp (arg1))
	return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* ~X ^ X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  t1 = build_zero_cst (type);
	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
	  return omit_one_operand_loc (loc, type, t1, arg1);
	}

      /* X ^ ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  t1 = build_zero_cst (type);
	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
	  return omit_one_operand_loc (loc, type, t1, arg0);
	}

      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
	 with a constant, and the two constants have no bits in common,
	 we should treat this as a BIT_IOR_EXPR since this may produce more
	 simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && wi::bit_and (TREE_OPERAND (arg0, 1),
			  TREE_OPERAND (arg1, 1)) == 0)
	{
	  code = BIT_IOR_EXPR;
	  goto bit_ior;
	}
      /* (X | Y) ^ X -> Y & ~X  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  tree t2 = TREE_OPERAND (arg0, 1);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
				arg1);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				fold_convert_loc (loc, type, t2),
				fold_convert_loc (loc, type, t1));
	  return t1;
	}

      /* (Y | X) ^ X -> Y & ~X  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tree t2 = TREE_OPERAND (arg0, 0);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
				arg1);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				fold_convert_loc (loc, type, t2),
				fold_convert_loc (loc, type, t1));
	  return t1;
	}

      /* X ^ (X | Y) -> Y & ~X  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
	{
	  tree t2 = TREE_OPERAND (arg1, 1);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
				arg0);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				fold_convert_loc (loc, type, t2),
				fold_convert_loc (loc, type, t1));
	  return t1;
	}

      /* X ^ (Y | X) -> Y & ~X  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
	{
	  tree t2 = TREE_OPERAND (arg1, 0);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
				arg0);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				fold_convert_loc (loc, type, t2),
				fold_convert_loc (loc, type, t1));
	  return t1;
	}
11591 /* Convert ~X ^ ~Y to X ^ Y. */
11592 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11593 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
11594 return fold_build2_loc (loc
, code
, type
,
11595 fold_convert_loc (loc
, type
,
11596 TREE_OPERAND (arg0
, 0)),
11597 fold_convert_loc (loc
, type
,
11598 TREE_OPERAND (arg1
, 0)));
11600 /* Convert ~X ^ C to X ^ ~C. */
11601 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11602 && TREE_CODE (arg1
) == INTEGER_CST
)
11603 return fold_build2_loc (loc
, code
, type
,
11604 fold_convert_loc (loc
, type
,
11605 TREE_OPERAND (arg0
, 0)),
11606 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, arg1
));
11608 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11609 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11610 && integer_onep (TREE_OPERAND (arg0
, 1))
11611 && integer_onep (arg1
))
11612 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
,
11613 build_zero_cst (TREE_TYPE (arg0
)));
11615 /* Fold (X & Y) ^ Y as ~X & Y. */
11616 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11617 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11619 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11620 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11621 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11622 fold_convert_loc (loc
, type
, arg1
));
11624 /* Fold (X & Y) ^ X as ~Y & X. */
11625 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11626 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
11627 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
11629 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11630 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11631 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11632 fold_convert_loc (loc
, type
, arg1
));
11634 /* Fold X ^ (X & Y) as X & ~Y. */
11635 if (TREE_CODE (arg1
) == BIT_AND_EXPR
11636 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11638 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
11639 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11640 fold_convert_loc (loc
, type
, arg0
),
11641 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
));
11643 /* Fold X ^ (Y & X) as ~Y & X. */
11644 if (TREE_CODE (arg1
) == BIT_AND_EXPR
11645 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
11646 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
11648 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
11649 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11650 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11651 fold_convert_loc (loc
, type
, arg0
));
11654 /* See if this can be simplified into a rotate first. If that
11655 is unsuccessful continue in the association code. */
11659 if (integer_all_onesp (arg1
))
11660 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11661 if (integer_zerop (arg1
))
11662 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
11663 if (operand_equal_p (arg0
, arg1
, 0))
11664 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11666 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11667 if ((TREE_CODE (arg0
) == BIT_NOT_EXPR
11668 || TREE_CODE (arg0
) == TRUTH_NOT_EXPR
11669 || (TREE_CODE (arg0
) == EQ_EXPR
11670 && integer_zerop (TREE_OPERAND (arg0
, 1))))
11671 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11672 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg1
);
11674 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11675 if ((TREE_CODE (arg1
) == BIT_NOT_EXPR
11676 || TREE_CODE (arg1
) == TRUTH_NOT_EXPR
11677 || (TREE_CODE (arg1
) == EQ_EXPR
11678 && integer_zerop (TREE_OPERAND (arg1
, 1))))
11679 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11680 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
11682 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11683 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
11684 && TREE_CODE (arg1
) == INTEGER_CST
11685 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11687 tree tmp1
= fold_convert_loc (loc
, type
, arg1
);
11688 tree tmp2
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11689 tree tmp3
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11690 tmp2
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
, tmp2
, tmp1
);
11691 tmp3
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
, tmp3
, tmp1
);
11693 fold_convert_loc (loc
, type
,
11694 fold_build2_loc (loc
, BIT_IOR_EXPR
,
11695 type
, tmp2
, tmp3
));
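      /* Worked example of the canonicalization above (illustrative
	 only, not from the original source): with C1 == 3 and C2 == 5,
	 (x | 3) & 5 becomes (x & 5) | (3 & 5), i.e. (x & 5) | 1,
	 exposing a constant IOR operand for the folds that follow.  */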
      /* (X | Y) & Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
      /* (X | Y) & X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
      /* X & (X | Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
      /* X & (Y | X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));

      /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_onep (arg1))
	{
	  tree tem2;
	  tem = TREE_OPERAND (arg0, 0);
	  tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
	  tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
				  tem, tem2);
	  return fold_build2_loc (loc, EQ_EXPR, type, tem2,
				  build_zero_cst (TREE_TYPE (tem)));
	}
      /* Fold ~X & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && integer_onep (arg1))
	{
	  tree tem2;
	  tem = TREE_OPERAND (arg0, 0);
	  tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
	  tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
				  tem, tem2);
	  return fold_build2_loc (loc, EQ_EXPR, type, tem2,
				  build_zero_cst (TREE_TYPE (tem)));
	}
      /* Fold !X & 1 as X == 0.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && integer_onep (arg1))
	{
	  tem = TREE_OPERAND (arg0, 0);
	  return fold_build2_loc (loc, EQ_EXPR, type, tem,
				  build_zero_cst (TREE_TYPE (tem)));
	}

      /* Fold (X ^ Y) & Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg1));
	}
      /* Fold (X ^ Y) & X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg1));
	}
      /* Fold X & (X ^ Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_convert_loc (loc, type, arg0),
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
	}
      /* Fold X & (Y ^ X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg0));
	}

      /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
	 multiple of 1 << CST.  */
      if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  wide_int cst1 = arg1;
	  wide_int ncst1 = -cst1;
	  if ((cst1 & ncst1) == ncst1
	      && multiple_of_p (type, arg0,
				wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
	    return fold_convert_loc (loc, type, arg0);
	}

      /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
	 bits from CST2.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == MULT_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  wide_int warg1 = arg1;
	  wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));

	  if (masked == 0)
	    return omit_two_operands_loc (loc, type, build_zero_cst (type),
					  arg0, arg1);
	  else if (masked != warg1)
	    {
	      /* Avoid the transform if arg1 is a mask of some
		 mode which allows further optimizations.  */
	      int pop = wi::popcount (warg1);
	      if (!(pop >= BITS_PER_UNIT
		    && exact_log2 (pop) != -1
		    && wi::mask (pop, false, warg1.get_precision ()) == warg1))
		return fold_build2_loc (loc, code, type, op0,
					wide_int_to_tree (type, masked));
	    }
	}
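      /* Examples for the two multiply folds above (illustrative only,
	 not from the original source): (x * 4) & 3 folds to 0, since
	 the product always has its low two bits clear; (x * 4) & 0x7f
	 becomes (x * 4) & 0x7c, dropping mask bits that are known to
	 be zero (a contiguous mode-style mask such as 0xff would be
	 kept unchanged, as later folds may exploit it).  */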
      /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
	 ((A & N) + B) & M -> (A + B) & M
	 Similarly if (N & M) == 0,
	 ((A | N) + B) & M -> (A + B) & M
	 and for - instead of + (or unary - instead of +)
	 and/or ^ instead of |.
	 If B is constant and (B & M) == 0, fold into A & M.  */
      if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  wide_int cst1 = arg1;
	  if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
	      && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	      && (TREE_CODE (arg0) == PLUS_EXPR
		  || TREE_CODE (arg0) == MINUS_EXPR
		  || TREE_CODE (arg0) == NEGATE_EXPR)
	      && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
		  || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
	    {
	      tree pmop[2];
	      int which = 0;
	      wide_int cst0;

	      /* Now we know that arg0 is (C + D) or (C - D) or
		 -C and arg1 (M) is == (1LL << cst) - 1.
		 Store C into PMOP[0] and D into PMOP[1].  */
	      pmop[0] = TREE_OPERAND (arg0, 0);
	      pmop[1] = NULL;
	      if (TREE_CODE (arg0) != NEGATE_EXPR)
		{
		  pmop[1] = TREE_OPERAND (arg0, 1);
		  which = 1;
		}

	      if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
		which = -1;

	      for (; which >= 0; which--)
		switch (TREE_CODE (pmop[which]))
		  {
		  case BIT_AND_EXPR:
		  case BIT_IOR_EXPR:
		  case BIT_XOR_EXPR:
		    if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
			!= INTEGER_CST)
		      break;
		    cst0 = TREE_OPERAND (pmop[which], 1);
		    cst0 &= cst1;
		    if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
		      {
			if (cst0 != cst1)
			  break;
		      }
		    else if (cst0 != 0)
		      break;
		    /* If C or D is of the form (A & N) where
		       (N & M) == M, or of the form (A | N) or
		       (A ^ N) where (N & M) == 0, replace it with A.  */
		    pmop[which] = TREE_OPERAND (pmop[which], 0);
		    break;
		  case INTEGER_CST:
		    /* If C or D is a N where (N & M) == 0, it can be
		       omitted (assumed 0).  */
		    if ((TREE_CODE (arg0) == PLUS_EXPR
			 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
			&& (cst1 & pmop[which]) == 0)
		      pmop[which] = NULL;
		    break;
		  default:
		    break;
		  }

	      /* Only build anything new if we optimized one or both arguments
		 above.  */
	      if (pmop[0] != TREE_OPERAND (arg0, 0)
		  || (TREE_CODE (arg0) != NEGATE_EXPR
		      && pmop[1] != TREE_OPERAND (arg0, 1)))
		{
		  tree utype = TREE_TYPE (arg0);
		  if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
		    {
		      /* Perform the operations in a type that has defined
			 overflow behavior.  */
		      utype = unsigned_type_for (TREE_TYPE (arg0));
		      if (pmop[0] != NULL)
			pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
		      if (pmop[1] != NULL)
			pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
		    }

		  if (TREE_CODE (arg0) == NEGATE_EXPR)
		    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
		  else if (TREE_CODE (arg0) == PLUS_EXPR)
		    {
		      if (pmop[0] != NULL && pmop[1] != NULL)
			tem = fold_build2_loc (loc, PLUS_EXPR, utype,
					       pmop[0], pmop[1]);
		      else if (pmop[0] != NULL)
			tem = pmop[0];
		      else if (pmop[1] != NULL)
			tem = pmop[1];
		      else
			return build_int_cst (type, 0);
		    }
		  else if (pmop[0] == NULL)
		    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
		  else
		    tem = fold_build2_loc (loc, MINUS_EXPR, utype,
					   pmop[0], pmop[1]);
		  /* TEM is now the new binary +, - or unary - replacement.  */
		  tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
					 fold_convert_loc (loc, utype, arg1));
		  return fold_convert_loc (loc, type, tem);
		}
	    }
	}
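      /* Worked example of the PLUS/MINUS/NEGATE narrowing above
	 (illustrative only, not from the original source): with
	 M == 0xff, ((a & 0xff) + b) & 0xff simplifies to
	 (a + b) & 0xff, and ((a | 0x100) + b) & 0xff likewise drops
	 the IOR, because bits outside M cannot affect the masked
	 sum.  */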
      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	{
	  prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

	  wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
	  if (mask == -1)
	    return
	      fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	}

      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

	 This results in more efficient code for machines without a NOR
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NOR instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold_build1_loc (loc, BIT_NOT_EXPR, type,
				  build2 (BIT_IOR_EXPR, type,
					  fold_convert_loc (loc, type,
							    TREE_OPERAND (arg0, 0)),
					  fold_convert_loc (loc, type,
							    TREE_OPERAND (arg1, 0))));
	}

      /* If arg0 is derived from the address of an object or function, we may
	 be able to fold this expression using the object or function's
	 alignment.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
	{
	  unsigned HOST_WIDE_INT modulus, residue;
	  unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);

	  modulus = get_pointer_modulus_and_residue (arg0, &residue,
						     integer_onep (arg1));

	  /* This works because modulus is a power of 2.  If this weren't the
	     case, we'd have to replace it by its greatest power-of-2
	     divisor: modulus & -modulus.  */
	  if (low < modulus)
	    return build_int_cst (type, residue & low);
	}

      /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
	      (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
	 if the new mask might be further optimized.  */
      if ((TREE_CODE (arg0) == LSHIFT_EXPR
	   || TREE_CODE (arg0) == RSHIFT_EXPR)
	  && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
	  && TREE_CODE (arg1) == INTEGER_CST
	  && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
	  && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
	  && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
	      < TYPE_PRECISION (TREE_TYPE (arg0))))
	{
	  unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
	  unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
	  unsigned HOST_WIDE_INT newmask, zerobits = 0;
	  tree shift_type = TREE_TYPE (arg0);

	  if (TREE_CODE (arg0) == LSHIFT_EXPR)
	    zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
	  else if (TREE_CODE (arg0) == RSHIFT_EXPR
		   && TYPE_PRECISION (TREE_TYPE (arg0))
		      == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
	    {
	      prec = TYPE_PRECISION (TREE_TYPE (arg0));
	      tree arg00 = TREE_OPERAND (arg0, 0);
	      /* See if more bits can be proven as zero because of
		 zero extension.  */
	      if (TREE_CODE (arg00) == NOP_EXPR
		  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
		{
		  tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
		  if (TYPE_PRECISION (inner_type)
		      == GET_MODE_PRECISION (TYPE_MODE (inner_type))
		      && TYPE_PRECISION (inner_type) < prec)
		    {
		      prec = TYPE_PRECISION (inner_type);
		      /* See if we can shorten the right shift.  */
		      if (shiftc < prec)
			shift_type = inner_type;
		      /* Otherwise X >> C1 is all zeros, so we'll optimize
			 it into (X, 0) later on by making sure zerobits
			 is all ones.  */
		    }
		}
	      zerobits = ~(unsigned HOST_WIDE_INT) 0;
	      if (shiftc < prec)
		{
		  zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
		  zerobits <<= prec - shiftc;
		}
	      /* For arithmetic shift if sign bit could be set, zerobits
		 can contain actually sign bits, so no transformation is
		 possible, unless MASK masks them all away.  In that
		 case the shift needs to be converted into logical shift.  */
	      if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
		  && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
		{
		  if ((mask & zerobits) == 0)
		    shift_type = unsigned_type_for (TREE_TYPE (arg0));
		  else
		    zerobits = 0;
		}
	    }

	  /* ((X << 16) & 0xff00) is (X, 0).  */
	  if ((mask & zerobits) == mask)
	    return omit_one_operand_loc (loc, type,
					 build_int_cst (type, 0), arg0);

	  newmask = mask | zerobits;
	  if (newmask != mask && (newmask & (newmask + 1)) == 0)
	    {
	      /* Only do the transformation if NEWMASK is some integer
		 mode's mask.  */
	      for (prec = BITS_PER_UNIT;
		   prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
		if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
		  break;
	      if (prec < HOST_BITS_PER_WIDE_INT
		  || newmask == ~(unsigned HOST_WIDE_INT) 0)
		{
		  tree newmaskt;

		  if (shift_type != TREE_TYPE (arg0))
		    {
		      tem = fold_build2_loc (loc, TREE_CODE (arg0),
					     shift_type,
					     fold_convert_loc (loc, shift_type,
							       TREE_OPERAND (arg0, 0)),
					     TREE_OPERAND (arg0, 1));
		      tem = fold_convert_loc (loc, type, tem);
		    }
		  else
		    tem = op0;
		  newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
		  if (!tree_int_cst_equal (newmaskt, arg1))
		    return fold_build2_loc (loc, BIT_AND_EXPR, type,
					    tem, newmaskt);
		}
	    }
	}
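      /* Example for the shift/mask rewrite above (illustrative only,
	 not from the original source): for a 32-bit unsigned x,
	 (x << 4) & 0xf0 widens the mask to (x << 4) & 0xff, since the
	 low four result bits are known to be zero and 0xff is an
	 8-bit mode's mask that later folds handle better.  */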
      goto associate;

    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
	 of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
	  && real_zerop (arg1))
	return NULL_TREE;

      /* Optimize A / A to 1.0 if we don't care about
	 NaNs or Infinities.  Skip the transformation
	 for non-real operands.  */
      if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
	  && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
	  && operand_equal_p (arg0, arg1, 0))
	{
	  tree r = build_real (TREE_TYPE (arg0), dconst1);

	  return omit_two_operands_loc (loc, type, r, arg0, arg1);
	}

      /* The complex version of the above A / A optimization.  */
      if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
	  && operand_equal_p (arg0, arg1, 0))
	{
	  tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
	  if (! HONOR_NANS (TYPE_MODE (elem_type))
	      && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
	    {
	      tree r = build_real (elem_type, dconst1);
	      /* omit_two_operands will call fold_convert for us.  */
	      return omit_two_operands_loc (loc, type, r, arg0, arg1);
	    }
	}

      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				TREE_OPERAND (arg0, 0),
				negate_expr (arg1));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				negate_expr (arg0),
				TREE_OPERAND (arg1, 0));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_onep (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_minus_onep (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type,
						      negate_expr (arg0)));

      /* If ARG1 is a constant, we can convert this to a multiply by the
	 reciprocal.  This does not have the same rounding properties,
	 so only do this if -freciprocal-math.  We can actually
	 always safely do it if ARG1 is a power of two, but it's hard to
	 tell if it is or not in a portable manner.  */
      if (optimize
	  && (TREE_CODE (arg1) == REAL_CST
	      || (TREE_CODE (arg1) == COMPLEX_CST
		  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
	      || (TREE_CODE (arg1) == VECTOR_CST
		  && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
	{
	  if (flag_reciprocal_math
	      && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
	    return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
	  /* Find the reciprocal if optimizing and the result is exact.
	     TODO: Complex reciprocal not implemented.  */
	  if (TREE_CODE (arg1) != COMPLEX_CST)
	    {
	      tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);

	      if (inverse)
		return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
	    }
	}
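      /* Example for the reciprocal rewrite above (illustrative only,
	 not from the original source): under -freciprocal-math,
	 x / 4.0 becomes x * 0.25; since 4.0 is a power of two the
	 reciprocal is exact, so the exact_inverse path can also apply
	 without -freciprocal-math.  */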
      /* Convert A/B/C to A/(B*C).  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg0) == RDIV_EXPR)
	return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
				fold_build2_loc (loc, MULT_EXPR, type,
						 TREE_OPERAND (arg0, 1), arg1));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg1) == RDIV_EXPR)
	return fold_build2_loc (loc, MULT_EXPR, type,
				fold_build2_loc (loc, RDIV_EXPR, type, arg0,
						 TREE_OPERAND (arg1, 0)),
				TREE_OPERAND (arg1, 1));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg1) == MULT_EXPR
	  && TREE_CODE (arg0) == REAL_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
	{
	  tree tem = const_binop (RDIV_EXPR, arg0,
				  TREE_OPERAND (arg1, 1));
	  if (tem)
	    return fold_build2_loc (loc, RDIV_EXPR, type, tem,
				    TREE_OPERAND (arg1, 0));
	}
      if (flag_unsafe_math_optimizations)
	{
	  enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	  enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	  /* Optimize sin(x)/cos(x) as tan(x).  */
	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
	      && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				  CALL_EXPR_ARG (arg1, 0), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		return build_call_expr_loc (loc, tanfn, 1,
					    CALL_EXPR_ARG (arg0, 0));
	    }

	  /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
	  if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
	       || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
	       || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
	      && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				  CALL_EXPR_ARG (arg1, 0), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		{
		  tree tmp = build_call_expr_loc (loc, tanfn, 1,
						  CALL_EXPR_ARG (arg0, 0));
		  return fold_build2_loc (loc, RDIV_EXPR, type,
					  build_real (type, dconst1), tmp);
		}
	    }

	  /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
	     NaNs or Infinities.  */
	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg1, 0);

	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
		  && operand_equal_p (arg00, arg01, 0))
		{
		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

		  if (cosfn != NULL_TREE)
		    return build_call_expr_loc (loc, cosfn, 1, arg00);
		}
	    }

	  /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
	     NaNs or Infinities.  */
	  if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
	       || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
	       || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg1, 0);

	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
		  && operand_equal_p (arg00, arg01, 0))
		{
		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

		  if (cosfn != NULL_TREE)
		    {
		      tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
		      return fold_build2_loc (loc, RDIV_EXPR, type,
					      build_real (type, dconst1),
					      tmp);
		    }
		}
	    }

	  /* Optimize pow(x,c)/x as pow(x,c-1).  */
	  if (fcode0 == BUILT_IN_POW
	      || fcode0 == BUILT_IN_POWF
	      || fcode0 == BUILT_IN_POWL)
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
	      if (TREE_CODE (arg01) == REAL_CST
		  && !TREE_OVERFLOW (arg01)
		  && operand_equal_p (arg1, arg00, 0))
		{
		  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		  REAL_VALUE_TYPE c;
		  tree arg;

		  c = TREE_REAL_CST (arg01);
		  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
		  arg = build_real (type, c);
		  return build_call_expr_loc (loc, powfn, 2, arg1, arg);
		}
	    }

	  /* Optimize a/root(b/c) into a*root(c/b).  */
	  if (BUILTIN_ROOT_P (fcode1))
	    {
	      tree rootarg = CALL_EXPR_ARG (arg1, 0);

	      if (TREE_CODE (rootarg) == RDIV_EXPR)
		{
		  tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
		  tree b = TREE_OPERAND (rootarg, 0);
		  tree c = TREE_OPERAND (rootarg, 1);

		  tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);

		  tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
		  return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
		}
	    }

	  /* Optimize x/expN(y) into x*expN(-y).  */
	  if (BUILTIN_EXPONENT_P (fcode1))
	    {
	      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
	      tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
	      arg1 = build_call_expr_loc (loc,
					  expfn, 1,
					  fold_convert_loc (loc, type, arg));
	      return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
	    }

	  /* Optimize x/pow(y,z) into x*pow(y,-z).  */
	  if (fcode1 == BUILT_IN_POW
	      || fcode1 == BUILT_IN_POWF
	      || fcode1 == BUILT_IN_POWL)
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
	      tree arg10 = CALL_EXPR_ARG (arg1, 0);
	      tree arg11 = CALL_EXPR_ARG (arg1, 1);
	      tree neg11 = fold_convert_loc (loc, type,
					     negate_expr (arg11));
	      arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
	      return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
	    }
	}
      return NULL_TREE;
    case TRUNC_DIV_EXPR:
      /* Optimize (X & (-A)) / A where A is a power of 2,
	 to X >> log2(A).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
	  && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
	{
	  tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
				      arg1, TREE_OPERAND (arg0, 1));
	  if (sum && integer_zerop (sum))
	    {
	      tree pow2 = build_int_cst (integer_type_node,
					 wi::exact_log2 (arg1));
	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
				      TREE_OPERAND (arg0, 0), pow2);
	    }
	}
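      /* Example for the fold above (illustrative only, not from the
	 original source): for signed x, (x & -8) / 8 becomes x >> 3,
	 since the AND already clears the low three bits that the
	 truncating division would discard.  */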
      /* Fall through */

    case FLOOR_DIV_EXPR:
      /* Simplify A / (B << N) where A and B are positive and B is
	 a power of 2, to A >> (N + log2(B)).  */
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == LSHIFT_EXPR
	  && (TYPE_UNSIGNED (type)
	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
	{
	  tree sval = TREE_OPERAND (arg1, 0);
	  if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
	    {
	      tree sh_cnt = TREE_OPERAND (arg1, 1);
	      tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
					 wi::exact_log2 (sval));

	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying A / (B << N)"),
				       WARN_STRICT_OVERFLOW_MISC);

	      sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
					sh_cnt, pow2);
	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
				      fold_convert_loc (loc, type, arg0),
				      sh_cnt);
	    }
	}

      /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
	 TRUNC_DIV_EXPR.  Rewrite into the latter in this case.  */
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_UNSIGNED (type)
	  && code == FLOOR_DIV_EXPR)
	return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);

      /* Fall through */
    case ROUND_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg1))
	return NULL_TREE;
      /* X / -1 is -X.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && wi::eq_p (arg1, -1))
	return fold_convert_loc (loc, type, negate_expr (arg0));

      /* Convert -A / -B to A / B when the type is signed and overflow is
	 undefined.  */
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	  && TREE_CODE (arg0) == NEGATE_EXPR
	  && negate_expr_p (arg1))
	{
	  if (INTEGRAL_TYPE_P (type))
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when distributing negation across "
				    "division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, type,
						    TREE_OPERAND (arg0, 0)),
				  fold_convert_loc (loc, type,
						    negate_expr (arg1)));
	}
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && negate_expr_p (arg0))
	{
	  if (INTEGRAL_TYPE_P (type))
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when distributing negation across "
				    "division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, type,
						    negate_expr (arg0)),
				  fold_convert_loc (loc, type,
						    TREE_OPERAND (arg1, 0)));
	}

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
	 operation, EXACT_DIV_EXPR.

	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
	 At one time others generated faster code, it's not clear if they do
	 after the last round to changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
	  && multiple_of_p (type, arg0, arg1))
	return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					 &strict_overflow_p)))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_convert_loc (loc, type, tem);
	}

      return NULL_TREE;
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* X % 1 is always zero, but be sure to preserve any side
	 effects in X.  */
      if (integer_onep (arg1))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* X % 0, return X % 0 unchanged so that we can get the
	 proper warnings and errors.  */
      if (integer_zerop (arg1))
	return NULL_TREE;

      /* 0 % X is always zero, but be sure to preserve any side
	 effects in X.  Place this after checking for X == 0.  */
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

      /* X % -1 is zero.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && wi::eq_p (arg1, -1))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* X % -C is the same as X % C.  */
      if (code == TRUNC_MOD_EXPR
	  && TYPE_SIGN (type) == SIGNED
	  && TREE_CODE (arg1) == INTEGER_CST
	  && !TREE_OVERFLOW (arg1)
	  && wi::neg_p (arg1)
	  && !TYPE_OVERFLOW_TRAPS (type)
	  /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
	  && !sign_bit_p (arg1, arg1))
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type,
						  negate_expr (arg1)));

      /* X % -Y is the same as X % Y.  */
      if (code == TRUNC_MOD_EXPR
	  && !TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && !TYPE_OVERFLOW_TRAPS (type))
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					 &strict_overflow_p)))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying modulus"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_convert_loc (loc, type, tem);
	}

      /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
	 i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
      if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
	  && (TYPE_UNSIGNED (type)
	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
	{
	  tree c = arg1;
	  /* Also optimize A % (C << N) where C is a power of 2,
	     to A & ((C << N) - 1).  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR)
	    c = TREE_OPERAND (arg1, 0);

	  if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
	    {
	      tree mask
		= fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
				   build_int_cst (TREE_TYPE (arg1), 1));
	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying "
					"X % (power of two)"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_convert_loc (loc, type, arg0),
				      fold_convert_loc (loc, type, mask));
	    }
	}
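      /* Example for the power-of-two fold above (illustrative only,
	 not from the original source): for unsigned x, x % 8 becomes
	 x & 7, and x % (1 << n) becomes x & ((1 << n) - 1), replacing
	 a division with a single AND.  */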
      return NULL_TREE;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_all_onesp (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
	  && tree_expr_nonnegative_p (arg1))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      /* ... fall through ...  */

    case LSHIFT_EXPR:
    shift:
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Prefer vector1 << scalar to vector1 << vector2
	 if vector2 is uniform.  */
      if (VECTOR_TYPE_P (TREE_TYPE (arg1))
	  && (tem = uniform_vector_p (arg1)) != NULL_TREE)
	return fold_build2_loc (loc, code, type, op0, tem);

      /* Since negative shift count is not well-defined,
	 don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
	return NULL_TREE;

      prec = element_precision (type);

      /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
      if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
	  && tree_to_uhwi (arg1) < prec
	  && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
	  && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
	{
	  unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
			      + tree_to_uhwi (arg1));

	  /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
	     being well defined.  */
	  if (low >= prec)
	    {
	      if (code == LROTATE_EXPR || code == RROTATE_EXPR)
		low = low % prec;
	      else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
		return omit_one_operand_loc (loc, type, build_zero_cst (type),
					     TREE_OPERAND (arg0, 0));
	      else
		low = prec - 1;
	    }

	  return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				  build_int_cst (TREE_TYPE (arg1), low));
	}

      /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
	 into x & ((unsigned)-1 >> c) for unsigned types.  */
      if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
	   || (TYPE_UNSIGNED (type)
	       && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
	  && tree_fits_uhwi_p (arg1)
	  && tree_to_uhwi (arg1) < prec
	  && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
	  && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
	{
	  HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
	  HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
	  tree lshift;
	  tree arg00;

	  if (low0 == low1)
	    {
	      arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));

	      lshift = build_minus_one_cst (type);
	      lshift = const_binop (code, lshift, arg1);

	      return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
	    }
	}

      /* Rewrite an LROTATE_EXPR by a constant into an
	 RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
	{
	  tree tem = build_int_cst (TREE_TYPE (arg1), prec);
	  tem = const_binop (MINUS_EXPR, tem, arg1);
	  return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
	}

      /* If we have a rotate of a bit operation with the rotate count and
	 the second operand of the bit operation both constant,
	 permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == BIT_AND_EXPR
	      || TREE_CODE (arg0) == BIT_IOR_EXPR
	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, TREE_CODE (arg0), type,
				fold_build2_loc (loc, code, type,
						 TREE_OPERAND (arg0, 0), arg1),
				fold_build2_loc (loc, code, type,
						 TREE_OPERAND (arg0, 1), arg1));

      /* Two consecutive rotates adding up to some integer
	 multiple of the precision of the type can be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == RROTATE_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
			     prec) == 0)
	return TREE_OPERAND (arg0, 0);
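      /* Example for the rotate folds above (illustrative only, not
	 from the original source): on a 32-bit type, a left-rotate by
	 5 is rewritten as a right-rotate by 27, and a right-rotate by
	 12 of a right-rotate by 20 cancels to the operand, because
	 12 + 20 is a whole multiple of the precision.  */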
      /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
	      (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
	 if the latter can be further optimized.  */
      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree mask = fold_build2_loc (loc, code, type,
				       fold_convert_loc (loc, type,
							 TREE_OPERAND (arg0, 1)),
				       arg1);
	  tree shift = fold_build2_loc (loc, code, type,
					fold_convert_loc (loc, type,
							  TREE_OPERAND (arg0, 0)),
					arg1);
	  tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
	  if (tem)
	    return tem;
	}

      return NULL_TREE;

    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
      if (tem)
	return tem;
      goto associate;

    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_MAX_VALUE (type)
	  && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
      if (tem)
	return tem;
      goto associate;
    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
	return fold_convert_loc (loc, type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
	 must be evaluated.  */
      if (integer_zerop (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
	 case will be handled here.  */
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
	 means A >= Y && A != MAX, but in this case we know that
	 A < X <= MAX.  */

      if (!TREE_SIDE_EFFECTS (arg0)
	  && !TREE_SIDE_EFFECTS (arg1))
	{
	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
	  if (tem && !operand_equal_p (tem, arg0, 0))
	    return fold_build2_loc (loc, code, type, tem, arg1);

	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
	  if (tem && !operand_equal_p (tem, arg1, 0))
	    return fold_build2_loc (loc, code, type, arg0, tem);
	}

      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
	  != NULL_TREE)
	return tem;

      return NULL_TREE;
    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or true.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return fold_convert_loc (loc, type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant true, result is true, but we must
	 evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
	 TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X || X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
      /* X || !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* (X && !Y) || (!X && Y) is X ^ Y */
      if (TREE_CODE (arg0) == TRUTH_AND_EXPR
	  && TREE_CODE (arg1) == TRUTH_AND_EXPR)
	{
	  tree a0, a1, l0, l1, n0, n1;

	  a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));

	  l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));

	  n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
	  n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);

	  if ((operand_equal_p (n0, a0, 0)
	       && operand_equal_p (n1, a1, 0))
	      || (operand_equal_p (n0, a1, 0)
		  && operand_equal_p (n1, a0, 0)))
	    return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
	}

      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
	  != NULL_TREE)
	return tem;

      return NULL_TREE;
    case TRUTH_XOR_EXPR:
      /* If the second arg is constant zero, drop it.  */
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If the second arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg1))
	{
	  tem = invert_truthvalue_loc (loc, arg0);
	  return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	}
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* !X ^ X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg1);

      /* X ^ !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      return NULL_TREE;

    case EQ_EXPR:
    case NE_EXPR:
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;
      /* bool_var != 0 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	  && code == NE_EXPR)
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var == 1 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	  && code == EQ_EXPR)
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var != 1 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	  && code == NE_EXPR)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
						  TREE_TYPE (arg0), arg0));

      /* bool_var == 0 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	  && code == EQ_EXPR)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
						  TREE_TYPE (arg0), arg0));

      /* !exp != 0 becomes !exp */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
	  && code == NE_EXPR)
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* If this is an equality comparison of the address of two non-weak,
	 unaliased symbols neither of which are extern (since we do not
	 have access to attributes for externs), then we know the result.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
	{
	  /* We know that we're looking at the address of two
	     non-weak, unaliased, static _DECL nodes.

	     It is both wasteful and incorrect to call operand_equal_p
	     to compare the two ADDR_EXPR nodes.  It is wasteful in that
	     all we need to do is test pointer equality for the arguments
	     to the two ADDR_EXPR nodes.  It is incorrect to use
	     operand_equal_p as that function is NOT equivalent to a
	     C equality test.  It can in fact return false for two
	     objects which would test as equal using the C equality
	     operator.  */
	  bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
	  return constant_boolean_node (equal
					? code == EQ_EXPR : code != EQ_EXPR,
					type);
	}

      /* Similarly for a NEGATE_EXPR.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
							arg1)))
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
      /* Similarly for a BIT_XOR_EXPR;  X ^ C1 == C2 is X == (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				fold_build2_loc (loc, BIT_XOR_EXPR,
						 TREE_TYPE (arg0),
						 fold_convert_loc (loc,
								   TREE_TYPE (arg0),
								   arg1),
						 TREE_OPERAND (arg0, 1)));

      /* Transform comparisons of the form X +- Y CMP X to Y CMP 0.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR
	   || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	   || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
									0)),
			      arg1, 0)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	      || POINTER_TYPE_P (TREE_TYPE (arg0))))
	{
	  tree val = TREE_OPERAND (arg0, 1);
	  return omit_two_operands_loc (loc, type,
					fold_build2_loc (loc, code, type,
							 val,
							 build_int_cst (TREE_TYPE (val),
									0)),
					TREE_OPERAND (arg0, 0), arg1);
	}

      /* Transform comparisons of the form C - X CMP X if C % 2 == 1.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
									1)),
			      arg1, 0)
	  && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
	return omit_two_operands_loc (loc, type,
				      code == NE_EXPR
				      ? boolean_true_node : boolean_false_node,
				      TREE_OPERAND (arg0, 1), arg1);

      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
      if (TREE_CODE (arg0) == ABS_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1)))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);

      /* If this is an EQ or NE comparison with zero and ARG0 is
	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
	 two operations, but the latter can be done in one less insn
	 on machines that have only two-operand insns or on which a
	 constant cannot be the first operand.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_zerop (arg1))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  if (TREE_CODE (arg00) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg00, 0)))
	    {
	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
					  arg01, TREE_OPERAND (arg00, 1));
	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				     build_int_cst (TREE_TYPE (arg0), 1));
	      return fold_build2_loc (loc, code, type,
				      fold_convert_loc (loc, TREE_TYPE (arg1),
							tem),
				      arg1);
	    }
	  else if (TREE_CODE (arg01) == LSHIFT_EXPR
		   && integer_onep (TREE_OPERAND (arg01, 0)))
	    {
	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
					  arg00, TREE_OPERAND (arg01, 1));
	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				     build_int_cst (TREE_TYPE (arg0), 1));
	      return fold_build2_loc (loc, code, type,
				      fold_convert_loc (loc, TREE_TYPE (arg1),
							tem),
				      arg1);
	    }
	}
      /* If this is an NE or EQ comparison of zero against the result of a
	 signed MOD operation whose second operand is a power of 2, make
	 the MOD operation unsigned since it is simpler and equivalent.  */
      if (integer_zerop (arg1)
	  && !TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tree newtype = unsigned_type_for (TREE_TYPE (arg0));
	  tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
					 fold_convert_loc (loc, newtype,
							   TREE_OPERAND (arg0, 0)),
					 fold_convert_loc (loc, newtype,
							   TREE_OPERAND (arg0, 1)));

	  return fold_build2_loc (loc, code, type, newmod,
				  fold_convert_loc (loc, newtype, arg1));
	}

      /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
	 C1 is a valid shift constant, and C2 is a power of two, i.e.
	 a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
	     == INTEGER_CST
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && integer_zerop (arg1))
	{
	  tree itype = TREE_TYPE (arg0);
	  tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
	  prec = TYPE_PRECISION (itype);

	  /* Check for a valid shift count.  */
	  if (wi::ltu_p (arg001, prec))
	    {
	      tree arg01 = TREE_OPERAND (arg0, 1);
	      tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	      unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
	      /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
		 can be rewritten as (X & (C2 << C1)) != 0.  */
	      if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
		{
		  tem = fold_build2_loc (loc, LSHIFT_EXPR, itype,
					 arg01, arg001);
		  tem = fold_build2_loc (loc, BIT_AND_EXPR, itype,
					 arg000, tem);
		  return fold_build2_loc (loc, code, type, tem,
					  fold_convert_loc (loc, itype, arg1));
		}
	      /* Otherwise, for signed (arithmetic) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
		 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
	      else if (!TYPE_UNSIGNED (itype))
		return fold_build2_loc (loc,
					code == EQ_EXPR ? GE_EXPR : LT_EXPR,
					type,
					arg000, build_int_cst (itype, 0));
	      /* Otherwise, for unsigned (logical) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
		 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
	      else
		return omit_one_operand_loc (loc, type,
					     code == EQ_EXPR ? integer_one_node
							     : integer_zero_node,
					     arg000);
	    }
	}
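      /* Examples for the folds above (illustrative only, not from the
	 original source): for 32-bit signed x, ((x >> 28) & 4) != 0
	 becomes (x & (4 << 28)) != 0, while ((x >> 31) & 2) != 0
	 cannot shift the bit back without overflowing the precision
	 and instead folds to the sign test x < 0.  */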
      /* If we have (A & C) == C where C is a power of 2, convert this into
	 (A & C) != 0.  Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
				arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
							integer_zero_node));

      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
	 bit, then fold the expression into A < 0 or A >= 0.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
      if (tem)
	return tem;

      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
				       TREE_TYPE (TREE_OPERAND (arg0, 1)),
				       TREE_OPERAND (arg0, 1));
	  tree dandnotc
	    = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
			       fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
			       notc);
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (dandnotc))
	    return omit_one_operand_loc (loc, type, rslt, arg0);
	}

      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
				       arg1);
	  tree candnotd
	    = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
			       TREE_OPERAND (arg0, 1),
			       fold_convert_loc (loc, TREE_TYPE (arg0), notd));
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (candnotd))
	    return omit_one_operand_loc (loc, type, rslt, arg0);
	}
      /* If this is a comparison of a field, we may be able to simplify it.  */
      if ((TREE_CODE (arg0) == COMPONENT_REF
	   || TREE_CODE (arg0) == BIT_FIELD_REF)
	  /* Handle the constant case even without -O
	     to make sure the warnings are given.  */
	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
	{
	  t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
	  if (t1)
	    return t1;
	}
      /* Optimize comparisons of strlen vs zero to a compare of the
	 first character of the string vs zero.  To wit,
		strlen(ptr) == 0   =>  *ptr == 0
		strlen(ptr) != 0   =>  *ptr != 0
	 Other cases should reduce to one of these two (or a constant)
	 due to the return value of strlen being unsigned.  */
      if (TREE_CODE (arg0) == CALL_EXPR
	  && integer_zerop (arg1))
	{
	  tree fndecl = get_callee_fndecl (arg0);

	  if (fndecl
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
	      && call_expr_nargs (arg0) == 1
	      && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
	    {
	      tree iref = build_fold_indirect_ref_loc (loc,
						       CALL_EXPR_ARG (arg0, 0));
	      return fold_build2_loc (loc, code, type, iref,
				      build_int_cst (TREE_TYPE (iref), 0));
	    }
	}
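
      /* Editor's illustration (sketch): "if (strlen (p) == 0)" folds to
	 "if (*p == 0)", avoiding the library call entirely; a test like
	 strlen (p) > 0 should already have been reduced to
	 strlen (p) != 0 because the result is unsigned, and then folds
	 to *p != 0 here.  */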
      /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
	 of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
      if (TREE_CODE (arg0) == RSHIFT_EXPR
	  && integer_zerop (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree itype = TREE_TYPE (arg00);
	  if (wi::eq_p (arg01, TYPE_PRECISION (itype) - 1))
	    {
	      if (TYPE_UNSIGNED (itype))
		{
		  itype = signed_type_for (itype);
		  arg00 = fold_convert_loc (loc, itype, arg00);
		}
	      return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
				      type, arg00, build_zero_cst (itype));
	    }
	}
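
      /* Editor's illustration (assumed 32-bit int): (X >> 31) is 0 for
	 non-negative X and -1 for negative X, so (X >> 31) != 0 is
	 exactly X < 0; for unsigned X the operand is first converted to
	 the corresponding signed type.  */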
      /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
      if (integer_zerop (arg1)
	  && TREE_CODE (arg0) == BIT_XOR_EXPR)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg0, 1));

      /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				build_zero_cst (TREE_TYPE (arg0)));

      /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
				build_zero_cst (TREE_TYPE (arg0)));
      /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				fold_build2_loc (loc, BIT_XOR_EXPR,
						 TREE_TYPE (arg1),
						 TREE_OPERAND (arg0, 1), arg1));
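
      /* Editor's illustration (values assumed): (X ^ 5) == 3 is
	 equivalent to X == (5 ^ 3), i.e. X == 6, because XOR with a
	 constant is an involution.  */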
      /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
	 (X & C) == 0 when C is a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
				 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
				 TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type, tem,
				  fold_convert_loc (loc, TREE_TYPE (arg0),
						    arg1));
	}
      /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
	 constant C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type, arg00,
				  build_int_cst (TREE_TYPE (arg00), 0));
	}
      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
	 when C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
				 arg000, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type, tem,
				  build_int_cst (TREE_TYPE (tem), 0));
	}
      if (integer_zerop (arg1)
	  && tree_expr_nonzero_p (arg0))
	{
	  tree res = constant_boolean_node (code == NE_EXPR, type);
	  return omit_one_operand_loc (loc, type, res, arg0);
	}
      /* Fold -X op -Y as X op Y, where op is eq/ne.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, code, type,
				TREE_OPERAND (arg0, 0),
				fold_convert_loc (loc, TREE_TYPE (arg0),
						  TREE_OPERAND (arg1, 0)));
      /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
								      BIT_XOR_EXPR,
								      itype,
								      arg00,
								      arg10),
						     arg01),
				    build_zero_cst (itype));

	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
								      BIT_XOR_EXPR,
								      itype,
								      arg00,
								      arg11),
						     arg01),
				    build_zero_cst (itype));

	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
								      BIT_XOR_EXPR,
								      itype,
								      arg01,
								      arg10),
						     arg00),
				    build_zero_cst (itype));

	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
								      BIT_XOR_EXPR,
								      itype,
								      arg01,
								      arg11),
						     arg00),
				    build_zero_cst (itype));
	}
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == BIT_XOR_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
	     operand_equal_p guarantees no side-effects so we don't need
	     to use omit_one_operand on Z.  */
	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2_loc (loc, code, type, arg00,
				    fold_convert_loc (loc, TREE_TYPE (arg00),
						      arg10));
	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2_loc (loc, code, type, arg00,
				    fold_convert_loc (loc, TREE_TYPE (arg00),
						      arg11));
	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2_loc (loc, code, type, arg01,
				    fold_convert_loc (loc, TREE_TYPE (arg01),
						      arg10));
	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2_loc (loc, code, type, arg01,
				    fold_convert_loc (loc, TREE_TYPE (arg01),
						      arg11));

	  /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
	  if (TREE_CODE (arg01) == INTEGER_CST
	      && TREE_CODE (arg11) == INTEGER_CST)
	    {
	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
				     fold_convert_loc (loc, itype, arg11));
	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
	      return fold_build2_loc (loc, code, type, tem,
				      fold_convert_loc (loc, itype, arg10));
	    }
	}
      /* Attempt to simplify equality/inequality comparisons of complex
	 values.  Only lower the comparison if the result is known or
	 can be simplified to a single scalar comparison.  */
      if ((TREE_CODE (arg0) == COMPLEX_EXPR
	   || TREE_CODE (arg0) == COMPLEX_CST)
	  && (TREE_CODE (arg1) == COMPLEX_EXPR
	      || TREE_CODE (arg1) == COMPLEX_CST))
	{
	  tree real0, imag0, real1, imag1;
	  tree rcond, icond;

	  if (TREE_CODE (arg0) == COMPLEX_EXPR)
	    {
	      real0 = TREE_OPERAND (arg0, 0);
	      imag0 = TREE_OPERAND (arg0, 1);
	    }
	  else
	    {
	      real0 = TREE_REALPART (arg0);
	      imag0 = TREE_IMAGPART (arg0);
	    }

	  if (TREE_CODE (arg1) == COMPLEX_EXPR)
	    {
	      real1 = TREE_OPERAND (arg1, 0);
	      imag1 = TREE_OPERAND (arg1, 1);
	    }
	  else
	    {
	      real1 = TREE_REALPART (arg1);
	      imag1 = TREE_IMAGPART (arg1);
	    }

	  rcond = fold_binary_loc (loc, code, type, real0, real1);
	  if (rcond && TREE_CODE (rcond) == INTEGER_CST)
	    {
	      if (integer_zerop (rcond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands_loc (loc, type, boolean_false_node,
						  imag0, imag1);
		  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands_loc (loc, type, boolean_true_node,
						  imag0, imag1);
		  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
		}
	    }

	  icond = fold_binary_loc (loc, code, type, imag0, imag1);
	  if (icond && TREE_CODE (icond) == INTEGER_CST)
	    {
	      if (integer_zerop (icond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands_loc (loc, type, boolean_false_node,
						  real0, real1);
		  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands_loc (loc, type, boolean_true_node,
						  real0, real1);
		  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
		}
	    }
	}

      return NULL_TREE;

    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;

      /* Transform comparisons of the form X +- C CMP X.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	       && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
	      || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  enum tree_code code0 = TREE_CODE (arg0);
	  int is_positive;

	  if (TREE_CODE (arg01) == REAL_CST)
	    is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
	  else
	    is_positive = tree_int_cst_sgn (arg01);

	  /* (X - c) > X becomes false.  */
	  if (code == GT_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that (X - c) > X "
					"is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Likewise (X + c) < X becomes false.  */
	  if (code == LT_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) < X is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Convert (X - c) <= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == LE_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X - c) <= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  /* Convert (X + c) >= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == GE_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) >= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  if (TREE_CODE (arg01) == INTEGER_CST)
	    {
	      /* Convert X + c > X and X - c < X to true for integers.  */
	      if (code == GT_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) > X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      if (code == LT_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) < X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      /* Convert X + c <= X and X - c >= X to false for integers.  */
	      if (code == LE_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) <= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}

	      if (code == GE_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) >= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}
	    }
	}
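
      /* Editor's note (sketch): these folds rely on signed overflow
	 being undefined; e.g. "x + 1 > x" for signed x folds to 1, and
	 -Wstrict-overflow can report the assumption through
	 fold_overflow_warning.  For unsigned types, where wrap-around
	 is defined, TYPE_OVERFLOW_UNDEFINED is false and nothing is
	 folded here.  */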
      /* Comparisons with the highest or lowest possible integer of
	 the specified precision will have known values.  */
      {
	tree arg1_type = TREE_TYPE (arg1);
	unsigned int prec = TYPE_PRECISION (arg1_type);

	if (TREE_CODE (arg1) == INTEGER_CST
	    && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
	  {
	    wide_int max = wi::max_value (arg1_type);
	    wide_int signed_max = wi::max_value (prec, SIGNED);
	    wide_int min = wi::min_value (arg1_type);

	    if (wi::eq_p (arg1, max))
	      switch (code)
		{
		case GT_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_zero_node, arg0);

		case GE_EXPR:
		  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

		case LE_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_one_node, arg0);

		case LT_EXPR:
		  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

		/* The GE_EXPR and LT_EXPR cases above are not normally
		   reached because of previous transformations.  */

		default:
		  break;
		}

	    else if (wi::eq_p (arg1, max - 1))
	      switch (code)
		{
		case GT_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1));
		  return fold_build2_loc (loc, EQ_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		case LE_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1));
		  return fold_build2_loc (loc, NE_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		default:
		  break;
		}

	    else if (wi::eq_p (arg1, min))
	      switch (code)
		{
		case LT_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_zero_node, arg0);

		case LE_EXPR:
		  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

		case GE_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_one_node, arg0);

		case GT_EXPR:
		  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

		default:
		  break;
		}

	    else if (wi::eq_p (arg1, min + 1))
	      switch (code)
		{
		case GE_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1));
		  return fold_build2_loc (loc, NE_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		case LT_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1));
		  return fold_build2_loc (loc, EQ_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		default:
		  break;
		}

	    else if (wi::eq_p (arg1, signed_max)
		     && TYPE_UNSIGNED (arg1_type)
		     /* We will flip the signedness of the comparison operator
			associated with the mode of arg1, so the sign bit is
			specified by this mode.  Check that arg1 is the signed
			max associated with this sign bit.  */
		     && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
		     /* signed_type does not work on pointer types.  */
		     && INTEGRAL_TYPE_P (arg1_type))
	      {
		/* The following case also applies to X < signed_max+1
		   and X >= signed_max+1 because of previous
		   transformations.  */
		if (code == LE_EXPR || code == GT_EXPR)
		  {
		    tree st = signed_type_for (arg1_type);
		    return fold_build2_loc (loc,
					    code == LE_EXPR ? GE_EXPR : LT_EXPR,
					    type,
					    fold_convert_loc (loc, st, arg0),
					    build_int_cst (st, 0));
		  }
	      }
	  }
      }
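
      /* Editor's illustration (assumed unsigned char operand): for
	 "(unsigned char) x <= 127" the signed_max branch flips the
	 comparison into "(signed char) x >= 0", turning a range test
	 on the top bit into a plain sign test.  */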
      /* If we are comparing an ABS_EXPR with a constant, we can
	 convert all the cases into explicit comparisons, but they may
	 well not be faster than doing the ABS and one comparison.
	 But ABS (X) <= C is a range comparison, which becomes a subtraction
	 and a comparison, and is probably faster.  */
      if (code == LE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == ABS_EXPR
	  && ! TREE_SIDE_EFFECTS (arg0)
	  && (0 != (tem = negate_expr (arg1)))
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				build2 (GE_EXPR, type,
					TREE_OPERAND (arg0, 0), tem),
				build2 (LE_EXPR, type,
					TREE_OPERAND (arg0, 0), arg1));

      /* Convert ABS_EXPR<x> >= 0 to true.  */
      strict_overflow_p = false;
      if (code == GE_EXPR
	  && (integer_zerop (arg1)
	      || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
		  && real_zerop (arg1)))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand_loc (loc, type,
				       constant_boolean_node (true, type),
				       arg0);
	}

      /* Convert ABS_EXPR<x> < 0 to false.  */
      strict_overflow_p = false;
      if (code == LT_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand_loc (loc, type,
				       constant_boolean_node (false, type),
				       arg0);
	}
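
      /* Editor's illustration (sketch): "abs (x) <= 5" becomes
	 "x >= -5 && x <= 5" via the TRUTH_ANDIF_EXPR built above, and
	 "abs (x) >= 0" / "abs (x) < 0" fold to true / false outright.  */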
      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
	 and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TREE_CODE (arg1) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (arg1, 0)))
	return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
			   build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
				   TREE_OPERAND (arg1, 1)),
			   build_zero_cst (TREE_TYPE (arg0)));

      /* Similarly for X < (cast) (1 << Y).  But cast can't be narrowing,
	 otherwise Y might be >= # of bits in X's type and thus e.g.
	 (unsigned char) (1 << Y) for Y 15 might be 0.
	 If the cast is widening, then 1 << Y should have unsigned type,
	 otherwise if Y is number of bits in the signed shift type minus 1,
	 we can't optimize this.  E.g. (unsigned long long) (1 << Y) for Y
	 31 might be 0xffffffff80000000.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && CONVERT_EXPR_P (arg1)
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
	  && (TYPE_PRECISION (TREE_TYPE (arg1))
	      >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
	  && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
	      || (TYPE_PRECISION (TREE_TYPE (arg1))
		  == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
	  && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
	{
	  tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
			TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
	  return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
			     fold_convert_loc (loc, TREE_TYPE (arg0), tem),
			     build_zero_cst (TREE_TYPE (arg0)));
	}

      return NULL_TREE;
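
      /* Editor's illustration (assumed unsigned 32-bit X):
	 "X < (1u << Y)" asks whether all bits of X at positions >= Y
	 are clear, which is exactly "(X >> Y) == 0"; likewise
	 "X >= (1u << Y)" becomes "(X >> Y) != 0".  */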
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
	{
	  t1 = fold_relational_const (code, type, arg0, arg1);
	  if (t1 != NULL_TREE)
	    return t1;
	}

      /* If the first operand is NaN, the result is constant.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand_loc (loc, type, t1, arg1);
	}

      /* If the second operand is NaN, the result is constant.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand_loc (loc, type, t1, arg0);
	}

      /* Simplify unordered comparison of something with itself.  */
      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (1, type);

      if (code == LTGT_EXPR
	  && !flag_trapping_math
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (0, type);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
	tree targ0 = strip_float_extensions (arg0);
	tree targ1 = strip_float_extensions (arg1);
	tree newtype = TREE_TYPE (targ0);

	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	  newtype = TREE_TYPE (targ1);

	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, newtype, targ0),
				  fold_convert_loc (loc, newtype, targ1));
      }

      return NULL_TREE;
    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
	 nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
	return NULL_TREE;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
				 : fold_convert_loc (loc, type, arg1);
      return pedantic_non_lvalue_loc (loc, tem);

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg0) == REAL_CST
	   && TREE_CODE (arg1) == REAL_CST)
	  || (TREE_CODE (arg0) == INTEGER_CST
	      && TREE_CODE (arg1) == INTEGER_CST))
	return build_complex (type, arg0, arg1);
      if (TREE_CODE (arg0) == REALPART_EXPR
	  && TREE_CODE (arg1) == IMAGPART_EXPR
	  && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
	  && operand_equal_p (TREE_OPERAND (arg0, 0),
			      TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
				     TREE_OPERAND (arg1, 0));
      return NULL_TREE;
    case ASSERT_EXPR:
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
      gcc_unreachable ();

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
	if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg0, elts)
	    || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
	  return NULL_TREE;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
					  ? NOP_EXPR : FIX_TRUNC_EXPR,
					  TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
	unsigned int out, ofs, scale;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
	if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 4);
	if (!vec_cst_ctor_to_array (arg0, elts)
	    || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
	  return NULL_TREE;

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	for (out = 0; out < nelts; out++)
	  {
	    unsigned int in1 = (out << scale) + ofs;
	    unsigned int in2 = in1 + nelts * 2;
	    tree t1, t2;

	    t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
	    t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    elts[out] = const_binop (MULT_EXPR, t1, t2);
	    if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
   a LABEL_EXPR; otherwise return NULL_TREE.  Do not check the subtrees
   of GOTO_EXPR.  */

static tree
contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      return *tp;

    case GOTO_EXPR:
      *walk_subtrees = 0;

      /* ... fall through ...  */

    default:
      return NULL_TREE;
    }
}

/* Return whether the sub-tree ST contains a label which is accessible from
   outside the sub-tree.  */

static bool
contains_label_p (tree st)
{
  return
   (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
}
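
/* Editor's note (sketch): the GOTO_EXPR arm clears *walk_subtrees so a
   goto's label operand, which is only a reference and not a definition,
   is not reported; only label definitions inside ST make
   contains_label_p return true, which is what the COND_EXPR folding
   below needs to avoid discarding a reachable label.  */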
/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_ternary_loc (location_t loc, enum tree_code code, tree type,
		  tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 3);

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any cases,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */
  if (op0)
    {
      arg0 = op0;
      STRIP_NOPS (arg0);
    }
  if (op1)
    {
      arg1 = op1;
      STRIP_NOPS (arg1);
    }
  if (op2)
    {
      arg2 = op2;
      STRIP_NOPS (arg2);
    }

  switch (code)
    {
    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
	{
	  unsigned HOST_WIDE_INT idx;
	  tree field, value;
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
	    if (field == arg1)
	      return value;
	}
      return NULL_TREE;

    case COND_EXPR:
    case VEC_COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
	 so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
	  tem = integer_zerop (arg0) ? op2 : op1;
	  /* Only optimize constant conditions when the selected branch
	     has the same type as the COND_EXPR.  This avoids optimizing
	     away "c ? x : throw", where the throw has a void type.
	     Avoid throwing away that operand which contains label.  */
	  if ((!TREE_SIDE_EFFECTS (unused_op)
	       || !contains_label_p (unused_op))
	      && (! VOID_TYPE_P (TREE_TYPE (tem))
		  || VOID_TYPE_P (type)))
	    return pedantic_non_lvalue_loc (loc, tem);
	  return NULL_TREE;
	}
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  if (integer_all_onesp (arg0))
	    return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
	  if (integer_zerop (arg0))
	    return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);

	  if ((TREE_CODE (arg1) == VECTOR_CST
	       || TREE_CODE (arg1) == CONSTRUCTOR)
	      && (TREE_CODE (arg2) == VECTOR_CST
		  || TREE_CODE (arg2) == CONSTRUCTOR))
	    {
	      unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	      unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
	      gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
	      for (i = 0; i < nelts; i++)
		{
		  tree val = VECTOR_CST_ELT (arg0, i);
		  if (integer_all_onesp (val))
		    sel[i] = i;
		  else if (integer_zerop (val))
		    sel[i] = nelts + i;
		  else /* Currently unreachable.  */
		    return NULL_TREE;
		}
	      tree t = fold_vec_perm (type, arg1, arg2, sel);
	      if (t != NULL_TREE)
		return t;
	    }
	}

      if (operand_equal_p (arg1, op2, 0))
	return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
      /* If we have A op B ? A : C, we may be able to convert this to a
	 simpler expression, depending on the operation and the values
	 of B and C.  Signed zeros prevent all of these transformations,
	 for reasons given above each one.

	 Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     arg1, TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
	{
	  tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
	  if (tem)
	    return tem;
	}

      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     op2,
					     TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  tem = fold_invert_truthvalue (loc0, arg0);
	  if (tem && COMPARISON_CLASS_P (tem))
	    {
	      tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
	      if (tem)
		return tem;
	    }
	}

      /* If the second operand is simpler than the third, swap them
	 since that produces better jump optimization results.  */
      if (truth_value_p (TREE_CODE (arg0))
	  && tree_swap_operands_p (op1, op2, false))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* See if this can be inverted.  If it can't, possibly because
	     it was a floating-point inequality comparison, don't do
	     anything.  */
	  tem = fold_invert_truthvalue (loc0, arg0);
	  if (tem)
	    return fold_build3_loc (loc, code, type, tem, op2, op1);
	}

      /* Convert A ? 1 : 0 to simply A.  */
      if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
				 : (integer_onep (op1)
				    && !VECTOR_TYPE_P (type)))
	  && integer_zerop (op2)
	  /* If we try to convert OP0 to our type, the
	     call to fold will try to move the conversion inside
	     a COND, which will recurse.  In that case, the COND_EXPR
	     is probably the best choice, so leave it alone.  */
	  && type == TREE_TYPE (arg0))
	return pedantic_non_lvalue_loc (loc, arg0);
      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
	 over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
	  && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
				    : (integer_onep (op2)
				       && !VECTOR_TYPE_P (type)))
	  && truth_value_p (TREE_CODE (arg0)))
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
							  invert_truthvalue_loc (loc,
										 arg0)));
14189 if (TREE_CODE (arg0
) == LT_EXPR
14190 && integer_zerop (TREE_OPERAND (arg0
, 1))
14191 && integer_zerop (op2
)
14192 && (tem
= sign_bit_p (TREE_OPERAND (arg0
, 0), arg1
)))
14194 /* sign_bit_p looks through both zero and sign extensions,
14195 but for this optimization only sign extensions are
14197 tree tem2
= TREE_OPERAND (arg0
, 0);
14198 while (tem
!= tem2
)
14200 if (TREE_CODE (tem2
) != NOP_EXPR
14201 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2
, 0))))
14206 tem2
= TREE_OPERAND (tem2
, 0);
14208 /* sign_bit_p only checks ARG1 bits within A's precision.
14209 If <sign bit of A> has wider type than A, bits outside
14210 of A's precision in <sign bit of A> need to be checked.
14211 If they are all 0, this optimization needs to be done
14212 in unsigned A's type, if they are all 1 in signed A's type,
14213 otherwise this can't be done. */
14215 && TYPE_PRECISION (TREE_TYPE (tem
))
14216 < TYPE_PRECISION (TREE_TYPE (arg1
))
14217 && TYPE_PRECISION (TREE_TYPE (tem
))
14218 < TYPE_PRECISION (type
))
14220 int inner_width
, outer_width
;
14223 inner_width
= TYPE_PRECISION (TREE_TYPE (tem
));
14224 outer_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
14225 if (outer_width
> TYPE_PRECISION (type
))
14226 outer_width
= TYPE_PRECISION (type
);
14228 wide_int mask
= wi::shifted_mask
14229 (inner_width
, outer_width
- inner_width
, false,
14230 TYPE_PRECISION (TREE_TYPE (arg1
)));
14232 wide_int common
= mask
& arg1
;
14233 if (common
== mask
)
14235 tem_type
= signed_type_for (TREE_TYPE (tem
));
14236 tem
= fold_convert_loc (loc
, tem_type
, tem
);
14238 else if (common
== 0)
14240 tem_type
= unsigned_type_for (TREE_TYPE (tem
));
14241 tem
= fold_convert_loc (loc
, tem_type
, tem
);
14249 fold_convert_loc (loc
, type
,
14250 fold_build2_loc (loc
, BIT_AND_EXPR
,
14251 TREE_TYPE (tem
), tem
,
14252 fold_convert_loc (loc
,
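
      /* Editor's illustration (assumed 32-bit int A): for
	 "a < 0 ? 0x80000000u : 0" the branch value is exactly the sign
	 bit of A, so the whole conditional collapses to
	 "a & 0x80000000u" after the type adjustments above.  */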
      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
	 already handled above.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && integer_pow2p (arg1))
	{
	  tree tem = TREE_OPERAND (arg0, 0);
	  STRIP_NOPS (tem);
	  if (TREE_CODE (tem) == RSHIFT_EXPR
	      && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
	      && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
		 tree_to_uhwi (TREE_OPERAND (tem, 1)))
	    return fold_build2_loc (loc, BIT_AND_EXPR, type,
				    TREE_OPERAND (tem, 0), arg1);
	}
      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
	 is probably obsolete because the first operand should be a
	 truth value (that's why we have the two cases above), but let's
	 leave it in until we can confirm this for all front-ends.  */
      if (integer_zerop (op2)
	  && TREE_CODE (arg0) == NE_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_pow2p (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      arg1, OEP_ONLY_CONST))
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
							  TREE_OPERAND (arg0, 0)));
      /* Disable the transformations below for vectors, since
	 fold_binary_op_with_conditional_arg may undo them immediately,
	 yielding an infinite loop.  */
      if (code == VEC_COND_EXPR)
	return NULL_TREE;

      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1))
	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
	return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
							   : TRUTH_ANDIF_EXPR,
				type, fold_convert_loc (loc, type, arg0),
				arg1);

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if ((code == VEC_COND_EXPR ? integer_all_onesp (op2)
				 : integer_onep (op2))
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1))
	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_invert_truthvalue (loc0, arg0);
	  if (tem)
	    return fold_build2_loc (loc, code == VEC_COND_EXPR
					 ? BIT_IOR_EXPR
					 : TRUTH_ORIF_EXPR,
				    type, fold_convert_loc (loc, type, tem),
				    arg1);
	}

      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
      if (integer_zerop (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2))
	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_invert_truthvalue (loc0, arg0);
	  if (tem)
	    return fold_build2_loc (loc, code == VEC_COND_EXPR
					 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
				    type, fold_convert_loc (loc, type, tem),
				    op2);
	}

      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1)
				 : integer_onep (arg1))
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2))
	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
	return fold_build2_loc (loc, code == VEC_COND_EXPR
				     ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
				type, fold_convert_loc (loc, type, arg0),
				op2);

      return NULL_TREE;
    case CALL_EXPR:
      /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
	 of fold_ternary on them.  */
      gcc_unreachable ();
    case BIT_FIELD_REF:
      if ((TREE_CODE (arg0) == VECTOR_CST
	   || (TREE_CODE (arg0) == CONSTRUCTOR
	       && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
	  && (type == TREE_TYPE (TREE_TYPE (arg0))
	      || (TREE_CODE (type) == VECTOR_TYPE
		  && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
	{
	  tree eltype = TREE_TYPE (TREE_TYPE (arg0));
	  unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
	  unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
	  unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);

	  if (n != 0
	      && (idx % width) == 0
	      && (n % width) == 0
	      && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
	    {
	      idx = idx / width;
	      n = n / width;

	      if (TREE_CODE (arg0) == VECTOR_CST)
		{
		  if (n == 1)
		    return VECTOR_CST_ELT (arg0, idx);

		  tree *vals = XALLOCAVEC (tree, n);
		  for (unsigned i = 0; i < n; ++i)
		    vals[i] = VECTOR_CST_ELT (arg0, idx + i);
		  return build_vector (type, vals);
		}

	      /* Constructor elements can be subvectors.  */
	      unsigned HOST_WIDE_INT k = 1;
	      if (CONSTRUCTOR_NELTS (arg0) != 0)
		{
		  tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
		  if (TREE_CODE (cons_elem) == VECTOR_TYPE)
		    k = TYPE_VECTOR_SUBPARTS (cons_elem);
		}

	      /* We keep an exact subset of the constructor elements.  */
	      if ((idx % k) == 0 && (n % k) == 0)
		{
		  if (CONSTRUCTOR_NELTS (arg0) == 0)
		    return build_constructor (type, NULL);
		  idx /= k;
		  n /= k;
		  if (n == 1)
		    {
		      if (idx < CONSTRUCTOR_NELTS (arg0))
			return CONSTRUCTOR_ELT (arg0, idx)->value;
		      return build_zero_cst (type);
		    }
		  vec<constructor_elt, va_gc> *vals;
		  vec_alloc (vals, n);
		  for (unsigned i = 0;
		       i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
		       ++i)
		    CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
					    CONSTRUCTOR_ELT
					      (arg0, idx + i)->value);
		  return build_constructor (type, vals);
		}
	      /* The bitfield references a single constructor element.  */
	      else if (idx + n <= (idx / k + 1) * k)
		{
		  if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
		    return build_zero_cst (type);
		  else if (n == k)
		    return CONSTRUCTOR_ELT (arg0, idx / k)->value;
		  else
		    return fold_build3_loc (loc, code, type,
					    CONSTRUCTOR_ELT (arg0,
							     idx / k)->value,
					    op1,
					    build_int_cst (TREE_TYPE (op2),
							   (idx % k) * width));
		}
	    }
	}

      /* A bit-field-ref that referenced the full argument can be stripped.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	  && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
	  && integer_zerop (op2))
	return fold_convert_loc (loc, type, arg0);

      /* On constants we can use native encode/interpret to constant
	 fold (nearly) all BIT_FIELD_REFs.  */
      if (CONSTANT_CLASS_P (arg0)
	  && can_native_interpret_type_p (type)
	  && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
	  /* This limitation should not be necessary, we just need to
	     round this up to mode size.  */
	  && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
	  /* Need bit-shifting of the buffer to relax the following.  */
	  && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
	{
	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
	  unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
	  unsigned HOST_WIDE_INT clen;
	  clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
	  /* ??? We cannot tell native_encode_expr to start at
	     some random byte only.  So limit us to a reasonable amount
	     of work.  */
	  if (clen <= 4096)
	    {
	      unsigned char *b = XALLOCAVEC (unsigned char, clen);
	      unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
	      if (len > 0
		  && len * BITS_PER_UNIT >= bitpos + bitsize)
		{
		  tree v = native_interpret_expr (type,
						  b + bitpos / BITS_PER_UNIT,
						  bitsize / BITS_PER_UNIT);
		  if (v)
		    return v;
		}
	    }
	}

      return NULL_TREE;

    case FMA_EXPR:
      /* For integers we can decompose the FMA if possible.  */
      if (TREE_CODE (arg0) == INTEGER_CST
	  && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2_loc (loc, PLUS_EXPR, type,
				const_binop (MULT_EXPR, arg0, arg1), arg2);
      if (integer_zerop (arg2))
	return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);

      return fold_fma (loc, type, arg0, arg1, arg2);
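
      /* Editor's illustration (sketch): an integer FMA_EXPR <2, 3, z>
	 becomes 6 + z via const_binop above, and FMA_EXPR <x, y, 0>
	 degenerates to x * y.  */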
    case VEC_PERM_EXPR:
      if (TREE_CODE (arg2) == VECTOR_CST)
	{
	  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
	  unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
	  bool need_mask_canon = false;
	  bool all_in_vec0 = true;
	  bool all_in_vec1 = true;
	  bool maybe_identity = true;
	  bool single_arg = (op0 == op1);
	  bool changed = false;

	  mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
	  gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
	  for (i = 0; i < nelts; i++)
	    {
	      tree val = VECTOR_CST_ELT (arg2, i);
	      if (TREE_CODE (val) != INTEGER_CST)
		return NULL_TREE;

	      /* Make sure that the perm value is in an acceptable
		 range.  */
	      wide_int t = val;
	      if (wi::gtu_p (t, mask))
		{
		  need_mask_canon = true;
		  sel[i] = t.to_uhwi () & mask;
		}
	      else
		sel[i] = t.to_uhwi ();

	      if (sel[i] < nelts)
		all_in_vec1 = false;
	      else
		all_in_vec0 = false;

	      if ((sel[i] & (nelts-1)) != i)
		maybe_identity = false;
	    }

	  if (maybe_identity)
	    {
	      if (all_in_vec0)
		return op0;
	      if (all_in_vec1)
		return op1;
	    }

	  if (all_in_vec0)
	    op1 = op0;
	  else if (all_in_vec1)
	    {
	      op0 = op1;
	      for (i = 0; i < nelts; i++)
		sel[i] -= nelts;
	      need_mask_canon = true;
	    }

	  if ((TREE_CODE (op0) == VECTOR_CST
	       || TREE_CODE (op0) == CONSTRUCTOR)
	      && (TREE_CODE (op1) == VECTOR_CST
		  || TREE_CODE (op1) == CONSTRUCTOR))
	    {
	      tree t = fold_vec_perm (type, op0, op1, sel);
	      if (t != NULL_TREE)
		return t;
	    }

	  if (op0 == op1 && !single_arg)
	    changed = true;

	  if (need_mask_canon && arg2 == op2)
	    {
	      tree *tsel = XALLOCAVEC (tree, nelts);
	      tree eltype = TREE_TYPE (TREE_TYPE (arg2));
	      for (i = 0; i < nelts; i++)
		tsel[i] = build_int_cst (eltype, sel[i]);
	      op2 = build_vector (TREE_TYPE (arg2), tsel);
	      changed = true;
	    }

	  if (changed)
	    return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
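
/* Editor's illustration (sketch): for a 4-element VEC_PERM_EXPR with
   op0 == op1, only the low two bits of each selector element matter, so
   a selector of {4, 5, 6, 7} is canonicalized to {0, 1, 2, 3}; that is
   then recognized as the identity permutation and op0 is returned.  */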
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;
  location_t loc = EXPR_LOCATION (expr);
  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  /* CALL_EXPR-like objects with variable numbers of operands are
     treated specially.  */
  if (kind == tcc_vl_exp)
    {
      if (code == CALL_EXPR)
	{
	  tem = fold_call_expr (loc, expr, false);
	  return tem ? tem : expr;
	}
      return expr;
    }

  if (IS_EXPR_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);
      tree op0, op1, op2;

      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  op0 = TREE_OPERAND (t, 0);
	  tem = fold_unary_loc (loc, code, type, op0);
	  return tem ? tem : expr;
	case 2:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  tem = fold_binary_loc (loc, code, type, op0, op1);
	  return tem ? tem : expr;
	case 3:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  op2 = TREE_OPERAND (t, 2);
	  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
	  return tem ? tem : expr;
	default:
	  break;
	}
    }

  switch (code)
    {
    case ARRAY_REF:
      {
	tree op0 = TREE_OPERAND (t, 0);
	tree op1 = TREE_OPERAND (t, 1);

	if (TREE_CODE (op1) == INTEGER_CST
	    && TREE_CODE (op0) == CONSTRUCTOR
	    && ! type_contains_placeholder_p (TREE_TYPE (op0)))
	  {
	    vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
	    unsigned HOST_WIDE_INT end = vec_safe_length (elts);
	    unsigned HOST_WIDE_INT begin = 0;

	    /* Find a matching index by means of a binary search.  */
	    while (begin != end)
	      {
		unsigned HOST_WIDE_INT middle = (begin + end) / 2;
		tree index = (*elts)[middle].index;

		if (TREE_CODE (index) == INTEGER_CST
		    && tree_int_cst_lt (index, op1))
		  begin = middle + 1;
		else if (TREE_CODE (index) == INTEGER_CST
			 && tree_int_cst_lt (op1, index))
		  end = middle;
		else if (TREE_CODE (index) == RANGE_EXPR
			 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
		  begin = middle + 1;
		else if (TREE_CODE (index) == RANGE_EXPR
			 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
		  end = middle;
		else
		  return (*elts)[middle].value;
	      }
	  }

	return t;
      }

      /* Return a VECTOR_CST if possible.  */
    case CONSTRUCTOR:
      {
	tree type = TREE_TYPE (t);
	if (TREE_CODE (type) != VECTOR_TYPE)
	  return t;

	tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
	unsigned HOST_WIDE_INT idx, pos = 0;
	tree value;

	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
	  {
	    if (!CONSTANT_CLASS_P (value))
	      return t;
	    if (TREE_CODE (value) == VECTOR_CST)
	      {
		for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
		  vec[pos++] = VECTOR_CST_ELT (value, i);
	      }
	    else
	      vec[pos++] = value;
	  }
	for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
	  vec[pos] = build_zero_cst (TREE_TYPE (type));

	return build_vector (type, vec);
      }

    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    default:
      return t;
    } /* switch (code) */
}
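
/* Editor's note (sketch): the ARRAY_REF arm above relies on constructor
   indices being sorted; e.g. looking up index 5 in a CONSTRUCTOR with
   indices {0, 2, RANGE_EXPR [4,7], 9} probes the middle entry, sees
   that 5 lies within [4,7], and returns that range's value without
   scanning the whole element list.  */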
#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (const_tree, struct md5_ctx *,
				hash_table <pointer_hash <const tree_node> > *);
static void fold_check_failed (const_tree, const_tree);
void print_fold_checksum (const_tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */

tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  hash_table <pointer_hash <const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before);
  ht.empty ();

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}
void
print_fold_checksum (const_tree expr)
{
  struct md5_ctx ctx;
  unsigned char checksum[16], cnt;
  hash_table <pointer_hash <const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum);
  for (cnt = 0; cnt < 16; ++cnt)
    fprintf (stderr, "%02x", checksum[cnt]);
  putc ('\n', stderr);
}

static void
fold_check_failed (const_tree expr ATTRIBUTE_UNUSED,
		   const_tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}
static void
fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
		    hash_table <pointer_hash <const tree_node> > *ht)
{
  const tree_node **slot;
  enum tree_code code;
  union tree_node buf;
  int i, len;

 recursive_label:
  if (expr == NULL)
    return;
  slot = ht->find_slot (expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = expr;
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
      expr = (tree) &buf;
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
	   && (TYPE_POINTER_TO (expr)
	       || TYPE_REFERENCE_TO (expr)
	       || TYPE_CACHED_VALUES_P (expr)
	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
	       || TYPE_NEXT_VARIANT (expr)))
    {
      /* Allow these fields to be modified.  */
      tree tmp;
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = tmp = (tree) &buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
      TYPE_POINTER_TO (tmp) = NULL;
      TYPE_REFERENCE_TO (tmp) = NULL;
      TYPE_NEXT_VARIANT (tmp) = NULL;
      if (TYPE_CACHED_VALUES_P (tmp))
	{
	  TYPE_CACHED_VALUES_P (tmp) = 0;
	  TYPE_CACHED_VALUES (tmp) = NULL;
	}
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST
      && code != SSA_NAME
      && CODE_CONTAINS_STRUCT (code, TS_COMMON))
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
	{
	case STRING_CST:
	  md5_process_bytes (TREE_STRING_POINTER (expr),
			     TREE_STRING_LENGTH (expr), ctx);
	  break;
	case COMPLEX_CST:
	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
	  break;
	case VECTOR_CST:
	  for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
	    fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_exceptional:
      switch (code)
	{
	case TREE_LIST:
	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
	  expr = TREE_CHAIN (expr);
	  goto recursive_label;
	  break;
	case TREE_VEC:
	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
    case tcc_vl_exp:
      len = TREE_OPERAND_LENGTH (expr);
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
	{
	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
	}
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
	{
	  if (TREE_CODE (expr) == FUNCTION_DECL)
	    {
	      fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
	      fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
	    }
	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
	}
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
	  || SCALAR_FLOAT_TYPE_P (expr))
	{
	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
	}
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
	  || TREE_CODE (expr) == UNION_TYPE
	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}
/* Helper function for outputting the checksum of a tree T.  When
   debugging with gdb, you can "define mynext" to be "next" followed
   by "call debug_fold_checksum (op0)", then just trace down till the
   outputs differ.  */

DEBUG_FUNCTION void
debug_fold_checksum (const_tree t)
{
  int i;
  unsigned char checksum[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (t, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum);

  for (i = 0; i < 16; i++)
    fprintf (stderr, "%d ", checksum[i]);

  fprintf (stderr, "\n");
}

#endif
/* Fold a unary tree expression with code CODE of type TYPE with an
   operand OP0.  LOC is the location of the resulting expression.
   Return a folded expression if successful.  Otherwise, return a tree
   expression with code CODE of type TYPE with an operand OP0.  */

tree
fold_build1_stat_loc (location_t loc,
		      enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before[16], checksum_after[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before);
  ht.empty ();
#endif

  tem = fold_unary_loc (loc, code, type, op0);
  if (!tem)
    tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (op0, tem);
#endif
  return tem;
}
/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting
   expression.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with operands
   OP0 and OP1.  */

tree
fold_build2_stat_loc (location_t loc,
		      enum tree_code code, tree type, tree op0, tree op1
		      MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
		checksum_before_op1[16],
		checksum_after_op0[16],
		checksum_after_op1[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  ht.empty ();
#endif

  tem = fold_binary_loc (loc, code, type, op0, op1);
  if (!tem)
    tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  ht.empty ();

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op1);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
#endif
  return tem;
}
/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */

tree
fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
		      tree op0, tree op1, tree op2 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_before_op2[16],
                checksum_after_op0[16],
                checksum_after_op1[16],
                checksum_after_op2[16];
  struct md5_ctx ctx;
  hash_table<pointer_hash<const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  ht.empty ();
#endif

  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
  if (!tem)
    tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  ht.empty ();

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  ht.empty ();

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op2);

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}
/* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
   arguments in ARGARRAY, and a null static chain.
   Return a folded expression if successful.  Otherwise, return a CALL_EXPR
   of type TYPE from the given operands as constructed by build_call_array.  */

tree
fold_build_call_array_loc (location_t loc, tree type, tree fn,
			   int nargs, tree *argarray)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_fn[16],
                checksum_before_arglist[16],
                checksum_after_fn[16],
                checksum_after_arglist[16];
  struct md5_ctx ctx;
  hash_table<pointer_hash<const tree_node> > ht (32);
  int i;

  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_fn);
  ht.empty ();

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_arglist);
  ht.empty ();
#endif

  tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_fn);
  ht.empty ();

  if (memcmp (checksum_before_fn, checksum_after_fn, 16))
    fold_check_failed (fn, tem);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_arglist);

  if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
    fold_check_failed (NULL_TREE, tem);
#endif
  return tem;
}
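/* Illustrative sketch, not part of the original source: given FN taken as
   the address of __builtin_sqrt's FUNCTION_DECL and ARGARRAY holding the
   single REAL_CST 4.0, a call such as

     tree t = fold_build_call_array_loc (loc, double_type_node, fn, 1, args);

   lets fold_builtin_call_array produce the REAL_CST 2.0 instead of a
   CALL_EXPR node (FN and ARGS here are hypothetical caller variables).  */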
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;

tree
fold_build1_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build1_loc (loc, code, type, op);

  END_FOLD_INIT;
  return result;
}

tree
fold_build2_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op0, tree op1)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build2_loc (loc, code, type, op0, op1);

  END_FOLD_INIT;
  return result;
}

tree
fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
				       int nargs, tree *argarray)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);

  END_FOLD_INIT;
  return result;
}

#undef START_FOLD_INIT
#undef END_FOLD_INIT
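/* Illustrative sketch, not part of the original source: the effect of the
   macros above is visible with a division by zero.  With trapping math in
   effect, fold_build2_loc must keep 1.0/0.0 as an RDIV_EXPR so the trap can
   happen at run time, whereas

     tree t = fold_build2_initializer_loc (UNKNOWN_LOCATION, RDIV_EXPR,
					   double_type_node, one, zero);

   (ONE and ZERO being hypothetical REAL_CST operands) may fold to +Inf,
   because flag_trapping_math is temporarily cleared.  */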
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */

int
multiple_of_p (tree type, const_tree top, const_tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
	return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
	{
	  tree op1, t1;

	  op1 = TREE_OPERAND (top, 1);
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
	      && 0 != (t1 = fold_convert (type,
					  const_binop (LSHIFT_EXPR,
						       size_one_node, op1)))
	      && !TREE_OVERFLOW (t1))
	    return multiple_of_p (type, t1, bottom);
	}
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
	  || (TYPE_PRECISION (type)
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
	return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case COND_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
	  || integer_zerop (bottom)
	  || (TYPE_UNSIGNED (type)
	      && (tree_int_cst_sgn (top) < 0
		  || tree_int_cst_sgn (bottom) < 0)))
	return 0;
      return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
				SIGNED);

    default:
      return 0;
    }
}
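/* Illustrative sketch, not part of the original source: with TYPE an
   integer type such as sizetype, the routine above reports

     multiple_of_p (sizetype, J * 8 + 16, 8)  == 1
     multiple_of_p (sizetype, J * 8 + 4,  8)  == 0

   since the PLUS_EXPR case requires both operands to be multiples, and
   the MULT_EXPR case is satisfied by the constant factor 8 alone.  */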
/* Return true if CODE or TYPE is known to be non-negative.  */

static bool
tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
{
  if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
      && truth_value_p (code))
    /* Truth values evaluate to 0 or 1, which is nonnegative unless we
       have a signed:1 type (where the value is -1 and 0).  */
    return true;

  return false;
}
/* Return true if (CODE OP0) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
				bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
	 ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!INTEGRAL_TYPE_P (type))
	return true;
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  *strict_overflow_p = true;
	  return true;
	}
      break;

    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
					    strict_overflow_p);

    CASE_CONVERT:
      {
	tree inner_type = TREE_TYPE (op0);
	tree outer_type = type;

	if (TREE_CODE (outer_type) == REAL_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_warnv_p (op0,
						    strict_overflow_p);
	    if (INTEGRAL_TYPE_P (inner_type))
	      {
		if (TYPE_UNSIGNED (inner_type))
		  return true;
		return tree_expr_nonnegative_warnv_p (op0,
						      strict_overflow_p);
	      }
	  }
	else if (INTEGRAL_TYPE_P (outer_type))
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_warnv_p (op0,
						    strict_overflow_p);
	    if (INTEGRAL_TYPE_P (inner_type))
	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
		     && TYPE_UNSIGNED (inner_type);
	  }
      }
      break;

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
				 tree op1, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type))
	return (tree_expr_nonnegative_warnv_p (op0,
					       strict_overflow_p)
		&& tree_expr_nonnegative_warnv_p (op1,
						  strict_overflow_p));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
	 both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TREE_CODE (op0) == NOP_EXPR
	  && TREE_CODE (op1) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    {
	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
				       TYPE_PRECISION (inner2)) + 1;
	      return prec < TYPE_PRECISION (type);
	    }
	}
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* x * x is always non-negative for floating point x
	     or without overflow.  */
	  if (operand_equal_p (op0, op1, 0)
	      || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
		  && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (type))
		*strict_overflow_p = true;
	      return true;
	    }
	}

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
	  && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
	{
	  tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
	    ? TREE_TYPE (TREE_OPERAND (op0, 0))
	    : TREE_TYPE (op0);
	  tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
	    ? TREE_TYPE (TREE_OPERAND (op1, 0))
	    : TREE_TYPE (op1);

	  bool unsigned0 = TYPE_UNSIGNED (inner0);
	  bool unsigned1 = TYPE_UNSIGNED (inner1);

	  if (TREE_CODE (op0) == INTEGER_CST)
	    unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;

	  if (TREE_CODE (op1) == INTEGER_CST)
	    unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;

	  if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
	      && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
	    {
	      unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
		? tree_int_cst_min_precision (op0, UNSIGNED)
		: TYPE_PRECISION (inner0);

	      unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
		? tree_int_cst_min_precision (op1, UNSIGNED)
		: TYPE_PRECISION (inner1);

	      return precision0 + precision1 < TYPE_PRECISION (type);
	    }
	}
      return false;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
					     strict_overflow_p)
	      || tree_expr_nonnegative_warnv_p (op1,
						strict_overflow_p));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
					     strict_overflow_p)
	      && tree_expr_nonnegative_warnv_p (op1,
						strict_overflow_p));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
					    strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case FIXED_CST:
      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));

    case COND_EXPR:
      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
					     strict_overflow_p)
	      && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
						strict_overflow_p));

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t));
    }
  /* We don't know sign of `t', so be conservative and return false.  */
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_call_nonnegative_warnv_p (tree type, tree fndecl,
			       tree arg0, tree arg1, bool *strict_overflow_p)
{
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
	CASE_FLT_FN (BUILT_IN_ACOS):
	CASE_FLT_FN (BUILT_IN_ACOSH):
	CASE_FLT_FN (BUILT_IN_CABS):
	CASE_FLT_FN (BUILT_IN_COSH):
	CASE_FLT_FN (BUILT_IN_ERFC):
	CASE_FLT_FN (BUILT_IN_EXP):
	CASE_FLT_FN (BUILT_IN_EXP10):
	CASE_FLT_FN (BUILT_IN_EXP2):
	CASE_FLT_FN (BUILT_IN_FABS):
	CASE_FLT_FN (BUILT_IN_FDIM):
	CASE_FLT_FN (BUILT_IN_HYPOT):
	CASE_FLT_FN (BUILT_IN_POW10):
	CASE_INT_FN (BUILT_IN_FFS):
	CASE_INT_FN (BUILT_IN_PARITY):
	CASE_INT_FN (BUILT_IN_POPCOUNT):
	CASE_INT_FN (BUILT_IN_CLZ):
	CASE_INT_FN (BUILT_IN_CLRSB):
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
	/* Always true.  */
	return true;

	CASE_FLT_FN (BUILT_IN_SQRT):
	/* sqrt(-0.0) is -0.0.  */
	if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	  return true;
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_ASINH):
	CASE_FLT_FN (BUILT_IN_ATAN):
	CASE_FLT_FN (BUILT_IN_ATANH):
	CASE_FLT_FN (BUILT_IN_CBRT):
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_ERF):
	CASE_FLT_FN (BUILT_IN_EXPM1):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_FMOD):
	CASE_FLT_FN (BUILT_IN_FREXP):
	CASE_FLT_FN (BUILT_IN_ICEIL):
	CASE_FLT_FN (BUILT_IN_IFLOOR):
	CASE_FLT_FN (BUILT_IN_IRINT):
	CASE_FLT_FN (BUILT_IN_IROUND):
	CASE_FLT_FN (BUILT_IN_LCEIL):
	CASE_FLT_FN (BUILT_IN_LDEXP):
	CASE_FLT_FN (BUILT_IN_LFLOOR):
	CASE_FLT_FN (BUILT_IN_LLCEIL):
	CASE_FLT_FN (BUILT_IN_LLFLOOR):
	CASE_FLT_FN (BUILT_IN_LLRINT):
	CASE_FLT_FN (BUILT_IN_LLROUND):
	CASE_FLT_FN (BUILT_IN_LRINT):
	CASE_FLT_FN (BUILT_IN_LROUND):
	CASE_FLT_FN (BUILT_IN_MODF):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_SCALB):
	CASE_FLT_FN (BUILT_IN_SCALBLN):
	CASE_FLT_FN (BUILT_IN_SCALBN):
	CASE_FLT_FN (BUILT_IN_SIGNBIT):
	CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
	CASE_FLT_FN (BUILT_IN_SINH):
	CASE_FLT_FN (BUILT_IN_TANH):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	/* True if the 1st argument is nonnegative.  */
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_FMAX):
	/* True if the 1st OR 2nd arguments are nonnegative.  */
	return (tree_expr_nonnegative_warnv_p (arg0,
					       strict_overflow_p)
		|| (tree_expr_nonnegative_warnv_p (arg1,
						   strict_overflow_p)));

	CASE_FLT_FN (BUILT_IN_FMIN):
	/* True if the 1st AND 2nd arguments are nonnegative.  */
	return (tree_expr_nonnegative_warnv_p (arg0,
					       strict_overflow_p)
		&& (tree_expr_nonnegative_warnv_p (arg1,
						   strict_overflow_p)));

	CASE_FLT_FN (BUILT_IN_COPYSIGN):
	/* True if the 2nd argument is nonnegative.  */
	return tree_expr_nonnegative_warnv_p (arg1,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_POWI):
	/* True if the 1st argument is nonnegative or the second
	   argument is an even integer.  */
	if (TREE_CODE (arg1) == INTEGER_CST
	    && (TREE_INT_CST_LOW (arg1) & 1) == 0)
	  return true;
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_POW):
	/* True if the 1st argument is nonnegative or the second
	   argument is an even integer valued real.  */
	if (TREE_CODE (arg1) == REAL_CST)
	  {
	    REAL_VALUE_TYPE c;
	    HOST_WIDE_INT n;

	    c = TREE_REAL_CST (arg1);
	    n = real_to_integer (&c);
	    if ((n & 1) == 0)
	      {
		REAL_VALUE_TYPE cint;
		real_from_integer (&cint, VOIDmode, n, SIGNED);
		if (real_identical (&c, &cint))
		  return true;
	      }
	  }
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

      default:
	break;
      }
  return tree_simple_nonnegative_warnv_p (CALL_EXPR,
					  type);
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

static bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code = TREE_CODE (t);
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (code)
    {
    case TARGET_EXPR:
      {
	tree temp = TARGET_EXPR_SLOT (t);
	t = TARGET_EXPR_INITIAL (t);

	/* If the initializer is non-void, then it's a normal expression
	   that will be assigned to the slot.  */
	if (!VOID_TYPE_P (t))
	  return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);

	/* Otherwise, the initializer sets the slot in some way.  One common
	   way is an assignment statement at the end of the initializer.  */
	while (1)
	  {
	    if (TREE_CODE (t) == BIND_EXPR)
	      t = expr_last (BIND_EXPR_BODY (t));
	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
		     || TREE_CODE (t) == TRY_CATCH_EXPR)
	      t = expr_last (TREE_OPERAND (t, 0));
	    else if (TREE_CODE (t) == STATEMENT_LIST)
	      t = expr_last (t);
	    else
	      break;
	  }
	if (TREE_CODE (t) == MODIFY_EXPR
	    && TREE_OPERAND (t, 0) == temp)
	  return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
						strict_overflow_p);

	return false;
      }

    case CALL_EXPR:
      {
	tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
	tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;

	return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
					      get_callee_fndecl (t),
					      arg0,
					      arg1,
					      strict_overflow_p);
      }

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
					    strict_overflow_p);

    case BIND_EXPR:
      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
					    strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
					    strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t));
    }
  /* We don't know sign of `t', so be conservative and return false.  */
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code;

  if (t == error_mark_node)
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p);

    case tcc_unary:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p);

    case TRUTH_NOT_EXPR:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
    }
}
/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */

bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-negative"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
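/* Illustrative sketch, not part of the original source: a typical client
   is a warning or simplification pass that asks, e.g. for a signed SRC
   being converted to an unsigned type,

     if (tree_expr_nonnegative_p (src))
       ... the conversion cannot change SRC's value ...

   If the answer relied on signed overflow being undefined, the call above
   has already emitted the -Wstrict-overflow note via fold_overflow_warning.  */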
/* Return true when (CODE OP0) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
			    bool *strict_overflow_p)
{
  switch (code)
    {
    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
					strict_overflow_p);

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (op0);
	tree outer_type = type;

	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
		&& tree_expr_nonzero_warnv_p (op0,
					      strict_overflow_p));
      }
      break;

    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
					strict_overflow_p);

    default:
      break;
    }

  return false;
}
/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonzero_warnv_p (enum tree_code code,
			     tree type,
			     tree op0,
			     tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* With the presence of negative values it is hard
	     to say something.  */
	  sub_strict_overflow_p = false;
	  if (!tree_expr_nonnegative_warnv_p (op0,
					      &sub_strict_overflow_p)
	      || !tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	    return false;
	  /* One of operands must be positive and the other non-negative.  */
	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
	     overflows, on a twos-complement machine the sum of two
	     nonnegative numbers can never be zero.  */
	  return (tree_expr_nonzero_warnv_p (op0,
					     strict_overflow_p)
		  || tree_expr_nonzero_warnv_p (op1,
						strict_overflow_p));
	}
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  if (tree_expr_nonzero_warnv_p (op0,
					 strict_overflow_p)
	      && tree_expr_nonzero_warnv_p (op1,
					    strict_overflow_p))
	    {
	      *strict_overflow_p = true;
	      return true;
	    }
	}
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (op1,
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;

	  /* When both operands are nonzero, then MAX must be too.  */
	  if (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p))
	    return true;

	  /* MAX where operand 0 is positive is positive.  */
	  return tree_expr_nonnegative_warnv_p (op0,
						strict_overflow_p);
	}
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1,
					  &sub_strict_overflow_p)
	       && tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p)
	      || tree_expr_nonzero_warnv_p (op0,
					    strict_overflow_p));

    default:
      break;
    }

  return false;
}
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
	tree base = TREE_OPERAND (t, 0);

	if (!DECL_P (base))
	  base = get_base_address (base);

	if (!base)
	  return false;

	/* For objects in symbol table check if we know they are non-zero.
	   Don't do anything for variables and functions before symtab is built;
	   it is quite possible that they will be declared weak later.  */
	if (DECL_P (base) && decl_in_symtab_p (base))
	  {
	    struct symtab_node *symbol;

	    symbol = symtab_node::get (base);
	    if (symbol)
	      return symbol->nonzero_address ();
	    else
	      return false;
	  }

	/* Function local objects are never NULL.  */
	if (DECL_P (base)
	    && (DECL_CONTEXT (base)
		&& TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
		&& auto_var_in_fn_p (base, DECL_CONTEXT (base))))
	  return true;

	/* Constants are never weak.  */
	if (CONSTANT_CLASS_P (base))
	  return true;

	return false;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    default:
      break;
    }

  return false;
}
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;
      location_t loc = EXPR_LOCATION (exp);

      if (TREE_CODE (exp) == INDIRECT_REF)
	string = string_constant (exp1, &index);
      else
	{
	  tree low_bound = array_ref_low_bound (exp);
	  index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));

	  /* Optimize the special-case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion, (ARRAY
	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
	  if (! integer_zerop (low_bound))
	    index = size_diffop_loc (loc, index,
				     fold_convert_loc (loc, sizetype,
						       low_bound));

	  string = exp1;
	}

      if (string
	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
	  && TREE_CODE (string) == STRING_CST
	  && TREE_CODE (index) == INTEGER_CST
	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
	      == MODE_INT)
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
	return build_int_cst_type (TREE_TYPE (exp),
				   (TREE_STRING_POINTER (string)
				    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}
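/* Illustrative sketch, not part of the original source: both access forms
   of a constant string fold to the element read here, e.g.

     "abc"[1]      -> INTEGER_CST 98 ('b')
     *("abc" + 2)  -> INTEGER_CST 99 ('c')

   provided the element type has a single-byte integer mode, as the guards
   above require.  */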
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	bool overflow;
	wide_int val = wi::neg (arg0, &overflow);
	t = force_fit_type (type, val, 1,
			    (overflow | TREE_OVERFLOW (arg0))
			    && !TYPE_UNSIGNED (type));
	break;
      }

    case REAL_CST:
      t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
	FIXED_VALUE_TYPE f;
	bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
					    &(TREE_FIXED_CST (arg0)), NULL,
					    TYPE_SATURATING (type));
	t = build_fixed (type, f);
	/* Propagate overflow flags.  */
	if (overflow_p | TREE_OVERFLOW (arg0))
	  TREE_OVERFLOW (t) = 1;
	break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	/* If the value is unsigned or non-negative, then the absolute value
	   is the same as the ordinary value.  */
	if (!wi::neg_p (arg0, TYPE_SIGN (type)))
	  t = arg0;

	/* If the value is negative, then the absolute value is
	   its negation.  */
	else
	  {
	    bool overflow;
	    wide_int val = wi::neg (arg0, &overflow);
	    t = force_fit_type (type, val, -1,
				overflow | TREE_OVERFLOW (arg0));
	  }
      }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
	t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      else
	t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (const_tree arg0, tree type)
{
  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
}
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case ORDERED_EXPR:
	      result = 0;
	      break;

	    case NE_EXPR:
	    case UNORDERED_EXPR:
	    case UNLT_EXPR:
	    case UNLE_EXPR:
	    case UNGT_EXPR:
	    case UNGE_EXPR:
	    case UNEQ_EXPR:
	      result = 1;
	      break;

	    case LT_EXPR:
	    case LE_EXPR:
	    case GT_EXPR:
	    case GE_EXPR:
	    case LTGT_EXPR:
	      if (flag_trapping_math)
		return NULL_TREE;
	      result = 0;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  return constant_boolean_node (result, type);
	}

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
					  TREE_REALPART (op0),
					  TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
					  TREE_IMAGPART (op0),
					  TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
	return NULL_TREE;
    }

  if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
    {
      unsigned count = VECTOR_CST_NELTS (op0);
      tree *elts = XALLOCAVEC (tree, count);
      gcc_assert (VECTOR_CST_NELTS (op1) == count
		  && TYPE_VECTOR_SUBPARTS (type) == count);

      for (unsigned i = 0; i < count; i++)
	{
	  tree elem_type = TREE_TYPE (type);
	  tree elem0 = VECTOR_CST_ELT (op0, i);
	  tree elem1 = VECTOR_CST_ELT (op1, i);

	  tree tem = fold_relational_const (code, elem_type,
					    elem0, elem1);

	  if (tem == NULL_TREE)
	    return NULL_TREE;

	  elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
	}

      return build_vector (type, elts);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	result = tree_int_cst_equal (op0, op1);
      else
	result = tree_int_cst_lt (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;

  return constant_boolean_node (result, type);
}
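/* Illustrative sketch, not part of the original source: the reduction to
   LT/EQ above works out as, e.g. for the INTEGER_CSTs 3 and 5,

     3 >= 5  ==>  invert (3 < 5)  ==>  invert (1)  ==>  0
     3 <= 5  ==>  invert (5 < 3)  ==>  invert (0)  ==>  1

   NaN operands never reach this point; they are handled by the REAL_CST
   code at the top, where inversion would not be safe.  */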
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check the expression inside the return
     and the right hand side of the modify expression inside the return.  If
     either has no side effects, we don't need to wrap the expression in a
     cleanup point expression.  Note we don't check the left hand side of the
     modify because it should always be a return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
	return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
	return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

static tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
	return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
	{
	  tree fop = fold_read_from_constant_string (op);
	  if (fop)
	    return fop;
	  else
	    return op;
	}
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && type == TREE_TYPE (optype)
	       && (!in_gimple_form
		   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (in_gimple_form
	      && TREE_CODE (min_val) != INTEGER_CST)
	    return NULL_TREE;
	  return build4_loc (loc, ARRAY_REF, type, op, min_val,
			     NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && type == TREE_TYPE (optype))
	return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op,
				  part_width, index);
	}
    }

  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
	{
	  tree op00type;
	  op00 = TREE_OPERAND (op00, 0);
	  op00type = TREE_TYPE (op00);

	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
	  if (TREE_CODE (op00type) == VECTOR_TYPE
	      && type == TREE_TYPE (op00type))
	    {
	      HOST_WIDE_INT offset = tree_to_shwi (op01);
	      tree part_width = TYPE_SIZE (type);
	      unsigned HOST_WIDE_INT part_widthi
		= tree_to_shwi (part_width) / BITS_PER_UNIT;
	      unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	      tree index = bitsize_int (indexi);

	      if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
		return fold_build3_loc (loc,
					BIT_FIELD_REF, type, op00,
					part_width, index);
	    }
	  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree size = TYPE_SIZE_UNIT (type);
	      if (tree_int_cst_equal (size, op01))
		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
	    }
	  /* ((foo *)&fooarray)[1] => fooarray[1] */
	  else if (TREE_CODE (op00type) == ARRAY_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree type_domain = TYPE_DOMAIN (op00type);
	      tree min_val = size_zero_node;
	      if (type_domain && TYPE_MIN_VALUE (type_domain))
		min_val = TYPE_MIN_VALUE (type_domain);
	      op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
				     TYPE_SIZE_UNIT (type));
	      op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
	      return build4_loc (loc, ARRAY_REF, type, op00, op01,
				 NULL_TREE, NULL_TREE);
	    }
	}
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
	  || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
	  && TREE_CODE (min_val) != INTEGER_CST)
	return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
			 NULL_TREE);
    }

  return NULL_TREE;
}
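/* Illustrative sketch, not part of the original source: besides the
   rewrites named in the comments above, note that each one fires only
   when TYPE matches the referenced element type exactly, and that the
   array-reference forms are additionally suppressed in gimple form
   unless the element size and minimum index are INTEGER_CSTs.  */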
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}
/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.  */

tree
round_up_loc (location_t loc, tree value, unsigned int divisor)
{
  tree div = NULL_TREE;

  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
	{
	  wide_int val = value;
	  bool overflow_p;

	  if ((val & (divisor - 1)) == 0)
	    return value;

	  overflow_p = TREE_OVERFLOW (value);
	  val += divisor - 1;
	  val &= ~(divisor - 1);
	  if (val == 0)
	    overflow_p = true;

	  return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
	}
      else
	{
	  tree t;

	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
	  t = build_int_cst (TREE_TYPE (value), -divisor);
	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
	}
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
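/* Illustrative sketch, not part of the original source: rounding 13 down
   to a multiple of 8 is just 13 & -8 == 8; the generic path computes
   FLOOR_DIV_EXPR (13, 8) * 8 == 8.  */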
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   in PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep,
				  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
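/* Illustrative sketch, not part of the original source: given ADDR_EXPRs of
   two fields of the same object, e.g. &s.b and &s.a, the cores compare
   equal and *DIFF receives the byte distance between the fields:

     HOST_WIDE_INT diff;
     if (ptr_difference_const (addr_b, addr_a, &diff))
       ... diff == offsetof (S, b) - offsetof (S, a) ...

   (ADDR_B and ADDR_A are hypothetical operand names.)  */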
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
	return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
				arg0 ? arg0 : TREE_OPERAND (exp, 0),
				arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
	return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp),
				arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
	return fold_build3_loc (loc,
				COND_EXPR, TREE_TYPE (exp),
				TREE_OPERAND (exp, 0),
				arg0 ? arg0 : TREE_OPERAND (exp, 1),
				arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
	const enum built_in_function fcode = builtin_mathfn_code (exp);
	switch (fcode)
	  {
	  CASE_FLT_FN (BUILT_IN_COPYSIGN):
	    /* Strip copysign function call, return the 1st argument.  */
	    arg0 = CALL_EXPR_ARG (exp, 0);
	    arg1 = CALL_EXPR_ARG (exp, 1);
	    return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

	  default:
	    /* Strip sign ops from the argument of "odd" math functions.  */
	    if (negate_mathfn_p (fcode))
	      {
		arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
		if (arg0)
		  return build_call_expr_loc (loc, get_callee_fndecl (exp),
					      1, arg0);
	      }
	    break;
	  }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}